path: root/deps/npm/node_modules/npm-registry-fetch
Diffstat (limited to 'deps/npm/node_modules/npm-registry-fetch')
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/CHANGELOG.md | 156
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/README.md | 93
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/auth.js | 12
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/check-response.js | 30
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/config.js | 24
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/errors.js | 21
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/index.js | 149
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/CHANGELOG.md | 478
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md | 16
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.es.md | 628
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.md | 624
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/en.js | 3
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/es.js | 3
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/get.js | 190
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/index.js | 3
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js | 26
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js | 125
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js | 21
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js | 162
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js | 225
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js | 69
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/fix-owner.js | 44
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js | 11
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/move-file.js | 51
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js | 32
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/y.js | 25
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js | 213
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.js | 44
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.json | 6
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.js | 46
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.json | 6
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/ls.js | 6
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/changelog.md | 7
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/index.js | 10
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/license | 7
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/package.json | 62
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/readme.md | 411
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/package.json | 137
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/put.js | 71
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/rm.js | 28
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/verify.js | 3
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/CHANGELOG.md | 29
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/LICENSE.md | 16
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/README.md | 121
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/index.js | 60
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/package.json | 70
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/CHANGELOG.md | 525
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE | 16
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/README.md | 404
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/agent.js | 171
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/cache.js | 257
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/index.js | 482
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json | 95
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/warning.js | 24
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/pump/.travis.yml | 5
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/pump/LICENSE | 21
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/pump/README.md | 56
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/pump/index.js | 82
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/pump/package.json | 59
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-browser.js | 62
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-node.js | 53
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.npmignore | 5
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.travis.yml | 11
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/LICENSE | 20
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/README.md | 307
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js | 726
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js.map | 1
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/lib/smart-buffer.js | 371
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/package.json | 70
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/test/smart-buffer.test.js | 410
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/typings/index.d.ts | 383
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.npmignore | 1
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.travis.yml | 22
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/History.md | 96
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/README.md | 134
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/index.js | 141
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/package.json | 66
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key | 15
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem | 12
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/test.js | 144
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/.npmignore | 4
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/LICENSE | 20
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/README.md | 339
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/associate.js | 33
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/bind.js | 30
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/connect.js | 31
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/index.js | 6
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-agent.js | 108
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-client.js | 306
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/socks/package.json | 68
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/CHANGELOG.md | 256
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/LICENSE.md | 16
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/README.md | 488
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/index.js | 379
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/package.json | 90
-rw-r--r--  deps/npm/node_modules/npm-registry-fetch/package.json | 92
96 files changed, 415 insertions, 11672 deletions
diff --git a/deps/npm/node_modules/npm-registry-fetch/CHANGELOG.md b/deps/npm/node_modules/npm-registry-fetch/CHANGELOG.md
index 05d5741ee8..71232eff75 100644
--- a/deps/npm/node_modules/npm-registry-fetch/CHANGELOG.md
+++ b/deps/npm/node_modules/npm-registry-fetch/CHANGELOG.md
@@ -2,6 +2,162 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+<a name="3.8.0"></a>
+# [3.8.0](https://github.com/npm/registry-fetch/compare/v3.7.0...v3.8.0) (2018-08-23)
+
+
+### Features
+
+* **mapJson:** add support for passing in json stream mapper ([0600986](https://github.com/npm/registry-fetch/commit/0600986))
+
+
+
+<a name="3.7.0"></a>
+# [3.7.0](https://github.com/npm/registry-fetch/compare/v3.6.0...v3.7.0) (2018-08-23)
+
+
+### Features
+
+* **json.stream:** add utility function for streamed JSON parsing ([051d969](https://github.com/npm/registry-fetch/commit/051d969))
+
+
+
+<a name="3.6.0"></a>
+# [3.6.0](https://github.com/npm/registry-fetch/compare/v3.5.0...v3.6.0) (2018-08-22)
+
+
+### Bug Fixes
+
+* **docs:** document opts.forceAuth ([40bcd65](https://github.com/npm/registry-fetch/commit/40bcd65))
+
+
+### Features
+
+* **opts.ignoreBody:** add a boolean to throw away response bodies ([6923702](https://github.com/npm/registry-fetch/commit/6923702))
+
+
+
+<a name="3.5.0"></a>
+# [3.5.0](https://github.com/npm/registry-fetch/compare/v3.4.0...v3.5.0) (2018-08-22)
+
+
+### Features
+
+* **pkgid:** heuristic pkgid calculation for errors ([2e789a5](https://github.com/npm/registry-fetch/commit/2e789a5))
+
+
+
+<a name="3.4.0"></a>
+# [3.4.0](https://github.com/npm/registry-fetch/compare/v3.3.0...v3.4.0) (2018-08-22)
+
+
+### Bug Fixes
+
+* **deps:** use new figgy-pudding with aliases fix ([0308f54](https://github.com/npm/registry-fetch/commit/0308f54))
+
+
+### Features
+
+* **auth:** add forceAuth option to force a specific auth mechanism ([4524d17](https://github.com/npm/registry-fetch/commit/4524d17))
+
+
+
+<a name="3.3.0"></a>
+# [3.3.0](https://github.com/npm/registry-fetch/compare/v3.2.1...v3.3.0) (2018-08-21)
+
+
+### Bug Fixes
+
+* **query:** stop including undefined keys ([4718b1b](https://github.com/npm/registry-fetch/commit/4718b1b))
+
+
+### Features
+
+* **otp:** use heuristic detection for malformed EOTP responses ([f035194](https://github.com/npm/registry-fetch/commit/f035194))
+
+
+
+<a name="3.2.1"></a>
+## [3.2.1](https://github.com/npm/registry-fetch/compare/v3.2.0...v3.2.1) (2018-08-16)
+
+
+### Bug Fixes
+
+* **opts:** pass through non-null opts.retry ([beba040](https://github.com/npm/registry-fetch/commit/beba040))
+
+
+
+<a name="3.2.0"></a>
+# [3.2.0](https://github.com/npm/registry-fetch/compare/v3.1.1...v3.2.0) (2018-07-27)
+
+
+### Features
+
+* **gzip:** add opts.gzip convenience opt ([340abe0](https://github.com/npm/registry-fetch/commit/340abe0))
+
+
+
+<a name="3.1.1"></a>
+## [3.1.1](https://github.com/npm/registry-fetch/compare/v3.1.0...v3.1.1) (2018-04-09)
+
+
+
+<a name="3.1.0"></a>
+# [3.1.0](https://github.com/npm/registry-fetch/compare/v3.0.0...v3.1.0) (2018-04-09)
+
+
+### Features
+
+* **config:** support no-proxy and https-proxy options ([9aa906b](https://github.com/npm/registry-fetch/commit/9aa906b))
+
+
+
+<a name="3.0.0"></a>
+# [3.0.0](https://github.com/npm/registry-fetch/compare/v2.1.0...v3.0.0) (2018-04-09)
+
+
+### Bug Fixes
+
+* **api:** pacote integration-related fixes ([a29de4f](https://github.com/npm/registry-fetch/commit/a29de4f))
+* **config:** stop caring about opts.config ([5856a6f](https://github.com/npm/registry-fetch/commit/5856a6f))
+
+
+### BREAKING CHANGES
+
+* **config:** opts.config is no longer supported. Pass the options down in opts itself.
+
+
+
+<a name="2.1.0"></a>
+# [2.1.0](https://github.com/npm/registry-fetch/compare/v2.0.0...v2.1.0) (2018-04-08)
+
+
+### Features
+
+* **token:** accept opts.token for opts._authToken ([108c9f0](https://github.com/npm/registry-fetch/commit/108c9f0))
+
+
+
+<a name="2.0.0"></a>
+# [2.0.0](https://github.com/npm/registry-fetch/compare/v1.1.1...v2.0.0) (2018-04-08)
+
+
+### meta
+
+* drop support for node@4 ([758536e](https://github.com/npm/registry-fetch/commit/758536e))
+
+
+### BREAKING CHANGES
+
+* node@4 is no longer supported
+
+
+
+<a name="1.1.1"></a>
+## [1.1.1](https://github.com/npm/registry-fetch/compare/v1.1.0...v1.1.1) (2018-04-06)
+
+
+
<a name="1.1.0"></a>
# [1.1.0](https://github.com/npm/registry-fetch/compare/v1.0.1...v1.1.0) (2018-03-16)
diff --git a/deps/npm/node_modules/npm-registry-fetch/README.md b/deps/npm/node_modules/npm-registry-fetch/README.md
index 4947dc891f..0c3f4f9469 100644
--- a/deps/npm/node_modules/npm-registry-fetch/README.md
+++ b/deps/npm/node_modules/npm-registry-fetch/README.md
@@ -82,27 +82,33 @@ const res = await fetch.json('/-/ping')
console.log(res) // Body parsed as JSON
```
-#### <a name="fetch-opts"></a> `fetch` Options
+#### <a name="fetch-json-stream"></a> `> fetch.json.stream(url, jsonPath, [opts]) -> Stream`
-Fetch options are optional, and can be passed in as either a Map-like object
-(one with a `.get()` method), a plain javascript object, or a
-[`figgy-pudding`](https://npm.im/figgy-pudding) instance.
+Performs a request to a given registry URL and parses the body of the response
+as JSON, with each entry being emitted through the stream.
-##### <a name="opts-auth-token"></a> `opts._authToken`
-
-* Type: String
-* Default: null
+The `jsonPath` argument is a [`JSONStream.parse()`
+path](https://github.com/dominictarr/JSONStream#jsonstreamparsepath), and the
+returned stream (unlike default `JSONStream`s), has a valid
+`Symbol.asyncIterator` implementation.
-Authentication token string.
+For available options, please see the section on [`fetch` options](#fetch-opts).
-Can be scoped to a registry by using a "nerf dart" for that registry. That is:
+##### Example
-```
-{
- '//registry.npmjs.org/:_authToken': 't0k3nH34r'
+```javascript
+console.log('https://npm.im/~zkat has access to the following packages:')
+for await (let {key, value} of fetch.json.stream('/-/user/zkat/package', '$*')) {
+ console.log(`https://npm.im/${key} (perms: ${value})`)
}
```
+#### <a name="fetch-opts"></a> `fetch` Options
+
+Fetch options are optional, and can be passed in as either a Map-like object
+(one with a `.get()` method), a plain javascript object, or a
+[`figgy-pudding`](https://npm.im/figgy-pudding) instance.
+
##### <a name="opts-agent"></a> `opts.agent`
* Type: http.Agent
@@ -229,6 +235,25 @@ packages.
See also [`opts.retry`](#opts-retry) to provide all retry options as a single
object.
+##### <a name="opts-force-auth"></a> `opts.force-auth`
+
+* Alias: `opts.forceAuth`
+* Type: Object
+* Default: null
+
+If present, other auth-related values in `opts` will be completely ignored,
+including `alwaysAuth`, `email`, and `otp`, when calculating auth for a request,
+and the auth details in `opts.forceAuth` will be used instead.
+
+##### <a name="opts-gzip"></a> `opts.gzip`
+
+* Type: Boolean
+* Default: false
+
+If true, `npm-registry-fetch` will set the `Content-Encoding` header to `gzip`
+and use `zlib.gzip()` or `zlib.createGzip()` to gzip-encode
+[`opts.body`](#opts-body).
+
##### <a name="opts-headers"></a> `opts.headers`
* Type: Object
@@ -238,6 +263,16 @@ Additional headers for the outgoing request. This option can also be used to
override headers automatically generated by `npm-registry-fetch`, such as
`Content-Type`.
+##### <a name="opts-ignore-body"></a> `opts.ignore-body`
+
+* Alias: `opts.ignoreBody`
+* Type: Boolean
+* Default: false
+
+If true, the **response body** will be thrown away and `res.body` set to `null`.
+This will prevent dangling response sockets for requests where you don't usually
+care what the response body is.
+
##### <a name="opts-integrity"></a> `opts.integrity`
* Type: String | [SRI object](https://npm.im/ssri)
@@ -301,6 +336,16 @@ See also [`opts.proxy`](#opts-proxy)
Logger object to use for logging operation details. Must have the same methods
as `npmlog`.
+##### <a name="opts-map-json"></a> `opts.map-json`
+
+* Alias: `mapJson`, `mapJSON`
+* Type: Function
+* Default: undefined
+
+When using `fetch.json.stream()` (NOT `fetch.json()`), this will be passed down
+to [`JSONStream`](https://npm.im/JSONStream) as the second argument to
+`JSONStream.parse`, and can be used to transform stream data before output.
+
##### <a name="opts-maxsockets"></a> `opts.maxsockets`
* Alias: `opts.max-sockets`
@@ -362,7 +407,7 @@ account.
* Default: null
Password used for basic authentication. For the more modern authentication
-method, please use the (more secure) [`opts._authToken`](#opts-auth-token)
+method, please use the (more secure) [`opts.token`](#opts-token)
Can optionally be scoped to a registry by using a "nerf dart" for that registry.
That is:
@@ -513,6 +558,22 @@ See also [`opts.ca`](#opts-ca).
Time before a hanging request times out.
+##### <a name="opts-token"></a> `opts.token`
+
+* Alias: `opts._authToken`
+* Type: String
+* Default: null
+
+Authentication token string.
+
+Can be scoped to a registry by using a "nerf dart" for that registry. That is:
+
+```
+{
+ '//registry.npmjs.org/:token': 't0k3nH34r'
+}
+```
+
##### <a name="opts-user-agent"></a> `opts.user-agent`
* Type: String
@@ -526,7 +587,7 @@ User agent string to send in the `User-Agent` header.
* Default: null
Username used for basic authentication. For the more modern authentication
-method, please use the (more secure) [`opts._authToken`](#opts-auth-token)
+method, please use the (more secure) [`opts.token`](#opts-token)
Can optionally be scoped to a registry by using a "nerf dart" for that registry.
That is:
@@ -545,4 +606,4 @@ See also [`opts.password`](#opts-password)
* Default: null
** DEPRECATED ** This is a legacy authentication token supported only for
-*compatibility. Please us [`opts._authToken`](#opts-auth-token) instead.
+*compatibility. Please use [`opts.token`](#opts-token) instead.
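
The README changes above introduce `opts.token`, `opts.gzip`, and `opts.ignoreBody` alongside the streaming JSON API. As a rough sketch of how the documented options fit together (the registry URL, token, and user-agent below are placeholder values, not anything taken from the diff):

```javascript
'use strict'
const fetch = require('npm-registry-fetch')

// All concrete values below are placeholders for illustration.
const opts = {
  registry: 'https://registry.npmjs.org/',
  '//registry.npmjs.org/:token': 't0k3nH34r', // scoped token via a "nerf dart"
  'user-agent': 'my-tool/1.0.0'
}

// Plain JSON request authenticated with the scoped token above.
fetch.json('/-/whoami', opts)
  .then(body => console.log('logged in as', body.username))
  .catch(err => console.error('request failed:', err.message))

// Only the status matters here, so throw the body away (res.body === null),
// per the `opts.ignoreBody` / `opts.ignore-body` option documented above.
fetch('/-/ping', Object.assign({ ignoreBody: true }, opts))
  .then(res => console.log('registry responded with', res.status))
```
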
diff --git a/deps/npm/node_modules/npm-registry-fetch/auth.js b/deps/npm/node_modules/npm-registry-fetch/auth.js
index f4f7316db1..fa696b97dd 100644
--- a/deps/npm/node_modules/npm-registry-fetch/auth.js
+++ b/deps/npm/node_modules/npm-registry-fetch/auth.js
@@ -9,7 +9,11 @@ function getAuth (registry, opts) {
opts = config(opts)
let AUTH = {}
const regKey = registry && registryKey(registry)
+ if (opts.forceAuth) {
+ opts = opts.forceAuth
+ }
const doKey = (key, alias) => addKey(opts, AUTH, regKey, key, alias)
+ doKey('token')
doKey('_authToken', 'token')
doKey('username')
doKey('password')
@@ -26,11 +30,11 @@ function getAuth (registry, opts) {
}
function addKey (opts, obj, scope, key, objKey) {
- if (opts.get(key)) {
- obj[objKey || key] = opts.get(key)
+ if (opts[key]) {
+ obj[objKey || key] = opts[key]
}
- if (scope && opts.get(`${scope}:${key}`)) {
- obj[objKey || key] = opts.get(`${scope}:${key}`)
+ if (scope && opts[`${scope}:${key}`]) {
+ obj[objKey || key] = opts[`${scope}:${key}`]
}
}
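
Because the `forceAuth` branch swaps out the whole options object before any auth keys are read, credentials configured elsewhere are ignored entirely. A hedged sketch of what that looks like from the caller's side (both token strings are placeholders):

```javascript
const fetch = require('npm-registry-fetch')

// Even though a scoped token is configured, forceAuth wins: only the
// credentials inside it are considered when the auth header is calculated.
fetch.json('/-/whoami', {
  '//registry.npmjs.org/:token': 'token-configured-elsewhere',
  forceAuth: {
    token: 'the-token-that-actually-gets-used'
  }
}).then(body => console.log('authenticated as', body.username))
  .catch(err => console.error(err.message))
```
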
diff --git a/deps/npm/node_modules/npm-registry-fetch/check-response.js b/deps/npm/node_modules/npm-registry-fetch/check-response.js
index 407a80e4ce..bfde699edc 100644
--- a/deps/npm/node_modules/npm-registry-fetch/check-response.js
+++ b/deps/npm/node_modules/npm-registry-fetch/check-response.js
@@ -8,7 +8,7 @@ module.exports = checkResponse
function checkResponse (method, res, registry, startTime, opts) {
opts = config(opts)
if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
- opts.get('log').notice('', res.headers.get('npm-notice'))
+ opts.log.notice('', res.headers.get('npm-notice'))
}
checkWarnings(res, registry, opts)
if (res.status >= 400) {
@@ -16,6 +16,10 @@ function checkResponse (method, res, registry, startTime, opts) {
return checkErrors(method, res, startTime, opts)
} else {
res.body.on('end', () => logRequest(method, res, startTime, opts))
+ if (opts.ignoreBody) {
+ res.body.resume()
+ res.body = null
+ }
return res
}
}
@@ -25,7 +29,7 @@ function logRequest (method, res, startTime, opts) {
const attempt = res.headers.get('x-fetch-attempts')
const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
const cacheStr = res.headers.get('x-local-cache') ? ' (from cache)' : ''
- opts.get('log').http(
+ opts.log.http(
'fetch',
`${method.toUpperCase()} ${res.status} ${res.url} ${elapsedTime}ms${attemptStr}${cacheStr}`
)
@@ -51,14 +55,14 @@ function checkWarnings (res, registry, opts) {
BAD_HOSTS.set(registry, true)
if (warnings['199']) {
if (warnings['199'].message.match(/ENOTFOUND/)) {
- opts.get('log').warn('registry', `Using stale data from ${registry} because the host is inaccessible -- are you offline?`)
+ opts.log.warn('registry', `Using stale data from ${registry} because the host is inaccessible -- are you offline?`)
} else {
- opts.get('log').warn('registry', `Unexpected warning for ${registry}: ${warnings['199'].message}`)
+ opts.log.warn('registry', `Unexpected warning for ${registry}: ${warnings['199'].message}`)
}
}
if (warnings['111']) {
// 111 Revalidation failed -- we're using stale data
- opts.get('log').warn(
+ opts.log.warn(
'registry',
`Using stale data from ${registry} due to a request error during revalidation.`
)
@@ -70,8 +74,9 @@ function checkErrors (method, res, startTime, opts) {
return res.buffer()
.catch(() => null)
.then(body => {
+ let parsed = body
try {
- body = JSON.parse(body.toString('utf8'))
+ parsed = JSON.parse(body.toString('utf8'))
} catch (e) {}
if (res.status === 401 && res.headers.get('www-authenticate')) {
const auth = res.headers.get('www-authenticate')
@@ -79,20 +84,25 @@ function checkErrors (method, res, startTime, opts) {
.map(s => s.toLowerCase())
if (auth.indexOf('ipaddress') !== -1) {
throw new errors.HttpErrorAuthIPAddress(
- method, res, body, opts.spec
+ method, res, parsed, opts.spec
)
} else if (auth.indexOf('otp') !== -1) {
throw new errors.HttpErrorAuthOTP(
- method, res, body, opts.spec
+ method, res, parsed, opts.spec
)
} else {
throw new errors.HttpErrorAuthUnknown(
- method, res, body, opts.spec
+ method, res, parsed, opts.spec
)
}
+ } else if (res.status === 401 && /one-time pass/.test(body.toString('utf8'))) {
+ // Heuristic for malformed OTP responses that don't include the www-authenticate header.
+ throw new errors.HttpErrorAuthOTP(
+ method, res, parsed, opts.spec
+ )
} else {
throw new errors.HttpErrorGeneral(
- method, res, body, opts.spec
+ method, res, parsed, opts.spec
)
}
})
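
The extra 401 branch means an OTP challenge is now recognized even when the `www-authenticate` header is malformed or missing. Callers usually respond by prompting for a one-time password and retrying; the sketch below assumes the OTP error class in `errors.js` tags these failures with `code === 'EOTP'` (that class is not shown in this diff) and takes the prompt function as a parameter:

```javascript
const fetch = require('npm-registry-fetch')

function requestWithOtp (uri, body, opts, promptForOtp) {
  return fetch(uri, Object.assign({ method: 'PUT', body }, opts))
    .catch(err => {
      if (err.code !== 'EOTP') throw err
      // The registry wants a one-time password; ask the user and retry once.
      return promptForOtp().then(otp =>
        fetch(uri, Object.assign({ method: 'PUT', body, otp }, opts))
      )
    })
}
```
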
diff --git a/deps/npm/node_modules/npm-registry-fetch/config.js b/deps/npm/node_modules/npm-registry-fetch/config.js
index 6249f66e05..7fe5dacc94 100644
--- a/deps/npm/node_modules/npm-registry-fetch/config.js
+++ b/deps/npm/node_modules/npm-registry-fetch/config.js
@@ -1,12 +1,12 @@
'use strict'
const pkg = require('./package.json')
-const pudding = require('figgy-pudding')
+const figgyPudding = require('figgy-pudding')
const silentLog = require('./silentlog.js')
-const AUTH_REGEX = /^(?:.*:)?(_authToken|username|_password|password|email|always-auth|_auth|otp)$/
+const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/
const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi
-const RegFetchConfig = pudding({
+module.exports = figgyPudding({
'agent': {},
'algorithms': {},
'body': {},
@@ -17,8 +17,14 @@ const RegFetchConfig = pudding({
'fetch-retry-factor': {},
'fetch-retry-maxtimeout': {},
'fetch-retry-mintimeout': {},
+ 'force-auth': {},
+ forceAuth: 'force-auth',
'gid': {},
+ 'gzip': {},
'headers': {},
+ 'https-proxy': {},
+ 'ignore-body': {},
+ ignoreBody: 'ignore-body',
'integrity': {},
'is-from-ci': 'isFromCI',
'isFromCI': {
@@ -37,6 +43,9 @@ const RegFetchConfig = pudding({
'log': {
default: silentLog
},
+ 'map-json': 'mapJson',
+ 'mapJSON': 'mapJson',
+ 'mapJson': {},
'max-sockets': 'maxsockets',
'maxsockets': {
default: 12
@@ -45,6 +54,7 @@ const RegFetchConfig = pudding({
'method': {
default: 'GET'
},
+ 'no-proxy': {},
'noproxy': {},
'npm-session': 'npmSession',
'npmSession': {},
@@ -54,7 +64,7 @@ const RegFetchConfig = pudding({
'prefer-online': {},
'projectScope': {},
'project-scope': 'projectScope',
- 'Promise': {},
+ 'Promise': {default: () => Promise},
'proxy': {},
'query': {},
'refer': {},
@@ -86,9 +96,3 @@ const RegFetchConfig = pudding({
return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX)
}
})
-
-module.exports = config
-function config (opts) {
- opts = opts || {}
- return RegFetchConfig(opts, opts.config)
-}
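
With the module now exporting the figgy-pudding factory directly, callers get the declared defaults and the kebab-case/camelCase aliases without any wrapper function. A small illustration, assuming the internal `config.js` is required directly purely for demonstration:

```javascript
// ./config.js is internal to npm-registry-fetch; required here only to
// show how the exported figgy-pudding factory behaves.
const config = require('npm-registry-fetch/config.js')

const opts = config({ 'ignore-body': true })

console.log(opts.method)     // 'GET' -- default from the spec above
console.log(opts.maxsockets) // 12   -- default from the spec above
console.log(opts.ignoreBody) // true -- camelCase alias of 'ignore-body'
```
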
diff --git a/deps/npm/node_modules/npm-registry-fetch/errors.js b/deps/npm/node_modules/npm-registry-fetch/errors.js
index 217f46f977..ba78735fce 100644
--- a/deps/npm/node_modules/npm-registry-fetch/errors.js
+++ b/deps/npm/node_modules/npm-registry-fetch/errors.js
@@ -1,5 +1,25 @@
'use strict'
+const url = require('url')
+
+function packageName (href) {
+ try {
+ let basePath = url.parse(href).pathname.substr(1)
+ if (!basePath.match(/^-/)) {
+ basePath = basePath.split('/')
+ var index = basePath.indexOf('_rewrite')
+ if (index === -1) {
+ index = basePath.length - 1
+ } else {
+ index++
+ }
+ return decodeURIComponent(basePath[index])
+ }
+ } catch (_) {
+ // this is ok
+ }
+}
+
class HttpErrorBase extends Error {
constructor (method, res, body, spec) {
super()
@@ -9,6 +29,7 @@ class HttpErrorBase extends Error {
this.method = method
this.uri = res.url
this.body = body
+ this.pkgid = spec ? spec.toString() : packageName(res.url)
}
}
module.exports.HttpErrorBase = HttpErrorBase
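
The `packageName()` heuristic above guesses a `pkgid` from the request URL whenever no spec is available. Re-created standalone, its behavior looks roughly like this:

```javascript
// Standalone re-creation of the heuristic above, for illustration only.
const url = require('url')

function packageName (href) {
  try {
    let basePath = url.parse(href).pathname.substr(1)
    if (!basePath.match(/^-/)) {
      basePath = basePath.split('/')
      let index = basePath.indexOf('_rewrite')
      index = index === -1 ? basePath.length - 1 : index + 1
      return decodeURIComponent(basePath[index])
    }
  } catch (_) {
    // swallow parse errors, just like the original
  }
}

console.log(packageName('https://registry.npmjs.org/@scope%2fpkg')) // '@scope/pkg'
console.log(packageName('https://registry.npmjs.org/-/ping'))       // undefined
```
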
diff --git a/deps/npm/node_modules/npm-registry-fetch/index.js b/deps/npm/node_modules/npm-registry-fetch/index.js
index cf6e8d3bf3..4ba3c19243 100644
--- a/deps/npm/node_modules/npm-registry-fetch/index.js
+++ b/deps/npm/node_modules/npm-registry-fetch/index.js
@@ -6,19 +6,19 @@ const checkResponse = require('./check-response.js')
const config = require('./config.js')
const getAuth = require('./auth.js')
const fetch = require('make-fetch-happen')
+const JSONStream = require('JSONStream')
const npa = require('npm-package-arg')
+const {PassThrough} = require('stream')
const qs = require('querystring')
-const silentLog = require('./silentlog.js')
const url = require('url')
+const zlib = require('zlib')
module.exports = regFetch
function regFetch (uri, opts) {
- opts = config(Object.assign({
- log: silentLog
- }, opts))
+ opts = config(opts)
const registry = (
- (opts.get('spec') && pickRegistry(opts.get('spec'), opts)) ||
- opts.get('registry') ||
+ (opts.spec && pickRegistry(opts.spec, opts)) ||
+ opts.registry ||
'https://registry.npmjs.org/'
)
uri = url.parse(uri).protocol
@@ -31,7 +31,7 @@ function regFetch (uri, opts) {
// through that takes into account the scope, the prefix of `uri`, etc
const startTime = Date.now()
const headers = getHeaders(registry, uri, opts)
- let body = opts.get('body')
+ let body = opts.body
const bodyIsStream = body &&
typeof body === 'object' &&
typeof body.pipe === 'function'
@@ -41,51 +41,70 @@ function regFetch (uri, opts) {
} else if (body && !headers['content-type']) {
headers['content-type'] = 'application/octet-stream'
}
- if (opts.get('query')) {
- let q = opts.get('query')
+ if (opts.gzip) {
+ headers['content-encoding'] = 'gzip'
+ if (bodyIsStream) {
+ const gz = zlib.createGzip()
+ body.on('error', err => gz.emit('error', err))
+ body = body.pipe(gz)
+ } else {
+ body = new opts.Promise((resolve, reject) => {
+ zlib.gzip(body, (err, gz) => err ? reject(err) : resolve(gz))
+ })
+ }
+ }
+ if (opts.query) {
+ let q = opts.query
if (typeof q === 'string') {
q = qs.parse(q)
}
- const parsed = url.parse(uri)
- parsed.search = '?' + qs.stringify(
- parsed.query
- ? Object.assign(qs.parse(parsed.query), q)
- : q
- )
- uri = url.format(parsed)
+ Object.keys(q).forEach(key => {
+ if (q[key] === undefined) {
+ delete q[key]
+ }
+ })
+ if (Object.keys(q).length) {
+ const parsed = url.parse(uri)
+ parsed.search = '?' + qs.stringify(
+ parsed.query
+ ? Object.assign(qs.parse(parsed.query), q)
+ : q
+ )
+ uri = url.format(parsed)
+ }
}
- return fetch(uri, {
- agent: opts.get('agent'),
- algorithms: opts.get('algorithms'),
+ return opts.Promise.resolve(body).then(body => fetch(uri, {
+ agent: opts.agent,
+ algorithms: opts.algorithms,
body,
cache: getCacheMode(opts),
- cacheManager: opts.get('cache'),
- ca: opts.get('ca'),
- cert: opts.get('cert'),
+ cacheManager: opts.cache,
+ ca: opts.ca,
+ cert: opts.cert,
headers,
- integrity: opts.get('integrity'),
- key: opts.get('key'),
- localAddress: opts.get('local-address'),
- maxSockets: opts.get('maxsockets'),
- memoize: opts.get('memoize'),
- method: opts.get('method') || 'GET',
- noProxy: opts.get('noproxy'),
- Promise: opts.get('Promise'),
- proxy: opts.get('proxy'),
- referer: opts.get('refer'),
- retry: opts.get('retry') || {
- retries: opts.get('fetch-retries'),
- factor: opts.get('fetch-retry-factor'),
- minTimeout: opts.get('fetch-retry-mintimeout'),
- maxTimeout: opts.get('fetch-retry-maxtimeout')
+ integrity: opts.integrity,
+ key: opts.key,
+ localAddress: opts['local-address'],
+ maxSockets: opts.maxsockets,
+ memoize: opts.memoize,
+ method: opts.method || 'GET',
+ noProxy: opts['no-proxy'] || opts.noproxy,
+ Promise: opts.Promise,
+ proxy: opts['https-proxy'] || opts.proxy,
+ referer: opts.refer,
+ retry: opts.retry != null ? opts.retry : {
+ retries: opts['fetch-retries'],
+ factor: opts['fetch-retry-factor'],
+ minTimeout: opts['fetch-retry-mintimeout'],
+ maxTimeout: opts['fetch-retry-maxtimeout']
},
- strictSSL: !!opts.get('strict-ssl'),
- timeout: opts.get('timeout'),
- uid: opts.get('uid'),
- gid: opts.get('gid')
+ strictSSL: !!opts['strict-ssl'],
+ timeout: opts.timeout,
+ uid: opts.uid,
+ gid: opts.gid
}).then(res => checkResponse(
- opts.get('method') || 'GET', res, registry, startTime, opts
- ))
+ opts.method || 'GET', res, registry, startTime, opts
+ )))
}
module.exports.json = fetchJSON
@@ -93,35 +112,43 @@ function fetchJSON (uri, opts) {
return regFetch(uri, opts).then(res => res.json())
}
+module.exports.json.stream = fetchJSONStream
+function fetchJSONStream (uri, jsonPath, opts) {
+ opts = config(opts)
+ const parser = JSONStream.parse(jsonPath, opts.mapJson)
+ const pt = parser.pipe(new PassThrough({objectMode: true}))
+ parser.on('error', err => pt.emit('error', err))
+ regFetch(uri, opts).then(res => {
+ res.body.on('error', err => parser.emit('error', err))
+ res.body.pipe(parser)
+ }, err => pt.emit('error', err))
+ return pt
+}
+
module.exports.pickRegistry = pickRegistry
function pickRegistry (spec, opts) {
spec = npa(spec)
opts = config(opts)
- if (!spec.registry) {
- throw new Error(`${spec} is not a valid registry dependency spec`)
- }
let registry = spec.scope &&
- opts.get(spec.scope.replace(/^@?/, '@') + ':registry')
+ opts[spec.scope.replace(/^@?/, '@') + ':registry']
- if (!registry && opts.get('scope')) {
- registry = opts.get(
- opts.get('scope').replace(/^@?/, '@') + ':registry'
- )
+ if (!registry && opts.scope) {
+ registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
}
if (!registry) {
- registry = opts.get('registry') || 'https://registry.npmjs.org/'
+ registry = opts.registry || 'https://registry.npmjs.org/'
}
return registry
}
function getCacheMode (opts) {
- return opts.get('offline')
+ return opts.offline
? 'only-if-cached'
- : opts.get('prefer-offline')
+ : opts['prefer-offline']
? 'force-cache'
- : opts.get('prefer-online')
+ : opts['prefer-online']
? 'no-cache'
: 'default'
}
@@ -129,18 +156,18 @@ function getCacheMode (opts) {
function getHeaders (registry, uri, opts) {
const headers = Object.assign({
'npm-in-ci': !!(
- opts.get('is-from-ci') ||
+ opts['is-from-ci'] ||
process.env['CI'] === 'true' ||
process.env['TDDIUM'] ||
process.env['JENKINS_URL'] ||
process.env['bamboo.buildKey'] ||
process.env['GO_PIPELINE_NAME']
),
- 'npm-scope': opts.get('project-scope'),
- 'npm-session': opts.get('npm-session'),
- 'user-agent': opts.get('user-agent'),
- 'referer': opts.get('refer')
- }, opts.get('headers'))
+ 'npm-scope': opts['project-scope'],
+ 'npm-session': opts['npm-session'],
+ 'user-agent': opts['user-agent'],
+ 'referer': opts.refer
+ }, opts.headers)
const auth = getAuth(registry, opts)
// If a tarball is hosted on a different place than the manifest, only send
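
One behavioral change buried in the hunk above is that query keys whose value is `undefined` are now stripped before the query string is built, so they no longer show up literally as `?key=undefined`. A quick sketch of the effect (the endpoint and package name are illustrative only):

```javascript
const fetch = require('npm-registry-fetch')

// `write: undefined` is dropped entirely, so this requests
// /-/package/some-pkg/dist-tags with no query string at all.
fetch.json('/-/package/some-pkg/dist-tags', {
  query: { write: undefined }
}).then(tags => console.log(tags))
  .catch(err => console.error(err.message))
```
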
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/CHANGELOG.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/CHANGELOG.md
deleted file mode 100644
index 5c3e7911db..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/CHANGELOG.md
+++ /dev/null
@@ -1,478 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="10.0.4"></a>
-## [10.0.4](https://github.com/zkat/cacache/compare/v10.0.3...v10.0.4) (2018-02-16)
-
-
-
-<a name="10.0.3"></a>
-## [10.0.3](https://github.com/zkat/cacache/compare/v10.0.2...v10.0.3) (2018-02-16)
-
-
-### Bug Fixes
-
-* **content:** rethrow aggregate errors as ENOENT ([fa918f5](https://github.com/zkat/cacache/commit/fa918f5))
-
-
-
-<a name="10.0.2"></a>
-## [10.0.2](https://github.com/zkat/cacache/compare/v10.0.1...v10.0.2) (2018-01-07)
-
-
-### Bug Fixes
-
-* **ls:** deleted entries could cause a premature stream EOF ([347dc36](https://github.com/zkat/cacache/commit/347dc36))
-
-
-
-<a name="10.0.1"></a>
-## [10.0.1](https://github.com/zkat/cacache/compare/v10.0.0...v10.0.1) (2017-11-15)
-
-
-### Bug Fixes
-
-* **move-file:** actually use the fallback to `move-concurrently` (#110) ([073fbe1](https://github.com/zkat/cacache/commit/073fbe1))
-
-
-
-<a name="10.0.0"></a>
-# [10.0.0](https://github.com/zkat/cacache/compare/v9.3.0...v10.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#111) ([fdbb4e5](https://github.com/zkat/cacache/commit/fdbb4e5))
-
-
-### Performance Improvements
-
-* more copyFile benchmarks ([63787bb](https://github.com/zkat/cacache/commit/63787bb))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="9.3.0"></a>
-# [9.3.0](https://github.com/zkat/cacache/compare/v9.2.9...v9.3.0) (2017-10-07)
-
-
-### Features
-
-* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/zkat/cacache/commit/067b5f6))
-
-
-
-<a name="9.2.9"></a>
-## [9.2.9](https://github.com/zkat/cacache/compare/v9.2.8...v9.2.9) (2017-06-17)
-
-
-
-<a name="9.2.8"></a>
-## [9.2.8](https://github.com/zkat/cacache/compare/v9.2.7...v9.2.8) (2017-06-05)
-
-
-### Bug Fixes
-
-* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/zkat/cacache/commit/c3232ea))
-
-
-
-<a name="9.2.7"></a>
-## [9.2.7](https://github.com/zkat/cacache/compare/v9.2.6...v9.2.7) (2017-06-05)
-
-
-### Bug Fixes
-
-* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/zkat/cacache/commit/4131196))
-
-
-
-<a name="9.2.6"></a>
-## [9.2.6](https://github.com/zkat/cacache/compare/v9.2.5...v9.2.6) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/zkat/cacache/commit/5209ffe))
-
-
-
-<a name="9.2.5"></a>
-## [9.2.5](https://github.com/zkat/cacache/compare/v9.2.4...v9.2.5) (2017-05-25)
-
-
-### Bug Fixes
-
-* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/zkat/cacache/commit/84e1d7e))
-
-
-
-<a name="9.2.4"></a>
-## [9.2.4](https://github.com/zkat/cacache/compare/v9.2.3...v9.2.4) (2017-05-24)
-
-
-### Bug Fixes
-
-* **deps:** bumping deps ([bbccb12](https://github.com/zkat/cacache/commit/bbccb12))
-
-
-
-<a name="9.2.3"></a>
-## [9.2.3](https://github.com/zkat/cacache/compare/v9.2.2...v9.2.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/zkat/cacache/commit/ac90bc0))
-
-
-
-<a name="9.2.2"></a>
-## [9.2.2](https://github.com/zkat/cacache/compare/v9.2.1...v9.2.2) (2017-05-14)
-
-
-### Bug Fixes
-
-* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/zkat/cacache/commit/519b4ee))
-
-
-
-<a name="9.2.1"></a>
-## [9.2.1](https://github.com/zkat/cacache/compare/v9.2.0...v9.2.1) (2017-05-14)
-
-
-### Bug Fixes
-
-* **docs:** fixing translation messup ([bb9e4f9](https://github.com/zkat/cacache/commit/bb9e4f9))
-
-
-
-<a name="9.2.0"></a>
-# [9.2.0](https://github.com/zkat/cacache/compare/v9.1.0...v9.2.0) (2017-05-14)
-
-
-### Features
-
-* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/zkat/cacache/commit/531f9a4))
-
-
-
-<a name="9.1.0"></a>
-# [9.1.0](https://github.com/zkat/cacache/compare/v9.0.0...v9.1.0) (2017-05-14)
-
-
-### Features
-
-* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/zkat/cacache/commit/323b90c))
-
-
-
-<a name="9.0.0"></a>
-# [9.0.0](https://github.com/zkat/cacache/compare/v8.0.0...v9.0.0) (2017-04-28)
-
-
-### Bug Fixes
-
-* **memoization:** actually use the LRU ([0e55dc9](https://github.com/zkat/cacache/commit/0e55dc9))
-
-
-### Features
-
-* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/zkat/cacache/commit/e5614c7))
-
-
-### BREAKING CHANGES
-
-* **memoization:** If you were passing an object to opts.memoize, it will now be used as an injected memoization object. If you were only passing booleans and other non-objects through that option, no changes are needed.
-
-
-
-<a name="8.0.0"></a>
-# [8.0.0](https://github.com/zkat/cacache/compare/v7.1.0...v8.0.0) (2017-04-22)
-
-
-### Features
-
-* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/zkat/cacache/commit/bad6c49)), closes [#87](https://github.com/zkat/cacache/issues/87)
-
-
-### BREAKING CHANGES
-
-* **read:** hasContent now returns an object with `{sri, size}` instead of `sri`. Use `result.sri` anywhere that needed the old return value.
-
-
-
-<a name="7.1.0"></a>
-# [7.1.0](https://github.com/zkat/cacache/compare/v7.0.5...v7.1.0) (2017-04-20)
-
-
-### Features
-
-* **size:** handle content size info (#49) ([91230af](https://github.com/zkat/cacache/commit/91230af))
-
-
-
-<a name="7.0.5"></a>
-## [7.0.5](https://github.com/zkat/cacache/compare/v7.0.4...v7.0.5) (2017-04-18)
-
-
-### Bug Fixes
-
-* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/zkat/cacache/commit/6d13e8e))
-* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/zkat/cacache/commit/3624fc5))
-
-
-
-<a name="7.0.4"></a>
-## [7.0.4](https://github.com/zkat/cacache/compare/v7.0.3...v7.0.4) (2017-04-15)
-
-
-### Bug Fixes
-
-* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/zkat/cacache/commit/d49bbcd))
-
-
-
-<a name="7.0.3"></a>
-## [7.0.3](https://github.com/zkat/cacache/compare/v7.0.2...v7.0.3) (2017-04-05)
-
-
-### Bug Fixes
-
-* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/zkat/cacache/commit/9d4f0a5))
-
-
-
-<a name="7.0.2"></a>
-## [7.0.2](https://github.com/zkat/cacache/compare/v7.0.1...v7.0.2) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/zkat/cacache/commit/8dc2e62))
-
-
-
-<a name="7.0.1"></a>
-## [7.0.1](https://github.com/zkat/cacache/compare/v7.0.0...v7.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **docs:** fix header name conflict in readme ([afcd456](https://github.com/zkat/cacache/commit/afcd456))
-
-
-
-<a name="7.0.0"></a>
-# [7.0.0](https://github.com/zkat/cacache/compare/v6.3.0...v7.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/zkat/cacache/commit/d2e9b6a))
-
-
-### Features
-
-* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/zkat/cacache/commit/b1e731f))
-
-
-### BREAKING CHANGES
-
-* **integrity:** The entire API has been overhauled to use SRI hashes instead of digest/hashAlgorithm pairs. SRI hashes follow the Subresource Integrity standard and support strings and objects compatible with [`ssri`](https://npm.im/ssri).
-
-* This change bumps the index version, which will invalidate all previous index entries. Content entries will remain intact, and existing caches will automatically reuse any content from before this breaking change.
-
-* `cacache.get.info()`, `cacache.ls()`, and `cacache.ls.stream()` will now return objects that looks like this:
-
-```
-{
- key: String,
- integrity: '<algorithm>-<base64hash>',
- path: ContentPath,
- time: Date<ms>,
- metadata: Any
-}
-```
-
-* `opts.digest` and `opts.hashAlgorithm` are obsolete for any API calls that used them.
-
-* Anywhere `opts.digest` was accepted, `opts.integrity` is now an option. Any valid SRI hash is accepted here -- multiple hash entries will be resolved according to the standard: first, the "strongest" hash algorithm will be picked, and then each of the entries for that algorithm will be matched against the content. Content will be validated if *any* of the entries match (so, a single integrity string can be used for multiple "versions" of the same document/data).
-
-* `put.byDigest()`, `put.stream.byDigest`, `get.byDigest()` and `get.stream.byDigest()` now expect an SRI instead of a `digest` + `opts.hashAlgorithm` pairing.
-
-* `get.hasContent()` now expects an integrity hash instead of a digest. If content exists, it will return the specific single integrity hash that was found in the cache.
-
-* `verify()` has learned to handle integrity-based caches, and forgotten how to handle old-style cache indices due to the format change.
-
-* `cacache.rm.content()` now expects an integrity hash instead of a hex digest.
-
-
-
-<a name="6.3.0"></a>
-# [6.3.0](https://github.com/zkat/cacache/compare/v6.2.0...v6.3.0) (2017-04-01)
-
-
-### Bug Fixes
-
-* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/zkat/cacache/commit/4670e9b))
-* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/zkat/cacache/commit/b9d2fa2))
-* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/zkat/cacache/commit/d8ac5aa))
-
-
-### Features
-
-* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/zkat/cacache/commit/dc6482d))
-
-
-
-<a name="6.2.0"></a>
-# [6.2.0](https://github.com/zkat/cacache/compare/v6.1.2...v6.2.0) (2017-03-15)
-
-
-### Bug Fixes
-
-* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/zkat/cacache/commit/f8e0f25))
-* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/zkat/cacache/commit/6818521))
-
-
-### Features
-
-* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/zkat/cacache/commit/c42da71))
-
-
-
-<a name="6.1.2"></a>
-## [6.1.2](https://github.com/zkat/cacache/compare/v6.1.1...v6.1.2) (2017-03-13)
-
-
-### Bug Fixes
-
-* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/zkat/cacache/commit/d6eb2f0))
-
-
-
-<a name="6.1.1"></a>
-## [6.1.1](https://github.com/zkat/cacache/compare/v6.1.0...v6.1.1) (2017-03-13)
-
-
-### Bug Fixes
-
-* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/zkat/cacache/commit/0b7faf6))
-* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/zkat/cacache/commit/0640bc4))
-
-
-
-<a name="6.1.0"></a>
-# [6.1.0](https://github.com/zkat/cacache/compare/v6.0.2...v6.1.0) (2017-03-12)
-
-
-### Bug Fixes
-
-* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/zkat/cacache/commit/ef4f70a))
-* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/zkat/cacache/commit/6ab8132))
-
-
-### Features
-
-* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/zkat/cacache/commit/d5d25ba)), closes [#66](https://github.com/zkat/cacache/issues/66)
-* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/zkat/cacache/commit/45ad77a))
-
-
-
-<a name="6.0.2"></a>
-## [6.0.2](https://github.com/zkat/cacache/compare/v6.0.1...v6.0.2) (2017-03-11)
-
-
-### Bug Fixes
-
-* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/zkat/cacache/commit/c3644e5))
-
-
-
-<a name="6.0.1"></a>
-## [6.0.1](https://github.com/zkat/cacache/compare/v6.0.0...v6.0.1) (2017-03-05)
-
-
-### Bug Fixes
-
-* **docs:** Missed spots in README ([8ffb7fa](https://github.com/zkat/cacache/commit/8ffb7fa))
-
-
-
-<a name="6.0.0"></a>
-# [6.0.0](https://github.com/zkat/cacache/compare/v5.0.3...v6.0.0) (2017-03-05)
-
-
-### Bug Fixes
-
-* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/zkat/cacache/commit/2f72d0a))
-* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/zkat/cacache/commit/fa8f3c3))
-* **deps:** removed `format-number[@2](https://github.com/2).0.2` ([1187791](https://github.com/zkat/cacache/commit/1187791))
-* **deps:** removed inflight[@1](https://github.com/1).0.6 ([0d1819c](https://github.com/zkat/cacache/commit/0d1819c))
-* **deps:** rimraf[@2](https://github.com/2).6.1 ([9efab6b](https://github.com/zkat/cacache/commit/9efab6b))
-* **deps:** standard[@9](https://github.com/9).0.0 ([4202cba](https://github.com/zkat/cacache/commit/4202cba))
-* **deps:** tap[@10](https://github.com/10).3.0 ([aa03088](https://github.com/zkat/cacache/commit/aa03088))
-* **deps:** weallcontribute[@1](https://github.com/1).0.8 ([ad4f4dc](https://github.com/zkat/cacache/commit/ad4f4dc))
-* **docs:** add security note to hashKey ([03f81ba](https://github.com/zkat/cacache/commit/03f81ba))
-* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/zkat/cacache/commit/ea00ba6))
-* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/zkat/cacache/commit/45997d8))
-* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/zkat/cacache/commit/fb8cb4d))
-* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/zkat/cacache/commit/bbc5fca))
-* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/zkat/cacache/commit/b1d3888))
-* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/zkat/cacache/commit/d26cdf9))
-* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/zkat/cacache/commit/79a8891))
-* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/zkat/cacache/commit/65f6632))
-* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/zkat/cacache/commit/daf9e08))
-* **put-stream:** use new promise API for moves ([1d36013](https://github.com/zkat/cacache/commit/1d36013))
-* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/zkat/cacache/commit/c60a2fa))
-* **verify:** tiny typo fix ([db22d05](https://github.com/zkat/cacache/commit/db22d05))
-
-
-### Features
-
-* **api:** converted external api ([7bf032f](https://github.com/zkat/cacache/commit/7bf032f))
-* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/zkat/cacache/commit/8d2c5b6))
-* **cache:** add versioning to content and index ([31bc549](https://github.com/zkat/cacache/commit/31bc549))
-* **content:** collate content files into subdirs ([c094d9f](https://github.com/zkat/cacache/commit/c094d9f))
-* **deps:** [@npmcorp](https://github.com/npmcorp)/move[@1](https://github.com/1).0.0 ([bdd00bf](https://github.com/zkat/cacache/commit/bdd00bf))
-* **deps:** bluebird[@3](https://github.com/3).4.7 ([3a17aff](https://github.com/zkat/cacache/commit/3a17aff))
-* **deps:** promise-inflight[@1](https://github.com/1).0.1 ([a004fe6](https://github.com/zkat/cacache/commit/a004fe6))
-* **get:** added memoization support for get ([c77d794](https://github.com/zkat/cacache/commit/c77d794))
-* **get:** export hasContent ([2956ec3](https://github.com/zkat/cacache/commit/2956ec3))
-* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/zkat/cacache/commit/b639746))
-* **index:** collate index files into subdirs ([e8402a5](https://github.com/zkat/cacache/commit/e8402a5))
-* **index:** promisify entry index ([cda3335](https://github.com/zkat/cacache/commit/cda3335))
-* **memo:** added memoization lib ([da07b92](https://github.com/zkat/cacache/commit/da07b92))
-* **memo:** export memoization api ([954b1b3](https://github.com/zkat/cacache/commit/954b1b3))
-* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/zkat/cacache/commit/5cf4616))
-* **perf:** bulk content write api ([51b536e](https://github.com/zkat/cacache/commit/51b536e))
-* **put:** added memoization support to put ([b613a70](https://github.com/zkat/cacache/commit/b613a70))
-* **read:** switched to promises ([a869362](https://github.com/zkat/cacache/commit/a869362))
-* **rm:** added memoization support to rm ([4205cf0](https://github.com/zkat/cacache/commit/4205cf0))
-* **rm:** switched to promises ([a000d24](https://github.com/zkat/cacache/commit/a000d24))
-* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/zkat/cacache/commit/9517cd7))
-* **util:** use promises for api ([ae204bb](https://github.com/zkat/cacache/commit/ae204bb))
-* **verify:** converted to Promises ([f0b3974](https://github.com/zkat/cacache/commit/f0b3974))
-
-
-### BREAKING CHANGES
-
-* cache: index/content directories are now versioned. Previous caches are no longer compatible and cannot be migrated.
-* util: fix-owner now uses Promises instead of callbacks
-* index: Previously-generated index entries are no longer compatible and the index must be regenerated.
-* index: The index format has changed and previous caches are no longer compatible. Existing caches will need to be regenerated.
-* hashes: Default hashAlgorithm changed from sha1 to sha512. If you
-rely on the prior setting, pass `opts.hashAlgorithm` in explicitly.
-* content: Previously-generated content directories are no longer compatible
-and must be regenerated.
-* verify: API is now promise-based
-* read: Switches to a Promise-based API and removes callback stuff
-* rm: Switches to a Promise-based API and removes callback stuff
-* index: this changes the API to work off promises instead of callbacks
-* api: this means we are going all in on promises now
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.es.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.es.md
deleted file mode 100644
index 783a0a19b0..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.es.md
+++ /dev/null
@@ -1,628 +0,0 @@
-# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
-
-[`cacache`](https://github.com/zkat/cacache) es una librería de Node.js para
-manejar caches locales en disco, con acceso tanto con claves únicas como
-direcciones de contenido (hashes/hacheos). Es súper rápida, excelente con el
-acceso concurrente, y jamás te dará datos incorrectos, aún si se corrompen o
-manipulan directamente los ficheros del cache.
-
-El propósito original era reemplazar el caché local de
-[npm](https://npm.im/npm), pero se puede usar por su propia cuenta.
-
-_Traducciones: [English](README.md)_
-
-## Instalación
-
-`$ npm install --save cacache`
-
-## Índice
-
-* [Ejemplo](#ejemplo)
-* [Características](#características)
-* [Cómo Contribuir](#cómo-contribuir)
-* [API](#api)
- * [Usando el API en español](#localized-api)
- * Leer
- * [`ls`](#ls)
- * [`ls.flujo`](#ls-stream)
- * [`saca`](#get-data)
- * [`saca.flujo`](#get-stream)
- * [`saca.info`](#get-info)
- * [`saca.tieneDatos`](#get-hasContent)
- * Escribir
- * [`mete`](#put-data)
- * [`mete.flujo`](#put-stream)
- * [opciones para `mete*`](#put-options)
- * [`rm.todo`](#rm-all)
- * [`rm.entrada`](#rm-entry)
- * [`rm.datos`](#rm-content)
- * Utilidades
- * [`ponLenguaje`](#set-locale)
- * [`limpiaMemoizado`](#clear-memoized)
- * [`tmp.hazdir`](#tmp-mkdir)
- * [`tmp.conTmp`](#with-tmp)
- * Integridad
- * [Subresource Integrity](#integrity)
- * [`verifica`](#verify)
- * [`verifica.ultimaVez`](#verify-last-run)
-
-### Ejemplo
-
-```javascript
-const cacache = require('cacache/es')
-const fs = require('fs')
-
-const tarbol = '/ruta/a/mi-tar.tgz'
-const rutaCache = '/tmp/my-toy-cache'
-const clave = 'mi-clave-única-1234'
-
-// ¡Añádelo al caché! Usa `rutaCache` como raíz del caché.
-cacache.mete(rutaCache, clave, '10293801983029384').then(integrity => {
- console.log(`Saved content to ${rutaCache}.`)
-})
-
-const destino = '/tmp/mytar.tgz'
-
-// Copia el contenido del caché a otro fichero, pero esta vez con flujos.
-cacache.saca.flujo(
- rutaCache, clave
-).pipe(
- fs.createWriteStream(destino)
-).on('finish', () => {
- console.log('extracción completada')
-})
-
-// La misma cosa, pero accesando el contenido directamente, sin tocar el índice.
-cacache.saca.porHacheo(rutaCache, integridad).then(datos => {
- fs.writeFile(destino, datos, err => {
- console.log('datos del tarbol sacados basado en su sha512, y escrito a otro fichero')
- })
-})
-```
-
-### Características
-
-* Extracción por clave o por dirección de contenido (shasum, etc)
-* Usa el estándard de web, [Subresource Integrity](#integrity)
-* Compatible con multiples algoritmos - usa sha1, sha512, etc, en el mismo caché sin problema
-* Entradas con contenido idéntico comparten ficheros
-* Tolerancia de fallas (inmune a corrupción, ficheros parciales, carreras de proceso, etc)
-* Verificación completa de datos cuando (escribiendo y leyendo)
-* Concurrencia rápida, segura y "lockless"
-* Compatible con `stream`s (flujos)
-* Compatible con `Promise`s (promesas)
-* Bastante rápida -- acceso, incluyendo verificación, en microsegundos
-* Almacenaje de metadatos arbitrarios
-* Colección de basura y verificación adicional fuera de banda
-* Cobertura rigurosa de pruebas
-* Probablente hay un "Bloom filter" por ahí en algún lado. Eso le mola a la gente, ¿Verdad? 🤔
-
-### Cómo Contribuir
-
-El equipo de cacache felizmente acepta contribuciones de código y otras maneras de participación. ¡Hay muchas formas diferentes de contribuir! La [Guía de Colaboradores](CONTRIBUTING.md) (en inglés) tiene toda la información que necesitas para cualquier tipo de contribución: todo desde cómo reportar errores hasta cómo someter parches con nuevas características. Con todo y eso, no se preocupe por si lo que haces está exáctamente correcto: no hay ningún problema en hacer preguntas si algo no está claro, o no lo encuentras.
-
-El equipo de cacache tiene miembros hispanohablantes: es completamente aceptable crear `issues` y `pull requests` en español/castellano.
-
-Todos los participantes en este proyecto deben obedecer el [Código de Conducta](CODE_OF_CONDUCT.md) (en inglés), y en general actuar de forma amable y respetuosa mientras participan en esta comunidad.
-
-Por favor refiérete al [Historial de Cambios](CHANGELOG.md) (en inglés) para detalles sobre cambios importantes incluidos en cada versión.
-
-Finalmente, cacache tiene un sistema de localización de lenguaje. Si te interesa añadir lenguajes o mejorar los que existen, mira en el directorio `./locales` para comenzar.
-
-Happy hacking!
-
-### API
-
-#### <a name="localized-api"></a> Usando el API en español
-
-cacache incluye una traducción completa de su API al castellano, con las mismas
-características. Para usar el API como está documentado en este documento, usa
-`require('cacache/es')`
-
-cacache también tiene otros lenguajes: encuéntralos bajo `./locales`, y podrás
-usar el API en ese lenguaje con `require('cacache/<lenguaje>')`
-
-#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
-
-Enumera todas las entradas en el caché, dentro de un solo objeto. Cada entrada
-en el objeto tendrá como clave la clave única usada para el índice, el valor
-siendo un objeto de [`saca.info`](#get-info).
-
-##### Ejemplo
-
-```javascript
-cacache.ls(rutaCache).then(console.log)
-// Salida
-{
- 'my-thing': {
- key: 'my-thing',
-    integrity: 'sha512-BaSe64/EnCoDED+HAsh==',
- path: '.testcache/content/deadbeef', // unido con `rutaCache`
- time: 12345698490,
- size: 4023948,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
- },
- 'other-thing': {
- key: 'other-thing',
- integrity: 'sha1-ANothER+hasH=',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 111112
- }
-}
-```
-
-#### <a name="ls-stream"></a> `> cacache.ls.flujo(cache) -> Readable`
-
-Enumera todas las entradas en el caché, emitiendo un objeto de
-[`saca.info`](#get-info) por cada evento de `data` en el flujo.
-
-##### Ejemplo
-
-```javascript
-cacache.ls.flujo(rutaCache).on('data', console.log)
-// Salida
-{
- key: 'my-thing',
- integrity: 'sha512-BaSe64HaSh',
- path: '.testcache/content/deadbeef', // unido con `rutaCache`
- time: 12345698490,
- size: 13423,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-
-{
- key: 'other-thing',
- integrity: 'whirlpool-WoWSoMuchSupport',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 498023984029
-}
-
-{
- ...
-}
-```
-
-#### <a name="get-data"></a> `> cacache.saca(cache, clave, [ops]) -> Promise({data, metadata, integrity})`
-
-Devuelve un objeto con los datos, hacheo de integridad y metadatos identificados
-por la `clave`. La propiedad `data` de este objeto será una instancia de
-`Buffer` con los datos almacenados en el caché. Seguro sabes qué hacer con
-ellos; a cacache no le importa.
-
-`integrity` es un `string` de [Subresource Integrity](#integrity). Es decir, un
-`string` que puede usarse para verificar la `data`, y que tiene como formato
-`<algoritmo>-<hacheo-integridad-base64>`.
-
-Si no existe ninguna entrada identificada por `clave`, o si los datos
-almacenados localmente fallan la verificación, el `Promise` será rechazado.
-
-Una sub-función, `saca.porHacheo`, tiene casi el mismo comportamiento, excepto
-que busca entradas usando el hacheo de integridad, sin tocar el índice general.
-Esta versión *sólo* devuelve `data`, sin ningún objeto conteniéndola.
-
-##### Nota
-
-Esta función lee la entrada completa a la memoria antes de devolverla. Si estás
-almacenando datos Muy Grandes, es posible que [`saca.flujo`](#get-stream) sea
-una mejor solución.
-
-##### Ejemplo
-
-```javascript
-// Busca por clave
-cache.saca(rutaCache, 'my-thing').then(console.log)
-// Salida:
-{
- metadata: {
- thingName: 'my'
- },
- integrity: 'sha512-BaSe64HaSh',
- data: Buffer#<deadbeef>,
- size: 9320
-}
-
-// Busca por hacheo
-cache.saca.porHacheo(rutaCache, 'sha512-BaSe64HaSh').then(console.log)
-// Salida:
-Buffer#<deadbeef>
-```
-
-#### <a name="get-stream"></a> `> cacache.saca.flujo(cache, clave, [ops]) -> Readable`
-
-Devuelve un [Readable
-Stream](https://nodejs.org/api/stream.html#stream_readable_streams) de los datos
-almacenados bajo `clave`.
-
-Si no existe ninguna entrada identificada por `clave`, o si los datos
-almacenados localmente fallan la verificación, se emitirá un evento `error` en
-el flujo.
-
-`metadata` y `integrity` serán emitidos como eventos antes de que el flujo
-cierre.
-
-Una sub-función, `saca.flujo.porHacheo`, tiene casi el mismo comportamiento,
-excepto que busca entradas usando el hacheo de integridad, sin tocar el índice
-general. Esta versión no emite eventos de `metadata` o `integrity`.
-
-##### Ejemplo
-
-```javascript
-// Busca por clave
-cache.saca.flujo(
- rutaCache, 'my-thing'
-).on('metadata', metadata => {
- console.log('metadata:', metadata)
-}).on('integrity', integrity => {
- console.log('integrity:', integrity)
-}).pipe(
- fs.createWriteStream('./x.tgz')
-)
-// Salidas:
-metadata: { ... }
-integrity: 'sha512-SoMeDIGest+64=='
-
-// Busca por hacheo
-cache.saca.flujo.porHacheo(
- rutaCache, 'sha512-SoMeDIGest+64=='
-).pipe(
- fs.createWriteStream('./x.tgz')
-)
-```
-
-#### <a name="get-info"></a> `> cacache.saca.info(cache, clave) -> Promise`
-
-Busca la `clave` en el índice del caché, devolviendo información sobre la
-entrada si existe.
-
-##### Campos
-
-* `key` - Clave de la entrada. Igual al argumento `clave`.
-* `integrity` - [hacheo de Subresource Integrity](#integrity) del contenido al que se refiere esta entrada.
-* `path` - Dirección del fichero de datos almacenados, relativa al argumento `cache`.
-* `time` - Hora de creación de la entrada
-* `metadata` - Metadatos asignados a esta entrada por el usuario
-
-##### Ejemplo
-
-```javascript
-cacache.saca.info(rutaCache, 'my-thing').then(console.log)
-
-// Salida
-{
- key: 'my-thing',
-  integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
- path: '.testcache/content/deadbeef',
- time: 12345698490,
- size: 849234,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-```
-
-#### <a name="get-hasContent"></a> `> cacache.saca.tieneDatos(cache, integrity) -> Promise`
-
-Busca un [hacheo Subresource Integrity](#integrity) en el caché. Si existe el
-contenido asociado con `integrity`, devuelve un objeto con dos campos: el hacheo
-_específico_ que se usó para la búsqueda, `sri`, y el tamaño total del
-contenido, `size`. Si no existe ningún contenido asociado con `integrity`,
-devuelve `false`.
-
-##### Ejemplo
-
-```javascript
-cacache.saca.tieneDatos(rutaCache, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
-
-// Salida
-{
- sri: {
- source: 'sha256-MUSTVERIFY+ALL/THINGS==',
- algorithm: 'sha256',
- digest: 'MUSTVERIFY+ALL/THINGS==',
- options: []
- },
- size: 9001
-}
-
-cacache.saca.tieneDatos(rutaCache, 'sha521-NOT+IN/CACHE==').then(console.log)
-
-// Salida
-false
-```
-
-#### <a name="put-data"></a> `> cacache.mete(cache, clave, datos, [ops]) -> Promise`
-
-Inserta `datos` en el caché. El `Promise` devuelto se resuelve con un hacheo
-(generado conforme a [`ops.algorithms`](#optsalgorithms)) después de que la
-entrada haya sido escrita por completo.
-
-##### Ejemplo
-
-```javascript
-fetch(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).then(datos => {
- return cacache.mete(rutaCache, 'registry.npmjs.org|cacache@1.0.0', datos)
-}).then(integridad => {
- console.log('el hacheo de integridad es', integridad)
-})
-```
-
-#### <a name="put-stream"></a> `> cacache.mete.flujo(cache, clave, [ops]) -> Writable`
-
-Devuelve un [Writable
-Stream](https://nodejs.org/api/stream.html#stream_writable_streams) que inserta
-al caché los datos escritos a él. Emite un evento `integrity` con el hacheo del
-contenido escrito, cuando completa.
-
-##### Ejemplo
-
-```javascript
-request.get(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).pipe(
- cacache.mete.flujo(
- rutaCache, 'registry.npmjs.org|cacache@1.0.0'
- ).on('integrity', d => console.log(`integrity digest is ${d}`))
-)
-```
-
-#### <a name="put-options"></a> `> opciones para cacache.mete`
-
-Las funciones `cacache.mete` tienen un número de opciones en común.
-
-##### `ops.metadata`
-
-Metadatos del usuario que se almacenarán con la entrada.
-
-##### `ops.size`
-
-El tamaño declarado de los datos que se van a insertar. Si se provee, cacache
-verificará que los datos escritos sean de ese tamaño; si no lo son, fallará con
-un error con código `EBADSIZE`.
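-
-Por ejemplo, un boceto mínimo (asumiendo que `rutaCache` y `datos` ya existen,
-con un tamaño declarado hipotético de 1024 bytes):
-
-```javascript
-cacache.mete(rutaCache, 'registry.npmjs.org|cacache@1.0.0', datos, {size: 1024}).catch(err => {
-  if (err.code === 'EBADSIZE') {
-    console.error('los datos escritos no eran de los 1024 bytes declarados')
-  }
-})
-```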
-
-##### `ops.integrity`
-
-El hacheo de integridad de los datos siendo escritos.
-
-Si es proveído, y los datos escritos no le corresponden, la operación fallará
-con un error con código `EINTEGRITY`.
-
-`ops.algorithms` no tiene ningún efecto si esta opción está presente.
-
-##### `ops.algorithms`
-
-Por Defecto: `['sha512']`
-
-Algoritmos que se deben usar cuando se calcule el hacheo de [subresource
-integrity](#integrity) para los datos insertados. Puede usar cualquier algoritmo
-enumerado en `crypto.getHashes()`.
-
-Por el momento, sólo se acepta un algoritmo (es decir, un array con exactamente
-un valor). No tiene ningún efecto si `ops.integrity` también ha sido proveído.
-
-##### `ops.uid`/`ops.gid`
-
-Si están presentes, cacache hará todo lo posible para asegurarse de que todos
-los ficheros creados en el proceso de sus operaciones en el caché usen esta
-combinación de `uid`/`gid` en particular.
-
-##### `ops.memoize`
-
-Por Defecto: `null`
-
-Si es verdad, cacache tratará de memoizar los datos de la entrada en memoria. La
-próxima vez que el proceso corriente trate de accesar los datos o entrada,
-cacache buscará en memoria antes de buscar en disco.
-
-Si `ops.memoize` es un objeto regular o un objeto como `Map` (es decir, un
-objeto con métodos `get()` y `set()`), este objeto en sí será usado en vez del
-caché de memoria global. Esto permite tener lógica específica a tu aplicación
-en cuanto al almacenaje en memoria de tus datos.
-
-Si quieres asegurarte que los datos se lean del disco en vez de memoria, usa
-`memoize: false` cuando uses funciones de `cacache.saca`.
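-
-Por ejemplo, un boceto mínimo (asumiendo que `rutaCache`, `clave` y `datos` ya
-existen) que usa un `Map` propio como caché de memoria:
-
-```javascript
-const miMemo = new Map()
-
-// Inserta y luego lee usando el mismo objeto de memoización.
-cacache.mete(rutaCache, clave, datos, {memoize: miMemo}).then(() => {
-  return cacache.saca(rutaCache, clave, {memoize: miMemo})
-}).then(res => {
-  console.log('leído (probablemente desde memoria):', res.data)
-})
-```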
-
-#### <a name="rm-all"></a> `> cacache.rm.todo(cache) -> Promise`
-
-Borra el caché completo, incluyendo ficheros temporeros, ficheros de datos, y el
-índice del caché.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.todo(rutaCache).then(() => {
- console.log('THE APOCALYPSE IS UPON US 😱')
-})
-```
-
-#### <a name="rm-entry"></a> `> cacache.rm.entrada(cache, clave) -> Promise`
-
-Alias: `cacache.rm`
-
-Borra la entrada `clave` del índice. El contenido asociado con esta entrada
-seguirá siendo accesible por hacheo usando
-[`saca.flujo.porHacheo`](#get-stream).
-
-Para borrar el contenido en sí, usa [`rm.datos`](#rm-content). Si quieres hacer
-esto de manera más segura (pues ficheros de contenido pueden ser usados por
-múltiples entradas), usa [`verifica`](#verify) para borrar huérfanos.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.entrada(rutaCache, 'my-thing').then(() => {
- console.log('I did not like it anyway')
-})
-```
-
-#### <a name="rm-content"></a> `> cacache.rm.datos(cache, integrity) -> Promise`
-
-Borra el contenido identificado por `integrity`. Cualquier entrada que se
-refiera a este contenido quedará huérfana y se invalidará si se trata de
-acceder a ella, a menos que contenido idéntico sea añadido bajo `integrity`.
-
-##### Ejemplo
-
-```javascript
-cacache.rm.datos(rutaCache, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
- console.log('los datos para `mi-cosa` se borraron')
-})
-```
-
-#### <a name="set-locale"></a> `> cacache.ponLenguaje(locale)`
-
-Configura el lenguaje usado para mensajes y errores de cacache. La lista de
-lenguajes disponibles está en el directorio `./locales` del proyecto.
-
-_¿Te interesa añadir más lenguajes? ¡[Somete un PR](CONTRIBUTING.md)!_
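-
-##### Ejemplo
-
-Un boceto mínimo (el nombre del lenguaje corresponde a un fichero de `./locales`):
-
-```javascript
-// Cambia los mensajes y errores de cacache al castellano.
-cacache.ponLenguaje('es')
-```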
-
-#### <a name="clear-memoized"></a> `> cacache.limpiaMemoizado()`
-
-Reinicializa por completo el caché de memoria interno. Si estás usando tu
-propio objeto con `ops.memoize`, tendrás que limpiarlo por tu cuenta.
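-
-##### Ejemplo
-
-Un boceto mínimo:
-
-```javascript
-// Vacía el caché de memoria global; las próximas lecturas volverán al disco.
-cacache.limpiaMemoizado()
-```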
-
-#### <a name="tmp-mkdir"></a> `> tmp.hazdir(cache, ops) -> Promise<Path>`
-
-Alias: `tmp.mkdir`
-
-Devuelve un directorio único dentro del directorio `tmp` del caché.
-
-Una vez tengas el directorio, es responsabilidad tuya asegurarte de que todos
-los ficheros escritos a él sean creados usando los permisos y `uid`/`gid` concordantes
-con el caché. Si no, puedes pedirle a cacache que lo haga llamando a
-[`cacache.tmp.fix()`](#tmp-fix). Esta función arreglará todos los permisos en el
-directorio tmp.
-
-Si quieres que cacache limpie el directorio automáticamente cuando termines, usa
-[`cacache.tmp.conTmp()`](#with-tmp).
-
-##### Ejemplo
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-})
-```
-
-#### <a name="with-tmp"></a> `> tmp.conTmp(cache, ops, cb) -> Promise`
-
-Crea un directorio temporero con [`tmp.mkdir()`](#tmp-mkdir) y ejecuta `cb` con
-él como primer argumento. El directorio creado será removido automáticamente
-cuando el valor devuelto por `cb()` se resuelva.
-
-Las mismas advertencias aplican en cuanto al manejo de permisos para los ficheros
-dentro del directorio.
-
-##### Ejemplo
-
-```javascript
-cacache.tmp.conTmp(cache, dir => {
- return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-}).then(() => {
- // `dir` no longer exists
-})
-```
-
-#### <a name="integrity"></a> Hacheos de Subresource Integrity
-
-cacache usa strings que siguen la especificación de [Subresource
-Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
-
-Es decir, dondequiera que cacache espere un argumento u opción `integrity`, ese
-string debería usar el formato `<algoritmo>-<hacheo-base64>`.
-
-Una variación importante respecto a la especificación es que cacache acepta el
-nombre de cualquier algoritmo soportado por el proceso de Node.js donde se usa.
-Puedes usar `crypto.getHashes()` para ver cuáles están disponibles.
-
-##### Generando tus propios hacheos
-
-Si tienes un `shasum`, en general va a estar en formato de string hexadecimal
-(es decir, un `sha1` se vería como algo así:
-`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`).
-
-Para ser compatible con cacache, necesitas convertir esto a su equivalente en
-subresource integrity. Por ejemplo, el hacheo correspondiente al ejemplo
-anterior sería: `sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
-
-Puedes usar código así para generarlo por tu cuenta:
-
-```javascript
-const crypto = require('crypto')
-const algoritmo = 'sha512'
-const datos = 'foobarbaz'
-
-const integrity = (
-  algoritmo +
- '-' +
- crypto.createHash(algoritmo).update(datos).digest('base64')
-)
-```
-
-También puedes usar [`ssri`](https://npm.im/ssri) para delegar el trabajo a otra
-librería que garantiza que todo esté correcto, pues maneja probablemente todas
-las operaciones que tendrías que hacer con SRIs, incluyendo la conversión entre
-hexadecimal y el formato SRI.
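-
-Por ejemplo, un boceto mínimo de la conversión de hexadecimal a SRI con `ssri`
-(usando el hacheo del ejemplo anterior):
-
-```javascript
-const ssri = require('ssri')
-
-// Convierte un sha1 hexadecimal existente a un string SRI.
-const integrity = ssri.fromHex('5f5513f8822fdbe5145af33b64d8d970dcf95c6e', 'sha1').toString()
-// => 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4='
-```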
-
-#### <a name="verify"></a> `> cacache.verifica(cache, ops) -> Promise`
-
-Examina y arregla tu caché:
-
-* Limpia entradas inválidas, huérfanas y corrompidas
-* Te deja filtrar cuáles entradas retener, con tu propio filtro
-* Reclama cualquier fichero de contenido sin referencias en el índice
-* Verifica integridad de todos los ficheros de contenido y remueve los malos
-* Arregla permisos del caché
-* Remueve el directorio `tmp` del caché y todo su contenido.
-
-Cuando termine, devuelve un objeto con varias estadísticas sobre el proceso de
-verificación, por ejemplo la cantidad de espacio de disco reclamado, el número
-de entradas válidas, número de entradas removidas, etc.
-
-##### Opciones
-
-* `ops.uid` - uid para asignarle al caché y su contenido
-* `ops.gid` - gid para asignarle al caché y su contenido
-* `ops.filter` - recibe una entrada como argumento. Devuelve falso para removerla (mira el boceto más abajo). Nota: es posible que esta función sea invocada con la misma entrada más de una vez.
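-
-Por ejemplo, un boceto mínimo (con un prefijo de clave hipotético) que retiene
-sólo las entradas del registro y remueve el resto:
-
-```javascript
-cacache.verifica(rutaCache, {
-  filter: entrada => entrada.key.startsWith('registry.npmjs.org|')
-}).then(stats => {
-  console.log('se removió todo excepto las entradas del registro:', stats)
-})
-```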
-
-##### Ejemplo
-
-```sh
-echo somegarbage >> $RUTACACHE/content/deadbeef
-```
-
-```javascript
-cacache.verifica(rutaCache).then(stats => {
- // deadbeef collected, because of invalid checksum.
- console.log('cache is much nicer now! stats:', stats)
-})
-```
-
-#### <a name="verify-last-run"></a> `> cacache.verifica.ultimaVez(cache) -> Promise`
-
-Alias: `últimaVez`
-
-Devuelve un `Date` que representa la última vez que `cacache.verifica` fue
-ejecutada en `cache`.
-
-##### Ejemplo
-
-```javascript
-cacache.verifica(rutaCache).then(() => {
- cacache.verifica.ultimaVez(rutaCache).then(última => {
- console.log('La última vez que se usó cacache.verifica() fue ' + última)
- })
-})
-```
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.md
deleted file mode 100644
index 4b284588a6..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/README.md
+++ /dev/null
@@ -1,624 +0,0 @@
-# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
-
-[`cacache`](https://github.com/zkat/cacache) is a Node.js library for managing
-local key and content address caches. It's really fast, really good at
-concurrency, and it will never give you corrupted data, even if cache files
-get corrupted or manipulated.
-
-It was originally written to be used as [npm](https://npm.im)'s local cache, but
-can just as easily be used on its own.
-
-_Translations: [español](README.es.md)_
-
-## Install
-
-`$ npm install --save cacache`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * [Using localized APIs](#localized-api)
- * Reading
- * [`ls`](#ls)
- * [`ls.stream`](#ls-stream)
- * [`get`](#get-data)
- * [`get.stream`](#get-stream)
- * [`get.info`](#get-info)
- * [`get.hasContent`](#get-hasContent)
- * Writing
- * [`put`](#put-data)
- * [`put.stream`](#put-stream)
- * [`put*` opts](#put-options)
- * [`rm.all`](#rm-all)
- * [`rm.entry`](#rm-entry)
- * [`rm.content`](#rm-content)
- * Utilities
- * [`setLocale`](#set-locale)
- * [`clearMemoized`](#clear-memoized)
- * [`tmp.mkdir`](#tmp-mkdir)
- * [`tmp.withTmp`](#with-tmp)
- * Integrity
- * [Subresource Integrity](#integrity)
- * [`verify`](#verify)
- * [`verify.lastRun`](#verify-last-run)
-
-### Example
-
-```javascript
-const cacache = require('cacache/en')
-const fs = require('fs')
-
-const tarball = '/path/to/mytar.tgz'
-const cachePath = '/tmp/my-toy-cache'
-const key = 'my-unique-key-1234'
-
-// Cache it! Use `cachePath` as the root of the content cache
-cacache.put(cachePath, key, '10293801983029384').then(integrity => {
- console.log(`Saved content to ${cachePath}.`)
-})
-
-const destination = '/tmp/mytar.tgz'
-
-// Copy the contents out of the cache and into their destination!
-// But this time, use stream instead!
-cacache.get.stream(
- cachePath, key
-).pipe(
- fs.createWriteStream(destination)
-).on('finish', () => {
- console.log('done extracting!')
-})
-
-// The same thing, but skip the key index.
-cacache.get.byDigest(cachePath, integrityHash).then(data => {
- fs.writeFile(destination, data, err => {
- console.log('tarball data fetched based on its sha512sum and written out!')
- })
-})
-```
-
-### Features
-
-* Extraction by key or by content address (shasum, etc)
-* [Subresource Integrity](#integrity) web standard support
-* Multi-hash support - safely host sha1, sha512, etc, in a single cache
-* Automatic content deduplication
-* Fault tolerance (immune to corruption, partial writes, process races, etc)
-* Consistency guarantees on read and write (full data verification)
-* Lockless, high-concurrency cache access
-* Streaming support
-* Promise support
-* Pretty darn fast -- sub-millisecond reads and writes including verification
-* Arbitrary metadata storage
-* Garbage collection and additional offline verification
-* Thorough test coverage
-* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔
-
-### Contributing
-
-The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### <a name="localized-api"></a> Using localized APIs
-
-cacache includes a complete API in English, with the same features as other
-translations. To use the English API as documented in this README, use
-`require('cacache/en')`. This is also currently the default if you do
-`require('cacache')`, but may change in the future.
-
-cacache also supports other languages! You can find the list of currently
-supported ones by looking in `./locales` in the source directory. You can use
-the API in that language with `require('cacache/<lang>')`.
-
-Want to add support for a new language? Please go ahead! You should be able to
-copy `./locales/en.js` and `./locales/en.json` and fill them in. Translating the
-`README.md` is a bit more work, but also appreciated if you get around to it. 👍🏼
-
-#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
-
-Lists info for all entries currently in the cache as a single large object. Each
-entry in the object will be keyed by the unique index key, with corresponding
-[`get.info`](#get-info) objects as the values.
-
-##### Example
-
-```javascript
-cacache.ls(cachePath).then(console.log)
-// Output
-{
- 'my-thing': {
- key: 'my-thing',
-    integrity: 'sha512-BaSe64/EnCoDED+HAsh==',
- path: '.testcache/content/deadbeef', // joined with `cachePath`
- time: 12345698490,
- size: 4023948,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
- },
- 'other-thing': {
- key: 'other-thing',
- integrity: 'sha1-ANothER+hasH=',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 111112
- }
-}
-```
-
-#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable`
-
-Lists info for all entries currently in the cache.
-
-This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are
-returned as `'data'` events on the returned stream.
-
-##### Example
-
-```javascript
-cacache.ls.stream(cachePath).on('data', console.log)
-// Output
-{
- key: 'my-thing',
- integrity: 'sha512-BaSe64HaSh',
- path: '.testcache/content/deadbeef', // joined with `cachePath`
- time: 12345698490,
- size: 13423,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-
-{
- key: 'other-thing',
- integrity: 'whirlpool-WoWSoMuchSupport',
- path: '.testcache/content/bada55',
- time: 11992309289,
- size: 498023984029
-}
-
-{
- ...
-}
-```
-
-#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
-
-Returns an object with the cached data, digest, and metadata identified by
-`key`. The `data` property of this object will be a `Buffer` instance that
-presumably holds some data that means something to you. I'm sure you know what
-to do with it! cacache just won't care.
-
-`integrity` is a [Subresource
-Integrity](#integrity)
-string. That is, a string that can be used to verify `data`, which looks like
-`<hash-algorithm>-<base64-integrity-hash>`.
-
-If there is no content identified by `key`, or if the locally-stored data does
-not pass the validity checksum, the promise will be rejected.
-
-A sub-function, `get.byDigest` may be used for identical behavior, except lookup
-will happen by integrity hash, bypassing the index entirely. This version of the
-function *only* returns `data` itself, without any wrapper.
-
-##### Note
-
-This function loads the entire cache entry into memory before returning it. If
-you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
-instead.
-
-##### Example
-
-```javascript
-// Look up by key
-cache.get(cachePath, 'my-thing').then(console.log)
-// Output:
-{
- metadata: {
- thingName: 'my'
- },
- integrity: 'sha512-BaSe64HaSh',
- data: Buffer#<deadbeef>,
- size: 9320
-}
-
-// Look up by digest
-cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
-// Output:
-Buffer#<deadbeef>
-```
-
-#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`
-
-Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
-
-If there is no content identified by `key`, or if the locally-stored data does
-not pass the validity checksum, an error will be emitted.
-
-`metadata` and `integrity` events will be emitted before the stream closes, if
-you need to collect that extra data about the cached entry.
-
-A sub-function, `get.stream.byDigest` may be used for identical behavior,
-except lookup will happen by integrity hash, bypassing the index entirely. This
-version does not emit the `metadata` and `integrity` events at all.
-
-##### Example
-
-```javascript
-// Look up by key
-cache.get.stream(
- cachePath, 'my-thing'
-).on('metadata', metadata => {
- console.log('metadata:', metadata)
-}).on('integrity', integrity => {
- console.log('integrity:', integrity)
-}).pipe(
- fs.createWriteStream('./x.tgz')
-)
-// Outputs:
-metadata: { ... }
-integrity: 'sha512-SoMeDIGest+64=='
-
-// Look up by digest
-cache.get.stream.byDigest(
- cachePath, 'sha512-SoMeDIGest+64=='
-).pipe(
- fs.createWriteStream('./x.tgz')
-)
-```
-
-#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
-
-Looks up `key` in the cache index, returning information about the entry if
-one exists.
-
-##### Fields
-
-* `key` - Key the entry was looked up under. Matches the `key` argument.
-* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
-* `path` - Filesystem path relative to `cache` argument where content is stored.
-* `time` - Timestamp the entry was first added on.
-* `metadata` - User-assigned metadata associated with the entry/content.
-
-##### Example
-
-```javascript
-cacache.get.info(cachePath, 'my-thing').then(console.log)
-
-// Output
-{
- key: 'my-thing',
-  integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
- path: '.testcache/content/deadbeef',
- time: 12345698490,
- size: 849234,
- metadata: {
- name: 'blah',
- version: '1.2.3',
- description: 'this was once a package but now it is my-thing'
- }
-}
-```
-
-#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise`
-
-Looks up a [Subresource Integrity hash](#integrity) in the cache. If content
-exists for this `integrity`, it will return an object with two fields: the
-specific single integrity hash that was found, as `sri`, and the size of the
-found content, as `size`. If no content exists for this integrity, it will
-return `false`.
-
-##### Example
-
-```javascript
-cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
-
-// Output
-{
- sri: {
- source: 'sha256-MUSTVERIFY+ALL/THINGS==',
- algorithm: 'sha256',
- digest: 'MUSTVERIFY+ALL/THINGS==',
- options: []
- },
- size: 9001
-}
-
-cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log)
-
-// Output
-false
-```
-
-#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`
-
-Inserts data passed to it into the cache. The returned Promise resolves with a
-digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the
-cache entry has been successfully written.
-
-##### Example
-
-```javascript
-fetch(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).then(data => {
- return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data)
-}).then(integrity => {
- console.log('integrity hash is', integrity)
-})
-```
-
-#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`
-
-Returns a [Writable
-Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
-data written to it into the cache. Emits an `integrity` event with the digest of
-written contents when it succeeds.
-
-##### Example
-
-```javascript
-request.get(
- 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
-).pipe(
- cacache.put.stream(
- cachePath, 'registry.npmjs.org|cacache@1.0.0'
- ).on('integrity', d => console.log(`integrity digest is ${d}`))
-)
-```
-
-#### <a name="put-options"></a> `> cacache.put options`
-
-`cacache.put` functions have a number of options in common.
-
-##### `opts.metadata`
-
-Arbitrary metadata to be attached to the inserted key.
-
-##### `opts.size`
-
-If provided, the data stream will be verified to check that enough data was
-passed through. If there's more or less data than expected, insertion will fail
-with an `EBADSIZE` error.
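-
-For instance, a minimal sketch (assuming `cachePath` and `data` already exist,
-with a hypothetical declared size of 1024 bytes):
-
-```javascript
-cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data, {size: 1024}).catch(err => {
-  if (err.code === 'EBADSIZE') {
-    console.error('the written data was not the declared 1024 bytes')
-  }
-})
-```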
-
-##### `opts.integrity`
-
-If present, the pre-calculated digest for the inserted content. If this option
-is provided and does not match the post-insertion digest, insertion will fail
-with an `EINTEGRITY` error.
-
-`algorithms` has no effect if this option is present.
-
-##### `opts.algorithms`
-
-Default: ['sha512']
-
-Hashing algorithms to use when calculating the [subresource integrity
-digest](#integrity)
-for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
-`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
-may also use any anagram of `'modnar'` to use this feature.
-
-Currently only supports one algorithm at a time (i.e., an array length of
-exactly `1`). Has no effect if `opts.integrity` is present.
-
-##### `opts.uid`/`opts.gid`
-
-If provided, cacache will do its best to make sure any new files added to the
-cache use this particular `uid`/`gid` combination. This can be used,
-for example, to drop permissions when someone uses `sudo`, but cacache makes
-no assumptions about your needs here.
-
-##### `opts.memoize`
-
-Default: null
-
-If provided, cacache will memoize the given cache insertion in memory, bypassing
-any filesystem checks for that key or digest in future cache fetches. Nothing
-will be written to the in-memory cache unless this option is explicitly truthy.
-
-If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
-and `set` methods), it will be written to instead of the global memoization
-cache.
-
-Reading from disk data can be forced by explicitly passing `memoize: false` to
-the reader functions, but their default will be to read from memory.
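-
-As a minimal sketch (assuming `cachePath`, `key`, and `data` already exist), you
-can pass your own `Map` to keep memoization scoped to it:
-
-```javascript
-const myMemo = new Map()
-
-// Insert, then read back through the same memoization object.
-cacache.put(cachePath, key, data, {memoize: myMemo}).then(() => {
-  return cacache.get(cachePath, key, {memoize: myMemo})
-}).then(res => {
-  console.log('read back (likely straight from memory):', res.data)
-})
-```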
-
-#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`
-
-Clears the entire cache, mainly by blowing away the cache directory itself.
-
-##### Example
-
-```javascript
-cacache.rm.all(cachePath).then(() => {
- console.log('THE APOCALYPSE IS UPON US 😱')
-})
-```
-
-#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key) -> Promise`
-
-Alias: `cacache.rm`
-
-Removes the index entry for `key`. Content will still be accessible if
-requested directly by content address ([`get.stream.byDigest`](#get-stream)).
-
-To remove the content itself (which might still be used by other entries), use
-[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
-[`verify`](#verify).
-
-##### Example
-
-```javascript
-cacache.rm.entry(cachePath, 'my-thing').then(() => {
- console.log('I did not like it anyway')
-})
-```
-
-#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise`
-
-Removes the content identified by `integrity`. Any index entries referring to it
-will not be usable again until the content is re-added to the cache with an
-identical digest.
-
-##### Example
-
-```javascript
-cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
- console.log('data for my-thing is gone!')
-})
-```
-
-#### <a name="set-locale"></a> `> cacache.setLocale(locale)`
-
-Configure the language/locale used for messages and errors coming from cacache.
-The list of available locales is in the `./locales` directory in the project
-root.
-
-_Interested in contributing more languages? [Submit a PR](CONTRIBUTING.md)!_
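-
-##### Example
-
-A minimal sketch (the locale name matches a file in `./locales`):
-
-```javascript
-// Switch cacache's messages and errors over to Spanish.
-cacache.setLocale('es')
-```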
-
-#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
-
-Completely resets the in-memory entry cache.
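-
-##### Example
-
-A minimal sketch:
-
-```javascript
-// Drop everything memoized in memory; subsequent reads go back to disk.
-cacache.clearMemoized()
-```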
-
-#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
-
-Returns a unique temporary directory inside the cache's `tmp` dir. This
-directory will use the same safe user assignment that all the other stuff uses.
-
-Once the directory is made, it's the user's responsibility that all files within
-are made according to the same `opts.gid`/`opts.uid` settings that would be
-passed in. If not, you can ask cacache to do it for you by calling
-[`tmp.fix()`](#tmp-fix), which will fix all tmp directory permissions.
-
-If you want automatic cleanup of this directory, use
-[`tmp.withTmp()`](#with-tmp).
-
-##### Example
-
-```javascript
-cacache.tmp.mkdir(cache).then(dir => {
- fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-})
-```
-
-#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
-
-Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
-with it. The created temporary directory will be removed when the return value
-of `cb()` resolves -- that is, if you return a Promise from `cb()`, the tmp
-directory will be automatically deleted once that promise completes.
-
-The same caveats apply when it comes to managing permissions for the tmp dir's
-contents.
-
-##### Example
-
-```javascript
-cacache.tmp.withTmp(cache, dir => {
- return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
-}).then(() => {
- // `dir` no longer exists
-})
-```
-
-#### <a name="integrity"></a> Subresource Integrity Digests
-
-For content verification and addressing, cacache uses strings following the
-[Subresource
-Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
-That is, any time cacache expects an `integrity` argument or option, it
-should be in the format `<hashAlgorithm>-<base64-hash>`.
-
-One deviation from the current spec is that cacache will support any hash
-algorithms supported by the underlying Node.js process. You can use
-`crypto.getHashes()` to see which ones you can use.
-
-##### Generating Digests Yourself
-
-If you have an existing content shasum, it is generally formatted as a
-hexadecimal string (that is, a sha1 would look like:
-`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
-cacache, you'll need to convert this to an equivalent subresource integrity
-string. For this example, the corresponding hash would be:
-`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
-
-If you want to generate an integrity string yourself for existing data, you can
-use something like this:
-
-```javascript
-const crypto = require('crypto')
-const hashAlgorithm = 'sha512'
-const data = 'foobarbaz'
-
-const integrity = (
- hashAlgorithm +
- '-' +
- crypto.createHash(hashAlgorithm).update(data).digest('base64')
-)
-```
-
-You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
-around SRI strings, including generation, parsing, and translating from existing
-hex-formatted strings.
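-
-For instance, a minimal sketch of the hex-to-SRI conversion with `ssri` (using
-the hex digest from the example above):
-
-```javascript
-const ssri = require('ssri')
-
-// Convert an existing hex-formatted sha1 into an SRI string.
-const integrity = ssri.fromHex('5f5513f8822fdbe5145af33b64d8d970dcf95c6e', 'sha1').toString()
-// => 'sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4='
-```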
-
-#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`
-
-Checks out and fixes up your cache:
-
-* Cleans up corrupted or invalid index entries.
-* Custom entry filtering options.
-* Garbage collects any content entries not referenced by the index.
-* Checks integrity for all content entries and removes invalid content.
-* Fixes cache ownership.
-* Removes the `tmp` directory in the cache and all its contents.
-
-When it's done, it'll return an object with various stats about the verification
-process, including amount of storage reclaimed, number of valid entries, number
-of entries removed, etc.
-
-##### Options
-
-* `opts.uid` - uid to assign to cache and its contents
-* `opts.gid` - gid to assign to cache and its contents
-* `opts.filter` - receives a formatted entry. Return false to remove it (see the sketch below).
- Note: might be called more than once on the same entry.
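-
-For instance, a minimal sketch (with a hypothetical key prefix) that keeps only
-registry entries and drops everything else:
-
-```javascript
-cacache.verify(cachePath, {
-  filter: entry => entry.key.startsWith('registry.npmjs.org|')
-}).then(stats => {
-  console.log('removed everything except registry entries:', stats)
-})
-```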
-
-##### Example
-
-```sh
-echo somegarbage >> $CACHEPATH/content/deadbeef
-```
-
-```javascript
-cacache.verify(cachePath).then(stats => {
- // deadbeef collected, because of invalid checksum.
- console.log('cache is much nicer now! stats:', stats)
-})
-```
-
-#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`
-
-Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
-
-##### Example
-
-```javascript
-cacache.verify(cachePath).then(() => {
- cacache.verify.lastRun(cachePath).then(lastTime => {
-    console.log('cacache.verify was last called on ' + lastTime)
- })
-})
-```
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/en.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/en.js
deleted file mode 100644
index a3db581c9f..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/en.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/en.js')
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/es.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/es.js
deleted file mode 100644
index 6282363c3b..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/es.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/es.js')
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/get.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/get.js
deleted file mode 100644
index 2bb3afa528..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/get.js
+++ /dev/null
@@ -1,190 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const fs = require('fs')
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const pipe = require('mississippi').pipe
-const pipeline = require('mississippi').pipeline
-const read = require('./lib/content/read')
-const through = require('mississippi').through
-
-module.exports = function get (cache, key, opts) {
- return getData(false, cache, key, opts)
-}
-module.exports.byDigest = function getByDigest (cache, digest, opts) {
- return getData(true, cache, digest, opts)
-}
-function getData (byDigest, cache, key, opts) {
- opts = opts || {}
- const memoized = (
- byDigest
- ? memo.get.byDigest(cache, key, opts)
- : memo.get(cache, key, opts)
- )
- if (memoized && opts.memoize !== false) {
- return BB.resolve(byDigest ? memoized : {
- metadata: memoized.entry.metadata,
- data: memoized.data,
- integrity: memoized.entry.integrity,
- size: memoized.entry.size
- })
- }
- return (
- byDigest ? BB.resolve(null) : index.find(cache, key, opts)
- ).then(entry => {
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- return read(cache, byDigest ? key : entry.integrity, {
- integrity: opts.integrity,
- size: opts.size
- }).then(data => byDigest ? data : {
- metadata: entry.metadata,
- data: data,
- size: entry.size,
- integrity: entry.integrity
- }).then(res => {
- if (opts.memoize && byDigest) {
- memo.put.byDigest(cache, key, res, opts)
- } else if (opts.memoize) {
- memo.put(cache, entry, res.data, opts)
- }
- return res
- })
- })
-}
-
-module.exports.stream = getStream
-function getStream (cache, key, opts) {
- opts = opts || {}
- let stream = through()
- const memoized = memo.get(cache, key, opts)
- if (memoized && opts.memoize !== false) {
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(memoized.entry.metadata)
- ev === 'integrity' && cb(memoized.entry.integrity)
- ev === 'size' && cb(memoized.entry.size)
- })
- stream.write(memoized.data, () => stream.end())
- return stream
- }
- index.find(cache, key).then(entry => {
- if (!entry) {
- return stream.emit(
- 'error', new index.NotFoundError(cache, key)
- )
- }
- let memoStream
- if (opts.memoize) {
- let memoData = []
- let memoLength = 0
- memoStream = through((c, en, cb) => {
- memoData && memoData.push(c)
- memoLength += c.length
- cb(null, c, en)
- }, cb => {
- memoData && memo.put(cache, entry, Buffer.concat(memoData, memoLength), opts)
- cb()
- })
- } else {
- memoStream = through()
- }
- opts.size = opts.size == null ? entry.size : opts.size
- stream.emit('metadata', entry.metadata)
- stream.emit('integrity', entry.integrity)
- stream.emit('size', entry.size)
- stream.on('newListener', function (ev, cb) {
- ev === 'metadata' && cb(entry.metadata)
- ev === 'integrity' && cb(entry.integrity)
- ev === 'size' && cb(entry.size)
- })
- pipe(
- read.readStream(cache, entry.integrity, opts),
- memoStream,
- stream
- )
- }, err => stream.emit('error', err))
- return stream
-}
-
-module.exports.stream.byDigest = getStreamDigest
-function getStreamDigest (cache, integrity, opts) {
- opts = opts || {}
- const memoized = memo.get.byDigest(cache, integrity, opts)
- if (memoized && opts.memoize !== false) {
- const stream = through()
- stream.write(memoized, () => stream.end())
- return stream
- } else {
- let stream = read.readStream(cache, integrity, opts)
- if (opts.memoize) {
- let memoData = []
- let memoLength = 0
- const memoStream = through((c, en, cb) => {
- memoData && memoData.push(c)
- memoLength += c.length
- cb(null, c, en)
- }, cb => {
- memoData && memo.put.byDigest(
- cache,
- integrity,
- Buffer.concat(memoData, memoLength),
- opts
- )
- cb()
- })
- stream = pipeline(stream, memoStream)
- }
- return stream
- }
-}
-
-module.exports.info = info
-function info (cache, key, opts) {
- opts = opts || {}
- const memoized = memo.get(cache, key, opts)
- if (memoized && opts.memoize !== false) {
- return BB.resolve(memoized.entry)
- } else {
- return index.find(cache, key)
- }
-}
-
-module.exports.hasContent = read.hasContent
-
-module.exports.copy = function cp (cache, key, dest, opts) {
- return copy(false, cache, key, dest, opts)
-}
-module.exports.copy.byDigest = function cpDigest (cache, digest, dest, opts) {
- return copy(true, cache, digest, dest, opts)
-}
-function copy (byDigest, cache, key, dest, opts) {
- opts = opts || {}
- if (read.copy) {
- return (
- byDigest ? BB.resolve(null) : index.find(cache, key, opts)
- ).then(entry => {
- if (!entry && !byDigest) {
- throw new index.NotFoundError(cache, key)
- }
- return read.copy(
- cache, byDigest ? key : entry.integrity, dest, opts
- ).then(() => byDigest ? key : {
- metadata: entry.metadata,
- size: entry.size,
- integrity: entry.integrity
- })
- })
- } else {
- return getData(byDigest, cache, key, opts).then(res => {
- return fs.writeFileAsync(dest, byDigest ? res : res.data)
- .then(() => byDigest ? key : {
- metadata: res.metadata,
- size: res.size,
- integrity: res.integrity
- })
- })
- }
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/index.js
deleted file mode 100644
index a3db581c9f..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/index.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./locales/en.js')
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index fa6491ba6f..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-function contentPath (cache, integrity) {
- const sri = ssri.parse(integrity, {single: true})
- // contentPath is the *strongest* algo given
- return path.join.apply(path, [
- contentDir(cache),
- sri.algorithm
- ].concat(hashToSegments(sri.hexDigest())))
-}
-
-module.exports._contentDir = contentDir
-function contentDir (cache) {
- return path.join(cache, `content-v${contentVer}`)
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index 7a4da3beb8..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,125 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const fs = require('graceful-fs')
-const PassThrough = require('stream').PassThrough
-const pipe = BB.promisify(require('mississippi').pipe)
-const ssri = require('ssri')
-const Y = require('../util/y.js')
-
-BB.promisifyAll(fs)
-
-module.exports = read
-function read (cache, integrity, opts) {
- opts = opts || {}
- return pickContentSri(cache, integrity).then(content => {
- const sri = content.sri
- const cpath = contentPath(cache, sri)
- return fs.readFileAsync(cpath, null).then(data => {
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (ssri.checkData(data, sri)) {
- return data
- } else {
- throw integrityError(sri, cpath)
- }
- })
- })
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-function readStream (cache, integrity, opts) {
- opts = opts || {}
- const stream = new PassThrough()
- pickContentSri(
- cache, integrity
- ).then(content => {
- const sri = content.sri
- return pipe(
- fs.createReadStream(contentPath(cache, sri)),
- ssri.integrityStream({
- integrity: sri,
- size: opts.size
- }),
- stream
- )
- }).catch(err => {
- stream.emit('error', err)
- })
- return stream
-}
-
-if (fs.copyFile) {
- module.exports.copy = copy
-}
-function copy (cache, integrity, dest, opts) {
- opts = opts || {}
- return pickContentSri(cache, integrity).then(content => {
- const sri = content.sri
- const cpath = contentPath(cache, sri)
- return fs.copyFileAsync(cpath, dest).then(() => content.size)
- })
-}
-
-module.exports.hasContent = hasContent
-function hasContent (cache, integrity) {
- if (!integrity) { return BB.resolve(false) }
- return pickContentSri(cache, integrity)
- .catch({code: 'ENOENT'}, () => false)
- .catch({code: 'EPERM'}, err => {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
- }).then(content => {
- if (!content.sri) return false
- return ({ sri: content.sri, size: content.stat.size })
- })
-}
-
-module.exports._pickContentSri = pickContentSri
-function pickContentSri (cache, integrity) {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fs.lstatAsync(cpath).then(stat => ({ sri: digests[0], stat }))
- } else {
- return BB.any(sri[sri.pickAlgorithm()].map(meta => {
- return pickContentSri(cache, meta)
- }))
- .catch(err => {
- if ([].some.call(err, e => e.code === 'ENOENT')) {
- throw Object.assign(
- new Error('No matching content found for ' + sri.toString()),
- {code: 'ENOENT'}
- )
- } else {
- throw err[0]
- }
- })
- }
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function integrityError (sri, path) {
- var err = new Error(Y`Integrity verification failed for ${sri} (${path})`)
- err.code = 'EINTEGRITY'
- err.sri = sri
- err.path = path
- return err
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index 12cf158235..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,21 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const hasContent = require('./read').hasContent
-const rimraf = BB.promisify(require('rimraf'))
-
-module.exports = rm
-function rm (cache, integrity) {
- return hasContent(cache, integrity).then(content => {
- if (content) {
- const sri = content.sri
- if (sri) {
- return rimraf(contentPath(cache, sri)).then(() => true)
- }
- } else {
- return false
- }
- })
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index a79ae92902..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,162 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./path')
-const fixOwner = require('../util/fix-owner')
-const fs = require('graceful-fs')
-const moveFile = require('../util/move-file')
-const PassThrough = require('stream').PassThrough
-const path = require('path')
-const pipe = BB.promisify(require('mississippi').pipe)
-const rimraf = BB.promisify(require('rimraf'))
-const ssri = require('ssri')
-const to = require('mississippi').to
-const uniqueFilename = require('unique-filename')
-const Y = require('../util/y.js')
-
-const writeFileAsync = BB.promisify(fs.writeFile)
-
-module.exports = write
-function write (cache, data, opts) {
- opts = opts || {}
- if (opts.algorithms && opts.algorithms.length > 1) {
- throw new Error(
- Y`opts.algorithms only supports a single algorithm for now`
- )
- }
- if (typeof opts.size === 'number' && data.length !== opts.size) {
- return BB.reject(sizeError(opts.size, data.length))
- }
- const sri = ssri.fromData(data, opts)
- if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
- return BB.reject(checksumError(opts.integrity, sri))
- }
- return BB.using(makeTmp(cache, opts), tmp => (
- writeFileAsync(
- tmp.target, data, {flag: 'wx'}
- ).then(() => (
- moveToDestination(tmp, cache, sri, opts)
- ))
- )).then(() => ({integrity: sri, size: data.length}))
-}
-
-module.exports.stream = writeStream
-function writeStream (cache, opts) {
- opts = opts || {}
- const inputStream = new PassThrough()
- let inputErr = false
- function errCheck () {
- if (inputErr) { throw inputErr }
- }
-
- let allDone
- const ret = to((c, n, cb) => {
- if (!allDone) {
- allDone = handleContent(inputStream, cache, opts, errCheck)
- }
- inputStream.write(c, n, cb)
- }, cb => {
- inputStream.end(() => {
- if (!allDone) {
- const e = new Error(Y`Cache input stream was empty`)
- e.code = 'ENODATA'
- return ret.emit('error', e)
- }
- allDone.then(res => {
- res.integrity && ret.emit('integrity', res.integrity)
- res.size !== null && ret.emit('size', res.size)
- cb()
- }, e => {
- ret.emit('error', e)
- })
- })
- })
- ret.once('error', e => {
- inputErr = e
- })
- return ret
-}
-
-function handleContent (inputStream, cache, opts, errCheck) {
- return BB.using(makeTmp(cache, opts), tmp => {
- errCheck()
- return pipeToTmp(
- inputStream, cache, tmp.target, opts, errCheck
- ).then(res => {
- return moveToDestination(
- tmp, cache, res.integrity, opts, errCheck
- ).then(() => res)
- })
- })
-}
-
-function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
- return BB.resolve().then(() => {
- let integrity
- let size
- const hashStream = ssri.integrityStream({
- integrity: opts.integrity,
- algorithms: opts.algorithms,
- size: opts.size
- }).on('integrity', s => {
- integrity = s
- }).on('size', s => {
- size = s
- })
- const outStream = fs.createWriteStream(tmpTarget, {
- flags: 'wx'
- })
- errCheck()
- return pipe(inputStream, hashStream, outStream).then(() => {
- return {integrity, size}
- }, err => {
- return rimraf(tmpTarget).then(() => { throw err })
- })
- })
-}
-
-function makeTmp (cache, opts) {
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(
- path.dirname(tmpTarget), opts.uid, opts.gid
- ).then(() => ({
- target: tmpTarget,
- moved: false
- })).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
-}
-
-function moveToDestination (tmp, cache, sri, opts, errCheck) {
- errCheck && errCheck()
- const destination = contentPath(cache, sri)
- const destDir = path.dirname(destination)
-
- return fixOwner.mkdirfix(
- destDir, opts.uid, opts.gid
- ).then(() => {
- errCheck && errCheck()
- return moveFile(tmp.target, destination)
- }).then(() => {
- errCheck && errCheck()
- tmp.moved = true
- return fixOwner.chownr(destination, opts.uid, opts.gid)
- })
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function checksumError (expected, found) {
- var err = new Error(Y`Integrity check failed:
- Wanted: ${expected}
- Found: ${found}`)
- err.code = 'EINTEGRITY'
- err.expected = expected
- err.found = found
- return err
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index fe1cd06457..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,225 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./content/path')
-const crypto = require('crypto')
-const fixOwner = require('./util/fix-owner')
-const fs = require('graceful-fs')
-const hashToSegments = require('./util/hash-to-segments')
-const ms = require('mississippi')
-const path = require('path')
-const ssri = require('ssri')
-const Y = require('./util/y.js')
-
-const indexV = require('../package.json')['cache-version'].index
-
-const appendFileAsync = BB.promisify(fs.appendFile)
-const readFileAsync = BB.promisify(fs.readFile)
-const readdirAsync = BB.promisify(fs.readdir)
-const concat = ms.concat
-const from = ms.from
-
-module.exports.NotFoundError = class NotFoundError extends Error {
- constructor (cache, key) {
- super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
- this.code = 'ENOENT'
- this.cache = cache
- this.key = key
- }
-}
-
-module.exports.insert = insert
-function insert (cache, key, integrity, opts) {
- opts = opts || {}
- const bucket = bucketPath(cache, key)
- const entry = {
- key,
- integrity: integrity && ssri.stringify(integrity),
- time: Date.now(),
- size: opts.size,
- metadata: opts.metadata
- }
- return fixOwner.mkdirfix(
- path.dirname(bucket), opts.uid, opts.gid
- ).then(() => {
- const stringified = JSON.stringify(entry)
- // NOTE - Cleverness ahoy!
- //
- // This works because it's tremendously unlikely for an entry to corrupt
- // another while still preserving the string length of the JSON in
- // question. So, we just slap the length in there and verify it on read.
- //
- // Thanks to @isaacs for the whiteboarding session that ended up with this.
- return appendFileAsync(
- bucket, `\n${hashEntry(stringified)}\t${stringified}`
- )
- }).then(
- () => fixOwner.chownr(bucket, opts.uid, opts.gid)
- ).catch({code: 'ENOENT'}, () => {
- // There's a class of race conditions that happen when things get deleted
- // during fixOwner, or between the two mkdirfix/chownr calls.
- //
- // It's perfectly fine to just not bother in those cases and lie
- // that the index entry was written. Because it's a cache.
- }).then(() => {
- return formatEntry(cache, entry)
- })
-}
-
-module.exports.find = find
-function find (cache, key) {
- const bucket = bucketPath(cache, key)
- return bucketEntries(cache, bucket).then(entries => {
- return entries.reduce((latest, next) => {
- if (next && next.key === key) {
- return formatEntry(cache, next)
- } else {
- return latest
- }
- }, null)
- }).catch(err => {
- if (err.code === 'ENOENT') {
- return null
- } else {
- throw err
- }
- })
-}
-
-module.exports.delete = del
-function del (cache, key, opts) {
- return insert(cache, key, null, opts)
-}
-
-module.exports.lsStream = lsStream
-function lsStream (cache) {
- const indexDir = bucketDir(cache)
- const stream = from.obj()
-
- // "/cachename/*"
- readdirOrEmpty(indexDir).map(bucket => {
- const bucketPath = path.join(indexDir, bucket)
-
- // "/cachename/<bucket 0xFF>/*"
- return readdirOrEmpty(bucketPath).map(subbucket => {
- const subbucketPath = path.join(bucketPath, subbucket)
-
- // "/cachename/<bucket 0xFF>/<bucket 0xFF>/*"
- return readdirOrEmpty(subbucketPath).map(entry => {
- const getKeyToEntry = bucketEntries(
- cache,
- path.join(subbucketPath, entry)
- ).reduce((acc, entry) => {
- acc.set(entry.key, entry)
- return acc
- }, new Map())
-
- return getKeyToEntry.then(reduced => {
- for (let entry of reduced.values()) {
- const formatted = formatEntry(cache, entry)
- formatted && stream.push(formatted)
- }
- }).catch({code: 'ENOENT'}, nop)
- })
- })
- }).then(() => {
- stream.push(null)
- }, err => {
- stream.emit('error', err)
- })
-
- return stream
-}
-
-module.exports.ls = ls
-function ls (cache) {
- return BB.fromNode(cb => {
- lsStream(cache).on('error', cb).pipe(concat(entries => {
- cb(null, entries.reduce((acc, xs) => {
- acc[xs.key] = xs
- return acc
- }, {}))
- }))
- })
-}
-
-function bucketEntries (cache, bucket, filter) {
- return readFileAsync(
- bucket, 'utf8'
- ).then(data => {
- let entries = []
- data.split('\n').forEach(entry => {
- if (!entry) { return }
- const pieces = entry.split('\t')
- if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
- // Hash is no good! Corruption or malice? Doesn't matter!
- // EJECT EJECT
- return
- }
- let obj
- try {
- obj = JSON.parse(pieces[1])
- } catch (e) {
- // Entry is corrupted!
- return
- }
- if (obj) {
- entries.push(obj)
- }
- })
- return entries
- })
-}
-
-module.exports._bucketDir = bucketDir
-function bucketDir (cache) {
- return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports._bucketPath = bucketPath
-function bucketPath (cache, key) {
- const hashed = hashKey(key)
- return path.join.apply(path, [bucketDir(cache)].concat(
- hashToSegments(hashed)
- ))
-}
-
-module.exports._hashKey = hashKey
-function hashKey (key) {
- return hash(key, 'sha256')
-}
-
-module.exports._hashEntry = hashEntry
-function hashEntry (str) {
- return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
- return crypto
- .createHash(digest)
- .update(str)
- .digest('hex')
-}
-
-function formatEntry (cache, entry) {
- // Treat null digests as deletions. They'll shadow any previous entries.
- if (!entry.integrity) { return null }
- return {
- key: entry.key,
- integrity: entry.integrity,
- path: contentPath(cache, entry.integrity),
- size: entry.size,
- time: entry.time,
- metadata: entry.metadata
- }
-}
-
-function readdirOrEmpty (dir) {
- return readdirAsync(dir)
- .catch({code: 'ENOENT'}, () => [])
- .catch({code: 'ENOTDIR'}, () => [])
-}
-
-function nop () {
-}
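The index format removed above is append-only: a key is hashed (sha256) into a three-level bucket path under `index-v5/` (the `5` comes from `cache-version.index` in this package.json), and each write appends a line of `<sha1-of-json>\t<json>` so that corrupted or truncated lines can be detected and skipped on read. A minimal standalone sketch of that layout (not the module itself; the key and integrity values below are made up):

```js
'use strict'

// Standalone sketch of the deleted entry-index layout, for reference only.
const crypto = require('crypto')
const path = require('path')

const hash = (str, algo) => crypto.createHash(algo).update(str).digest('hex')

function bucketPathFor (cache, key) {
  const hashed = hash(key, 'sha256')
  // Three-level fan-out: first two hex chars, next two, then the remainder.
  return path.join(cache, 'index-v5',
    hashed.slice(0, 2), hashed.slice(2, 4), hashed.slice(4))
}

function entryLine (entry) {
  const stringified = JSON.stringify(entry)
  // The sha1 prefix is what bucketEntries() checks before trusting a line.
  return `\n${hash(stringified, 'sha1')}\t${stringified}`
}

// Example values are placeholders:
console.log(bucketPathFor('/tmp/my-cache', 'my-key'))
console.log(entryLine({ key: 'my-key', integrity: 'sha512-<base64 digest>', time: Date.now() }))
```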
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 92179c7ac6..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,69 +0,0 @@
-'use strict'
-
-const LRU = require('lru-cache')
-
-const MAX_SIZE = 50 * 1024 * 1024 // 50MB
-const MAX_AGE = 3 * 60 * 1000
-
-let MEMOIZED = new LRU({
- max: MAX_SIZE,
- maxAge: MAX_AGE,
- length: (entry, key) => {
- if (key.startsWith('key:')) {
- return entry.data.length
- } else if (key.startsWith('digest:')) {
- return entry.length
- }
- }
-})
-
-module.exports.clearMemoized = clearMemoized
-function clearMemoized () {
- const old = {}
- MEMOIZED.forEach((v, k) => {
- old[k] = v
- })
- MEMOIZED.reset()
- return old
-}
-
-module.exports.put = put
-function put (cache, entry, data, opts) {
- pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
- putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-function putDigest (cache, integrity, data, opts) {
- pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-function get (cache, key, opts) {
- return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-function getDigest (cache, integrity, opts) {
- return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
- constructor (obj) {
- this.obj = obj
- }
- get (key) { return this.obj[key] }
- set (key, val) { this.obj[key] = val }
-}
-
-function pickMem (opts) {
- if (!opts || !opts.memoize) {
- return MEMOIZED
- } else if (opts.memoize.get && opts.memoize.set) {
- return opts.memoize
- } else if (typeof opts.memoize === 'object') {
- return new ObjProxy(opts.memoize)
- } else {
- return MEMOIZED
- }
-}
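`pickMem()` above lets callers supply their own memoization target through `opts.memoize`: anything with `get`/`set` (for example a `Map`) is used directly, other objects are wrapped in `ObjProxy`, and everything else falls back to the shared LRU. A usage sketch, assuming cacache's public `put`/`get` (not shown in this hunk) pass `opts` through to these helpers:

```js
// Caller-scoped memoization sketch; the Map and names are illustrative.
const cacache = require('cacache')

const memo = new Map() // has .get()/.set(), so pickMem() uses it as-is

cacache.put('/tmp/my-cache', 'my-key', 'hello', { memoize: memo })
  .then(() => cacache.get('/tmp/my-cache', 'my-key', { memoize: memo }))
  .then(res => console.log(res.data.toString())) // 'hello', served from `memo`
```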
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/fix-owner.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/fix-owner.js
deleted file mode 100644
index 7000bff048..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/fix-owner.js
+++ /dev/null
@@ -1,44 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const chownr = BB.promisify(require('chownr'))
-const mkdirp = BB.promisify(require('mkdirp'))
-const inflight = require('promise-inflight')
-
-module.exports.chownr = fixOwner
-function fixOwner (filepath, uid, gid) {
- if (!process.getuid) {
- // This platform doesn't need ownership fixing
- return BB.resolve()
- }
- if (typeof uid !== 'number' && typeof gid !== 'number') {
- // There's no permissions override. Nothing to do here.
- return BB.resolve()
- }
- if ((typeof uid === 'number' && process.getuid() === uid) &&
- (typeof gid === 'number' && process.getgid() === gid)) {
- // No need to override if it's already what we used.
- return BB.resolve()
- }
- return inflight(
- 'fixOwner: fixing ownership on ' + filepath,
- () => chownr(
- filepath,
- typeof uid === 'number' ? uid : process.getuid(),
- typeof gid === 'number' ? gid : process.getgid()
- ).catch({code: 'ENOENT'}, () => null)
- )
-}
-
-module.exports.mkdirfix = mkdirfix
-function mkdirfix (p, uid, gid, cb) {
- return mkdirp(p).then(made => {
- if (made) {
- return fixOwner(made, uid, gid).then(() => made)
- }
- }).catch({code: 'EEXIST'}, () => {
- // There's a race in mkdirp!
- return fixOwner(p, uid, gid).then(() => null)
- })
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 192be2a6d6..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,11 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
- return [
- hash.slice(0, 2),
- hash.slice(2, 4),
- hash.slice(4)
- ]
-}
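For reference, the split above turns one long hex digest into two two-character directory levels plus the remainder, which keeps the content and index trees from growing too wide at any single level. A quick illustration of the same function:

```js
// Illustration only: how a digest fans out into path segments.
function hashToSegments (hash) {
  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
}

console.log(hashToSegments('deadbeefcafe'))
// => [ 'de', 'ad', 'beefcafe' ]
```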
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/move-file.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/move-file.js
deleted file mode 100644
index b43744b3da..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/move-file.js
+++ /dev/null
@@ -1,51 +0,0 @@
-'use strict'
-
-const fs = require('graceful-fs')
-const BB = require('bluebird')
-const chmod = BB.promisify(fs.chmod)
-const unlink = BB.promisify(fs.unlink)
-let move
-let pinflight
-
-module.exports = moveFile
-function moveFile (src, dest) {
- // This isn't quite an fs.rename -- the assumption is that
- // if `dest` already exists, and we get certain errors while
- // trying to move it, we should just not bother.
- //
- // In the case of cache corruption, users will receive an
- // EINTEGRITY error elsewhere, and can remove the offending
- // content their own way.
- //
- // Note that, as the name suggests, this strictly only supports file moves.
- return BB.fromNode(cb => {
- fs.link(src, dest, err => {
- if (err) {
- if (err.code === 'EEXIST' || err.code === 'EBUSY') {
- // file already exists, so whatever
- } else if (err.code === 'EPERM' && process.platform === 'win32') {
- // file handle stayed open even past graceful-fs limits
- } else {
- return cb(err)
- }
- }
- return cb()
- })
- }).then(() => {
- // content should never change for any reason, so make it read-only
- return BB.join(unlink(src), process.platform !== 'win32' && chmod(dest, '0444'))
- }).catch(() => {
- if (!pinflight) { pinflight = require('promise-inflight') }
- return pinflight('cacache-move-file:' + dest, () => {
- return BB.promisify(fs.stat)(dest).catch(err => {
- if (err.code !== 'ENOENT') {
- // Something else is wrong here. Bail bail bail
- throw err
- }
- // file doesn't already exist! let's try a rename -> copy fallback
- if (!move) { move = require('move-concurrently') }
- return move(src, dest, { BB, fs })
- })
- })
- })
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 4fc4512cc8..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,32 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const fixOwner = require('./fix-owner')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const uniqueFilename = require('unique-filename')
-
-module.exports.mkdir = mktmpdir
-function mktmpdir (cache, opts) {
- opts = opts || {}
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(tmpTarget, opts.uid, opts.gid).then(() => {
- return tmpTarget
- })
-}
-
-module.exports.withTmp = withTmp
-function withTmp (cache, opts, cb) {
- if (!cb) {
- cb = opts
- opts = null
- }
- opts = opts || {}
- return BB.using(mktmpdir(cache, opts).disposer(rimraf), cb)
-}
-
-module.exports.fix = fixtmpdir
-function fixtmpdir (cache, opts) {
- return fixOwner(path.join(cache, 'tmp'), opts.uid, opts.gid)
-}
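`withTmp()` pairs `mktmpdir()` with a bluebird disposer, so the temp directory created under `<cache>/tmp` is rimraf'd once the callback's promise settles. A usage sketch via the public `cacache.tmp.withTmp` export (per locales/en.js); the file names here are illustrative:

```js
const cacache = require('cacache')
const fs = require('fs')
const path = require('path')

cacache.tmp.withTmp('/tmp/my-cache', { tmpPrefix: 'unpack' }, dir => {
  // `dir` is a unique directory under /tmp/my-cache/tmp
  const scratch = path.join(dir, 'download.tgz')
  fs.writeFileSync(scratch, 'partial data')
  // ...do work with `scratch`; once this promise resolves, `dir` is removed.
  return Promise.resolve()
})
```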
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/y.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/y.js
deleted file mode 100644
index d62bedacb3..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/util/y.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict'
-
-const path = require('path')
-const y18n = require('y18n')({
- directory: path.join(__dirname, '../../locales'),
- locale: 'en',
- updateFiles: process.env.CACACHE_UPDATE_LOCALE_FILES === 'true'
-})
-
-module.exports = yTag
-function yTag (parts) {
- let str = ''
- parts.forEach((part, i) => {
- const arg = arguments[i + 1]
- str += part
- if (arg) {
- str += '%s'
- }
- })
- return y18n.__.apply(null, [str].concat([].slice.call(arguments, 1)))
-}
-
-module.exports.setLocale = locale => {
- y18n.setLocale(locale)
-}
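`yTag()` joins the literal parts of a tagged template with `%s` placeholders and hands the result to y18n, so the template literal itself doubles as the lookup key in `locales/*.json`. A small sketch (requiring the util directly, which works because `lib` is included in the published files):

```js
const Y = require('cacache/lib/util/y.js')

const expected = 4
const found = 2
// Builds the key "Bad data size: expected inserted data to be %s bytes, but
// got %s instead", finds it in locales/en.json, and interpolates the values.
console.log(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
// => Bad data size: expected inserted data to be 4 bytes, but got 2 instead
```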
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js
deleted file mode 100644
index 6a01004c97..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,213 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const contentPath = require('./content/path')
-const finished = BB.promisify(require('mississippi').finished)
-const fixOwner = require('./util/fix-owner')
-const fs = require('graceful-fs')
-const glob = BB.promisify(require('glob'))
-const index = require('./entry-index')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const ssri = require('ssri')
-
-BB.promisifyAll(fs)
-
-module.exports = verify
-function verify (cache, opts) {
- opts = opts || {}
- opts.log && opts.log.silly('verify', 'verifying cache at', cache)
- return BB.reduce([
- markStartTime,
- fixPerms,
- garbageCollect,
- rebuildIndex,
- cleanTmp,
- writeVerifile,
- markEndTime
- ], (stats, step, i) => {
- const label = step.name || `step #${i}`
- const start = new Date()
- return BB.resolve(step(cache, opts)).then(s => {
- s && Object.keys(s).forEach(k => {
- stats[k] = s[k]
- })
- const end = new Date()
- if (!stats.runTime) { stats.runTime = {} }
- stats.runTime[label] = end - start
- return stats
- })
- }, {}).tap(stats => {
- stats.runTime.total = stats.endTime - stats.startTime
- opts.log && opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
- })
-}
-
-function markStartTime (cache, opts) {
- return { startTime: new Date() }
-}
-
-function markEndTime (cache, opts) {
- return { endTime: new Date() }
-}
-
-function fixPerms (cache, opts) {
- opts.log && opts.log.silly('verify', 'fixing cache permissions')
- return fixOwner.mkdirfix(cache, opts.uid, opts.gid).then(() => {
- // TODO - fix file permissions too
- return fixOwner.chownr(cache, opts.uid, opts.gid)
- }).then(() => null)
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rimraf it.
-//
-function garbageCollect (cache, opts) {
- opts.log && opts.log.silly('verify', 'garbage collecting content')
- const indexStream = index.lsStream(cache)
- const liveContent = new Set()
- indexStream.on('data', entry => {
- if (opts && opts.filter && !opts.filter(entry)) { return }
- liveContent.add(entry.integrity.toString())
- })
- return finished(indexStream).then(() => {
- const contentDir = contentPath._contentDir(cache)
- return glob(path.join(contentDir, '**'), {
- follow: false,
- nodir: true,
- nosort: true
- }).then(files => {
- return BB.resolve({
- verifiedContent: 0,
- reclaimedCount: 0,
- reclaimedSize: 0,
- badContentCount: 0,
- keptSize: 0
- }).tap((stats) => BB.map(files, (f) => {
- const split = f.split(/[/\\]/)
- const digest = split.slice(split.length - 3).join('')
- const algo = split[split.length - 4]
- const integrity = ssri.fromHex(digest, algo)
- if (liveContent.has(integrity.toString())) {
- return verifyContent(f, integrity).then(info => {
- if (!info.valid) {
- stats.reclaimedCount++
- stats.badContentCount++
- stats.reclaimedSize += info.size
- } else {
- stats.verifiedContent++
- stats.keptSize += info.size
- }
- return stats
- })
- } else {
- // No entries refer to this content. We can delete.
- stats.reclaimedCount++
- return fs.statAsync(f).then(s => {
- return rimraf(f).then(() => {
- stats.reclaimedSize += s.size
- return stats
- })
- })
- }
- }, {concurrency: opts.concurrency || 20}))
- })
- })
-}
-
-function verifyContent (filepath, sri) {
- return fs.statAsync(filepath).then(stat => {
- const contentInfo = {
- size: stat.size,
- valid: true
- }
- return ssri.checkStream(
- fs.createReadStream(filepath),
- sri
- ).catch(err => {
- if (err.code !== 'EINTEGRITY') { throw err }
- return rimraf(filepath).then(() => {
- contentInfo.valid = false
- })
- }).then(() => contentInfo)
- }).catch({code: 'ENOENT'}, () => ({size: 0, valid: false}))
-}
-
-function rebuildIndex (cache, opts) {
- opts.log && opts.log.silly('verify', 'rebuilding index')
- return index.ls(cache).then(entries => {
- const stats = {
- missingContent: 0,
- rejectedEntries: 0,
- totalEntries: 0
- }
- const buckets = {}
- for (let k in entries) {
- if (entries.hasOwnProperty(k)) {
- const hashed = index._hashKey(k)
- const entry = entries[k]
- const excluded = opts && opts.filter && !opts.filter(entry)
- excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded) {
- buckets[hashed].push(entry)
- } else if (buckets[hashed] && excluded) {
- // skip
- } else if (excluded) {
- buckets[hashed] = []
- buckets[hashed]._path = index._bucketPath(cache, k)
- } else {
- buckets[hashed] = [entry]
- buckets[hashed]._path = index._bucketPath(cache, k)
- }
- }
- }
- return BB.map(Object.keys(buckets), key => {
- return rebuildBucket(cache, buckets[key], stats, opts)
- }, {concurrency: opts.concurrency || 20}).then(() => stats)
- })
-}
-
-function rebuildBucket (cache, bucket, stats, opts) {
- return fs.truncateAsync(bucket._path).then(() => {
- // This needs to be serialized because cacache explicitly
- // lets very racy bucket conflicts clobber each other.
- return BB.mapSeries(bucket, entry => {
- const content = contentPath(cache, entry.integrity)
- return fs.statAsync(content).then(() => {
- return index.insert(cache, entry.key, entry.integrity, {
- uid: opts.uid,
- gid: opts.gid,
- metadata: entry.metadata
- }).then(() => { stats.totalEntries++ })
- }).catch({code: 'ENOENT'}, () => {
- stats.rejectedEntries++
- stats.missingContent++
- })
- })
- })
-}
-
-function cleanTmp (cache, opts) {
- opts.log && opts.log.silly('verify', 'cleaning tmp directory')
- return rimraf(path.join(cache, 'tmp'))
-}
-
-function writeVerifile (cache, opts) {
- const verifile = path.join(cache, '_lastverified')
- opts.log && opts.log.silly('verify', 'writing verifile to ' + verifile)
- return fs.writeFileAsync(verifile, '' + (+(new Date())))
-}
-
-module.exports.lastRun = lastRun
-function lastRun (cache) {
- return fs.readFileAsync(
- path.join(cache, '_lastverified'), 'utf8'
- ).then(data => new Date(+data))
-}
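`verify()` runs the steps above in sequence and folds their results into a single stats object, so the resolved value reports what was verified, what was reclaimed, and how long each phase took. A usage sketch via the public export; the `filter` predicate and key prefix are illustrative:

```js
const cacache = require('cacache')

cacache.verify('/tmp/my-cache', {
  // Entries rejected by the filter are dropped from the rebuilt index and
  // their content becomes eligible for garbage collection.
  filter: entry => !entry.key.startsWith('tmp:')
}).then(stats => {
  console.log('verified content items:', stats.verifiedContent)
  console.log('reclaimed entries:', stats.reclaimedCount)
  console.log('rebuilt index entries:', stats.totalEntries)
  console.log('total time (ms):', stats.runTime.total)
})
```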
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.js
deleted file mode 100644
index 22025cf0e8..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.js
+++ /dev/null
@@ -1,44 +0,0 @@
-'use strict'
-
-const ls = require('../ls.js')
-const get = require('../get.js')
-const put = require('../put.js')
-const rm = require('../rm.js')
-const verify = require('../verify.js')
-const setLocale = require('../lib/util/y.js').setLocale
-const clearMemoized = require('../lib/memoization.js').clearMemoized
-const tmp = require('../lib/util/tmp.js')
-
-setLocale('en')
-
-const x = module.exports
-
-x.ls = cache => ls(cache)
-x.ls.stream = cache => ls.stream(cache)
-
-x.get = (cache, key, opts) => get(cache, key, opts)
-x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
-x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
-x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
-x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts)
-x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts)
-x.get.info = (cache, key) => get.info(cache, key)
-x.get.hasContent = (cache, hash) => get.hasContent(cache, hash)
-
-x.put = (cache, key, data, opts) => put(cache, key, data, opts)
-x.put.stream = (cache, key, opts) => put.stream(cache, key, opts)
-
-x.rm = (cache, key) => rm.entry(cache, key)
-x.rm.all = cache => rm.all(cache)
-x.rm.entry = x.rm
-x.rm.content = (cache, hash) => rm.content(cache, hash)
-
-x.setLocale = lang => setLocale(lang)
-x.clearMemoized = () => clearMemoized()
-
-x.tmp = {}
-x.tmp.mkdir = (cache, opts) => tmp.mkdir(cache, opts)
-x.tmp.withTmp = (cache, opts, cb) => tmp.withTmp(cache, opts, cb)
-
-x.verify = (cache, opts) => verify(cache, opts)
-x.verify.lastRun = cache => verify.lastRun(cache)
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.json
deleted file mode 100644
index 82ecb08324..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/en.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "No cache entry for `%s` found in `%s`": "No cache entry for %s found in %s",
- "Integrity verification failed for %s (%s)": "Integrity verification failed for %s (%s)",
- "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Bad data size: expected inserted data to be %s bytes, but got %s instead",
- "Cache input stream was empty": "Cache input stream was empty"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.js
deleted file mode 100644
index 9a27de6585..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.js
+++ /dev/null
@@ -1,46 +0,0 @@
-'use strict'
-
-const ls = require('../ls.js')
-const get = require('../get.js')
-const put = require('../put.js')
-const rm = require('../rm.js')
-const verify = require('../verify.js')
-const setLocale = require('../lib/util/y.js').setLocale
-const clearMemoized = require('../lib/memoization.js').clearMemoized
-const tmp = require('../lib/util/tmp.js')
-
-setLocale('es')
-
-const x = module.exports
-
-x.ls = cache => ls(cache)
-x.ls.flujo = cache => ls.stream(cache)
-
-x.saca = (cache, clave, ops) => get(cache, clave, ops)
-x.saca.porHacheo = (cache, hacheo, ops) => get.byDigest(cache, hacheo, ops)
-x.saca.flujo = (cache, clave, ops) => get.stream(cache, clave, ops)
-x.saca.flujo.porHacheo = (cache, hacheo, ops) => get.stream.byDigest(cache, hacheo, ops)
-x.saca.copia = (cache, clave, destino, opts) => get.copy(cache, clave, destino, opts)
-x.saca.copia.porHacheo = (cache, hacheo, destino, opts) => get.copy.byDigest(cache, hacheo, destino, opts)
-x.saca.info = (cache, clave) => get.info(cache, clave)
-x.saca.tieneDatos = (cache, hacheo) => get.hasContent(cache, hacheo)
-
-x.mete = (cache, clave, datos, ops) => put(cache, clave, datos, ops)
-x.mete.flujo = (cache, clave, ops) => put.stream(cache, clave, ops)
-
-x.rm = (cache, clave) => rm.entry(cache, clave)
-x.rm.todo = cache => rm.all(cache)
-x.rm.entrada = x.rm
-x.rm.datos = (cache, hacheo) => rm.content(cache, hacheo)
-
-x.ponLenguaje = lang => setLocale(lang)
-x.limpiaMemoizado = () => clearMemoized()
-
-x.tmp = {}
-x.tmp.mkdir = (cache, ops) => tmp.mkdir(cache, ops)
-x.tmp.hazdir = x.tmp.mkdir
-x.tmp.conTmp = (cache, ops, cb) => tmp.withTmp(cache, ops, cb)
-
-x.verifica = (cache, ops) => verify(cache, ops)
-x.verifica.ultimaVez = cache => verify.lastRun(cache)
-x.verifica.últimaVez = x.verifica.ultimaVez
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.json
deleted file mode 100644
index a91d76225b..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/locales/es.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "No cache entry for `%s` found in `%s`": "No existe ninguna entrada para «%s» en «%s»",
- "Integrity verification failed for %s (%s)": "Verificación de integridad falló para «%s» (%s)",
- "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Tamaño incorrecto de datos: los datos insertados debieron haber sido %s octetos, pero fueron %s",
- "Cache input stream was empty": "El stream de entrada al caché estaba vacío"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/ls.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/ls.js
deleted file mode 100644
index 9f49b388ac..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/ls.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict'
-
-var index = require('./lib/entry-index')
-
-module.exports = index.ls
-module.exports.stream = index.lsStream
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/changelog.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/changelog.md
deleted file mode 100644
index 93430828f8..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/changelog.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# mississippi Change Log
-All notable changes to this project will be documented in this file.
-This project adheres to [Semantic Versioning](http://semver.org/).
-
-## 2.0.0 - 2018-01-30
-* Update to pump@2.0.1. (Use the individual modules to avoid potentially unnecessary major updates in your project)
-* Pin engines support to >= Node 4.0.0. Run Node LTS or greater.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/index.js
deleted file mode 100644
index d635b29c56..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/index.js
+++ /dev/null
@@ -1,10 +0,0 @@
-module.exports.pipe = require('pump')
-module.exports.each = require('stream-each')
-module.exports.pipeline = require('pumpify')
-module.exports.duplex = require('duplexify')
-module.exports.through = require('through2')
-module.exports.concat = require('concat-stream')
-module.exports.finished = require('end-of-stream')
-module.exports.from = require('from2')
-module.exports.to = require('flush-write-stream')
-module.exports.parallel = require('parallel-transform')
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/license b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/license
deleted file mode 100644
index e34763968c..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/license
+++ /dev/null
@@ -1,7 +0,0 @@
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/package.json
deleted file mode 100644
index 0d8c11480d..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "mississippi@^2.0.0",
- "_id": "mississippi@2.0.0",
- "_inBundle": false,
- "_integrity": "sha512-zHo8v+otD1J10j/tC+VNoGK9keCuByhKovAvdn74dmxJl9+mWHnx6EMsDN4lgRoMI/eYo2nchAxniIbUPb5onw==",
- "_location": "/npm-registry-fetch/cacache/mississippi",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "mississippi@^2.0.0",
- "name": "mississippi",
- "escapedName": "mississippi",
- "rawSpec": "^2.0.0",
- "saveSpec": null,
- "fetchSpec": "^2.0.0"
- },
- "_requiredBy": [
- "/npm-registry-fetch/cacache"
- ],
- "_resolved": "https://registry.npmjs.org/mississippi/-/mississippi-2.0.0.tgz",
- "_shasum": "3442a508fafc28500486feea99409676e4ee5a6f",
- "_spec": "mississippi@^2.0.0",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch/node_modules/cacache",
- "author": {
- "name": "max ogden"
- },
- "bugs": {
- "url": "https://github.com/maxogden/mississippi/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "concat-stream": "^1.5.0",
- "duplexify": "^3.4.2",
- "end-of-stream": "^1.1.0",
- "flush-write-stream": "^1.0.0",
- "from2": "^2.1.0",
- "parallel-transform": "^1.1.0",
- "pump": "^2.0.1",
- "pumpify": "^1.3.3",
- "stream-each": "^1.1.0",
- "through2": "^2.0.0"
- },
- "deprecated": false,
- "description": "a collection of useful streams",
- "devDependencies": {},
- "engines": {
- "node": ">=4.0.0"
- },
- "homepage": "https://github.com/maxogden/mississippi#readme",
- "license": "BSD-2-Clause",
- "main": "index.js",
- "name": "mississippi",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/maxogden/mississippi.git"
- },
- "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1"
- },
- "version": "2.0.0"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/readme.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/readme.md
deleted file mode 100644
index 5fa6d66c89..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi/readme.md
+++ /dev/null
@@ -1,411 +0,0 @@
-# mississippi
-
-a collection of useful stream utility modules. learn how the modules work using this and then pick the ones you want and use them individually
-
-the goal of the modules included in mississippi is to make working with streams easy without sacrificing speed, error handling or composability.
-
-## usage
-
-```js
-var miss = require('mississippi')
-```
-
-## methods
-
-- [pipe](#pipe)
-- [each](#each)
-- [pipeline](#pipeline)
-- [duplex](#duplex)
-- [through](#through)
-- [from](#from)
-- [to](#to)
-- [concat](#concat)
-- [finished](#finished)
-- [parallel](#parallel)
-
-### pipe
-
-##### `miss.pipe(stream1, stream2, stream3, ..., cb)`
-
-Pipes streams together and destroys all of them if one of them closes. Calls `cb` with `(error)` if there was an error in any of the streams.
-
-When using standard `source.pipe(destination)` the source will _not_ be destroyed if the destination emits close or error. You are also not able to provide a callback to tell when the pipe has finished.
-
-`miss.pipe` does these two things for you, ensuring you handle stream errors 100% of the time (unhandled errors are probably the most common bug in most node streams code)
-
-#### original module
-
-`miss.pipe` is provided by [`require('pump')`](https://www.npmjs.com/package/pump)
-
-#### example
-
-```js
-// lets do a simple file copy
-var fs = require('fs')
-
-var read = fs.createReadStream('./original.zip')
-var write = fs.createWriteStream('./copy.zip')
-
-// use miss.pipe instead of read.pipe(write)
-miss.pipe(read, write, function (err) {
- if (err) return console.error('Copy error!', err)
- console.log('Copied successfully')
-})
-```
-
-### each
-
-##### `miss.each(stream, each, [done])`
-
-Iterate the data in `stream` one chunk at a time. Your `each` function will be called with `(data, next)` where data is a data chunk and next is a callback. Call `next` when you are ready to consume the next chunk.
-
-Optionally you can call `next` with an error to destroy the stream. You can also pass the optional third argument, `done`, which is a function that will be called with `(err)` when the stream ends. The `err` argument will be populated with an error if the stream emitted an error.
-
-#### original module
-
-`miss.each` is provided by [`require('stream-each')`](https://www.npmjs.com/package/stream-each)
-
-#### example
-
-```js
-var fs = require('fs')
-var split = require('split2')
-
-var newLineSeparatedNumbers = fs.createReadStream('numbers.txt')
-
-var pipeline = miss.pipeline(newLineSeparatedNumbers, split())
-miss.each(pipeline, eachLine, done)
-var sum = 0
-
-function eachLine (line, next) {
- sum += parseInt(line.toString())
- next()
-}
-
-function done (err) {
- if (err) throw err
- console.log('sum is', sum)
-}
-```
-
-### pipeline
-
-##### `var pipeline = miss.pipeline(stream1, stream2, stream3, ...)`
-
-Builds a pipeline from all the transform streams passed in as arguments by piping them together and returning a single stream object that lets you write to the first stream and read from the last stream.
-
-If you are pumping object streams together use `pipeline = miss.pipeline.obj(s1, s2, ...)`.
-
-If any of the streams in the pipeline emits an error or gets destroyed, or you destroy the stream it returns, all of the streams will be destroyed and cleaned up for you.
-
-#### original module
-
-`miss.pipeline` is provided by [`require('pumpify')`](https://www.npmjs.com/package/pumpify)
-
-#### example
-
-```js
-// first create some transform streams (note: these two modules are fictional)
-var imageResize = require('image-resizer-stream')({width: 400})
-var pngOptimizer = require('png-optimizer-stream')({quality: 60})
-
-// instead of doing a.pipe(b), use pipeline
-var resizeAndOptimize = miss.pipeline(imageResize, pngOptimizer)
-// `resizeAndOptimize` is a transform stream. when you write to it, it writes
-// to `imageResize`. when you read from it, it reads from `pngOptimizer`.
-// it handles piping all the streams together for you
-
-// use it like any other transform stream
-var fs = require('fs')
-
-var read = fs.createReadStream('./image.png')
-var write = fs.createWriteStream('./resized-and-optimized.png')
-
-miss.pipe(read, resizeAndOptimize, write, function (err) {
- if (err) return console.error('Image processing error!', err)
- console.log('Image processed successfully')
-})
-```
-
-### duplex
-
-##### `var duplex = miss.duplex([writable, readable, opts])`
-
-Take two separate streams, a writable and a readable, and turn them into a single [duplex (readable and writable) stream](https://nodejs.org/api/stream.html#stream_class_stream_duplex).
-
-The returned stream will emit data from the readable. When you write to it it writes to the writable.
-
-You can either supply the writable and the readable at the time you create the stream, or you can do it later using the `.setWritable` and `.setReadable` methods; data written to the stream in the meantime will be buffered for you.
-
-#### original module
-
-`miss.duplex` is provided by [`require('duplexify')`](https://www.npmjs.com/package/duplexify)
-
-#### example
-
-```js
-// lets spawn a process and take its stdout and stdin and combine them into 1 stream
-var child = require('child_process')
-
-// @- tells it to read from stdin, --data-binary sets 'raw' binary mode
-var curl = child.spawn('curl -X POST --data-binary @- http://foo.com')
-
-// duplexCurl will write to stdin and read from stdout
-var duplexCurl = miss.duplex(curl.stdin, curl.stdout)
-```
-
-### through
-
-##### `var transformer = miss.through([options, transformFunction, flushFunction])`
-
-Make a custom [transform stream](https://nodejs.org/docs/latest/api/stream.html#stream_class_stream_transform).
-
-The `options` object is passed to the internal transform stream and can be used to create an `objectMode` stream (or use the shortcut `miss.through.obj([...])`)
-
-The `transformFunction` is called when data is available for the writable side and has the signature `(chunk, encoding, cb)`. Within the function, add data to the readable side any number of times with `this.push(data)`. Call `cb()` to indicate processing of the `chunk` is complete. Or to easily emit a single error or chunk, call `cb(err, chunk)`
-
-The `flushFunction`, with signature `(cb)`, is called just before the stream is complete and should be used to wrap up stream processing.
-
-#### original module
-
-`miss.through` is provided by [`require('through2')`](https://www.npmjs.com/package/through2)
-
-#### example
-
-```js
-var fs = require('fs')
-
-var read = fs.createReadStream('./boring_lowercase.txt')
-var write = fs.createWriteStream('./AWESOMECASE.TXT')
-
-// Leaving out the options object
-var uppercaser = miss.through(
- function (chunk, enc, cb) {
- cb(null, chunk.toString().toUpperCase())
- },
- function (cb) {
- cb(null, 'ONE LAST BIT OF UPPERCASE')
- }
-)
-
-miss.pipe(read, uppercaser, write, function (err) {
- if (err) return console.error('Trouble uppercasing!')
- console.log('Splendid uppercasing!')
-})
-```
-
-### from
-
-##### `miss.from([opts], read)`
-
-Make a custom [readable stream](https://nodejs.org/docs/latest/api/stream.html#stream_class_stream_readable).
-
-`opts` contains the options to pass on to the ReadableStream constructor e.g. for creating a readable object stream (or use the shortcut `miss.from.obj([...])`).
-
-Returns a readable stream that calls `read(size, next)` when data is requested from the stream.
-
-- `size` is the recommended amount of data (in bytes) to retrieve.
-- `next(err, chunk)` should be called when you're ready to emit more data.
-
-#### original module
-
-`miss.from` is provided by [`require('from2')`](https://www.npmjs.com/package/from2)
-
-#### example
-
-```js
-
-
-function fromString(string) {
- return miss.from(function(size, next) {
- // if there's no more content
- // left in the string, close the stream.
- if (string.length <= 0) return next(null, null)
-
- // Pull in a new chunk of text,
- // removing it from the string.
- var chunk = string.slice(0, size)
- string = string.slice(size)
-
- // Emit "chunk" from the stream.
- next(null, chunk)
- })
-}
-
-// pipe "hello world" out
-// to stdout.
-fromString('hello world').pipe(process.stdout)
-```
-
-### to
-
-##### `miss.to([options], write, [flush])`
-
-Make a custom [writable stream](https://nodejs.org/docs/latest/api/stream.html#stream_class_stream_writable).
-
-`opts` contains the options to pass on to the WritableStream constructor e.g. for creating a writable object stream (or use the shortcut `miss.to.obj([...])`).
-
-Returns a writable stream that calls `write(data, enc, cb)` when data is written to the stream.
-
-- `data` is the received data to write to the destination.
-- `enc` encoding of the piece of data received.
-- `cb(err, data)` should be called when you're ready to write more data, or encountered an error.
-
-`flush(cb)` is called before `finish` is emitted and allows for cleanup steps to occur.
-
-#### original module
-
-`miss.to` is provided by [`require('flush-write-stream')`](https://www.npmjs.com/package/flush-write-stream)
-
-#### example
-
-```js
-var ws = miss.to(write, flush)
-
-ws.on('finish', function () {
- console.log('finished')
-})
-
-ws.write('hello')
-ws.write('world')
-ws.end()
-
-function write (data, enc, cb) {
- // i am your normal ._write method
- console.log('writing', data.toString())
- cb()
-}
-
-function flush (cb) {
- // i am called before finish is emitted
- setTimeout(cb, 1000) // wait 1 sec
-}
-```
-
-If you run the above it will produce the following output
-
-```
-writing hello
-writing world
-(nothing happens for 1 sec)
-finished
-```
-
-### concat
-
-##### `var concat = miss.concat(cb)`
-
-Returns a writable stream that concatenates all data written to the stream and calls a callback with the single result.
-
-Calling `miss.concat(cb)` returns a writable stream. `cb` is called when the writable stream is finished, e.g. when all data is done being written to it. `cb` is called with a single argument, `(data)`, which will contain the result of concatenating all the data written to the stream.
-
-Note that `miss.concat` will not handle stream errors for you. To handle errors, use `miss.pipe` or handle the `error` event manually.
-
-#### original module
-
-`miss.concat` is provided by [`require('concat-stream')`](https://www.npmjs.com/package/concat-stream)
-
-#### example
-
-```js
-var fs = require('fs')
-
-var readStream = fs.createReadStream('cat.png')
-var concatStream = miss.concat(gotPicture)
-
-function callback (err) {
- if (err) {
- console.error(err)
- process.exit(1)
- }
-}
-
-miss.pipe(readStream, concatStream, callback)
-
-function gotPicture(imageBuffer) {
- // imageBuffer is all of `cat.png` as a node.js Buffer
-}
-
-function handleError(err) {
- // handle your error appropriately here, e.g.:
- console.error(err) // print the error to STDERR
- process.exit(1) // exit program with non-zero exit code
-}
-```
-
-### finished
-
-##### `miss.finished(stream, cb)`
-
-Waits for `stream` to finish or error and then calls `cb` with `(err)`. `cb` will only be called once. `err` will be null if the stream finished without error, or else it will be populated with the error from the streams `error` event.
-
-This function is useful for simplifying stream handling code as it lets you handle success or error conditions in a single code path. It's used internally by `miss.pipe`.
-
-#### original module
-
-`miss.finished` is provided by [`require('end-of-stream')`](https://www.npmjs.com/package/end-of-stream)
-
-#### example
-
-```js
-var copySource = fs.createReadStream('./movie.mp4')
-var copyDest = fs.createWriteStream('./movie-copy.mp4')
-
-copySource.pipe(copyDest)
-
-miss.finished(copyDest, function(err) {
- if (err) return console.log('write failed', err)
- console.log('write success')
-})
-```
-
-### parallel
-
-##### `miss.parallel(concurrency, each)`
-
-This works like `through` except you can process items in parallel, while still preserving the original input order.
-
-This is handy if you wanna take advantage of node's async I/O and process streams of items in batches. With this module you can build your very own streaming parallel job queue.
-
-Note that `miss.parallel` preserves input ordering; if you don't need that, you can use [through2-concurrent](https://github.com/almost/through2-concurrent) instead, which is otherwise very similar.
-
-#### original module
-
-`miss.parallel` is provided by [`require('parallel-transform')`](https://www.npmjs.com/package/parallel-transform)
-
-#### example
-
-This example fetches the GET HTTP headers for a stream of input URLs 5 at a time in parallel.
-
-```js
-function getResponse (item, cb) {
- var r = request(item.url)
- r.on('error', function (err) {
- cb(err)
- })
- r.on('response', function (re) {
- cb(null, {url: item.url, date: new Date(), status: re.statusCode, headers: re.headers})
- r.abort()
- })
-}
-
-miss.pipe(
- fs.createReadStream('./urls.txt'), // one url per line
- split(),
- miss.parallel(5, getResponse),
- miss.through(function (row, enc, next) {
- console.log(JSON.stringify(row))
- next()
- })
-)
-```
-
-## see also
-
-- [substack/stream-handbook](https://github.com/substack/stream-handbook)
-- [nodejs.org/api/stream.html](https://nodejs.org/api/stream.html)
-- [awesome-nodejs-streams](https://github.com/thejmazz/awesome-nodejs-streams)
-
-## license
-
-Licensed under the BSD 2-clause license.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/package.json
deleted file mode 100644
index 12cfb5aac5..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/package.json
+++ /dev/null
@@ -1,137 +0,0 @@
-{
- "_from": "cacache@^10.0.4",
- "_id": "cacache@10.0.4",
- "_inBundle": false,
- "_integrity": "sha512-Dph0MzuH+rTQzGPNT9fAnrPmMmjKfST6trxJeK7NQuHRaVw24VzPRWTmg9MpcwOVQZO0E1FBICUlFeNaKPIfHA==",
- "_location": "/npm-registry-fetch/cacache",
- "_phantomChildren": {
- "concat-stream": "1.6.2",
- "duplexify": "3.6.0",
- "end-of-stream": "1.4.1",
- "flush-write-stream": "1.0.3",
- "from2": "2.3.0",
- "parallel-transform": "1.1.0",
- "pump": "2.0.1",
- "pumpify": "1.5.1",
- "stream-each": "1.2.2",
- "through2": "2.0.3"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "cacache@^10.0.4",
- "name": "cacache",
- "escapedName": "cacache",
- "rawSpec": "^10.0.4",
- "saveSpec": null,
- "fetchSpec": "^10.0.4"
- },
- "_requiredBy": [
- "/npm-registry-fetch/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/cacache/-/cacache-10.0.4.tgz",
- "_shasum": "6452367999eff9d4188aefd9a14e9d7c6a263460",
- "_spec": "cacache@^10.0.4",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/cacache/issues"
- },
- "bundleDependencies": false,
- "cache-version": {
- "content": "2",
- "index": "5"
- },
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "contributors": [
- {
- "name": "Charlotte Spencer",
- "email": "charlottelaspencer@gmail.com"
- },
- {
- "name": "Rebecca Turner",
- "email": "me@re-becca.org"
- }
- ],
- "dependencies": {
- "bluebird": "^3.5.1",
- "chownr": "^1.0.1",
- "glob": "^7.1.2",
- "graceful-fs": "^4.1.11",
- "lru-cache": "^4.1.1",
- "mississippi": "^2.0.0",
- "mkdirp": "^0.5.1",
- "move-concurrently": "^1.0.1",
- "promise-inflight": "^1.0.1",
- "rimraf": "^2.6.2",
- "ssri": "^5.2.4",
- "unique-filename": "^1.1.0",
- "y18n": "^4.0.0"
- },
- "deprecated": false,
- "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
- "devDependencies": {
- "benchmark": "^2.1.4",
- "chalk": "^2.3.1",
- "cross-env": "^5.1.3",
- "nyc": "^11.4.1",
- "require-inject": "^1.4.2",
- "safe-buffer": "^5.1.1",
- "standard": "^10.0.3",
- "standard-version": "^4.3.0",
- "tacks": "^1.2.2",
- "tap": "^11.1.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js",
- "lib",
- "locales"
- ],
- "homepage": "https://github.com/zkat/cacache#readme",
- "keywords": [
- "cache",
- "caching",
- "content-addressable",
- "sri",
- "sri hash",
- "subresource integrity",
- "cache",
- "storage",
- "store",
- "file store",
- "filesystem",
- "disk cache",
- "disk storage"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "cacache",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/cacache.git"
- },
- "scripts": {
- "benchmarks": "node test/benchmarks",
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "cross-env CACACHE_UPDATE_LOCALE_FILES=true nyc --all -- tap -J test/*.js",
- "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "10.0.4"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/put.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/put.js
deleted file mode 100644
index fe1293e5e7..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/put.js
+++ /dev/null
@@ -1,71 +0,0 @@
-'use strict'
-
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const write = require('./lib/content/write')
-const to = require('mississippi').to
-
-module.exports = putData
-function putData (cache, key, data, opts) {
- opts = opts || {}
- return write(cache, data, opts).then(res => {
- // TODO - stop modifying opts
- opts.size = res.size
- return index.insert(cache, key, res.integrity, opts).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, data, opts)
- }
- return res.integrity
- })
- })
-}
-
-module.exports.stream = putStream
-function putStream (cache, key, opts) {
- opts = opts || {}
- let integrity
- let size
- const contentStream = write.stream(
- cache, opts
- ).on('integrity', int => {
- integrity = int
- }).on('size', s => {
- size = s
- })
- let memoData
- let memoTotal = 0
- const stream = to((chunk, enc, cb) => {
- contentStream.write(chunk, enc, () => {
- if (opts.memoize) {
- if (!memoData) { memoData = [] }
- memoData.push(chunk)
- memoTotal += chunk.length
- }
- cb()
- })
- }, cb => {
- contentStream.end(() => {
- // TODO - stop modifying `opts`
- opts.size = size
- index.insert(cache, key, integrity, opts).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
- }
- stream.emit('integrity', integrity)
- cb()
- })
- })
- })
- let erred = false
- stream.once('error', err => {
- if (erred) { return }
- erred = true
- contentStream.emit('error', err)
- })
- contentStream.once('error', err => {
- if (erred) { return }
- erred = true
- stream.emit('error', err)
- })
- return stream
-}
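Both `putData()` and `putStream()` above write the content first and only then insert the index entry, which is why the stream flavor buffers chunks when memoization is requested. A usage sketch via the public exports shown in locales/en.js; the cache path and keys are illustrative:

```js
const cacache = require('cacache')
const fs = require('fs')

// Buffer/string insert: resolves with the ssri integrity string of the content.
cacache.put('/tmp/my-cache', 'my-key', 'some data', { metadata: { example: true } })
  .then(integrity => console.log('stored as', integrity))

// Streaming insert: 'integrity' is emitted once the index entry is written.
fs.createReadStream('./big-file.bin')
  .pipe(cacache.put.stream('/tmp/my-cache', 'big-file'))
  .on('integrity', integrity => console.log('stored as', integrity))
```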
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/rm.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/rm.js
deleted file mode 100644
index e71a1d27b4..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/rm.js
+++ /dev/null
@@ -1,28 +0,0 @@
-'use strict'
-
-const BB = require('bluebird')
-
-const index = require('./lib/entry-index')
-const memo = require('./lib/memoization')
-const path = require('path')
-const rimraf = BB.promisify(require('rimraf'))
-const rmContent = require('./lib/content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-function entry (cache, key) {
- memo.clearMemoized()
- return index.delete(cache, key)
-}
-
-module.exports.content = content
-function content (cache, integrity) {
- memo.clearMemoized()
- return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-function all (cache) {
- memo.clearMemoized()
- return rimraf(path.join(cache, '*(content-*|index-*)'))
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/verify.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/verify.js
deleted file mode 100644
index db7763d7af..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/cacache/verify.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict'
-
-module.exports = require('./lib/verify')
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/CHANGELOG.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/CHANGELOG.md
deleted file mode 100644
index f70e02d71d..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/CHANGELOG.md
+++ /dev/null
@@ -1,29 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/zkat/figgy-pudding/compare/v2.0.0...v2.0.1) (2018-03-16)
-
-
-### Bug Fixes
-
-* **opts:** ignore non-object providers ([7b9c0f8](https://github.com/zkat/figgy-pudding/commit/7b9c0f8))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/figgy-pudding/compare/v1.0.0...v2.0.0) (2018-03-16)
-
-
-### Features
-
-* **api:** overhauled API with new opt handling concept ([e6cc929](https://github.com/zkat/figgy-pudding/commit/e6cc929))
-* **license:** relicense to ISC ([87479aa](https://github.com/zkat/figgy-pudding/commit/87479aa))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-* **api:** this is a completely different approach than previously
-used by this library. See the readme for the new API and an explanation.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/LICENSE.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/LICENSE.md
deleted file mode 100644
index 8d28acf866..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/README.md
deleted file mode 100644
index 3cdec3f81a..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/README.md
+++ /dev/null
@@ -1,121 +0,0 @@
-# figgy-pudding [![npm version](https://img.shields.io/npm/v/figgy-pudding.svg)](https://npm.im/figgy-pudding) [![license](https://img.shields.io/npm/l/figgy-pudding.svg)](https://npm.im/figgy-pudding) [![Travis](https://img.shields.io/travis/zkat/figgy-pudding.svg)](https://travis-ci.org/zkat/figgy-pudding) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/figgy-pudding?svg=true)](https://ci.appveyor.com/project/zkat/figgy-pudding) [![Coverage Status](https://coveralls.io/repos/github/zkat/figgy-pudding/badge.svg?branch=latest)](https://coveralls.io/github/zkat/figgy-pudding?branch=latest)
-
-# Death to the God Object! Now Bring Us Some Figgy Pudding!
-
-[`figgy-pudding`](https://github.com/zkat/figgy-pudding) is a simple JavaScript library for managing and composing cascading options objects -- hiding what needs to be hidden from each layer, without having to do a lot of manual munging and passing of options.
-
-## Install
-
-`$ npm install figgy-pudding`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [API](#api)
- * [`figgyPudding(spec)`](#figgy-pudding)
- * [`Opts(values)`](#opts)
-
-### Example
-
-```javascript
-const puddin = require('figgy-pudding')
-
-const RequestOpts = puddin({
- follow: {
- default: true
- },
- streaming: {
- default: false
- },
- log: {
- default: require('npmlog')
- }
-})
-
-const MyAppOpts = puddin({
- log: {
- default: require('npmlog')
- },
- cache: {
- default: './cache'
- }
-})
-
-function start (opts) {
- opts = MyAppOpts(opts)
- initCache(opts.get('cache'))
- opts.get('streaming') // => undefined
- reqStuff('https://npm.im/figgy-pudding', opts)
-}
-
-function reqStuff (uri, opts) {
- opts = RequestOpts(opts)
- require('request').get(uri, opts) // can't see `cache`
-}
-```
-
-### Features
-
-* Hide options from layer that didn't ask for it
-* Shared multi-layer options
-
-### API
-
-#### <a name="figgy-pudding"></a> `> figgyPudding({ key: { default: val } | String }, [opts])`
-
-Defines an Options constructor that can be used to collect only the needed
-options.
-
-An optional `default` property for specs can be used to specify default values
-if nothing was passed in.
-
-If the value for a spec is a string, it will be treated as an alias to that
-other key.
-
-##### Example
-
-```javascript
-const MyAppOpts = figgyPudding({
- lg: 'log',
- log: {
- default: () => require('npmlog')
- },
- cache: {}
-})
-```
-
-#### <a name="opts"></a> `> Opts(...providers)`
-
-Instantiates an options object defined by `figgyPudding()`, which uses
-`providers`, in order, to find requested properties.
-
-Each provider can be either a plain object, a `Map`-like object (that is, one
-with a `.get()` method) or another figgyPudding `Opts` object.
-
-When nesting `Opts` objects, their properties will not become available to the
-new object, but any further nested `Opts` that reference that property _will_ be
-able to read from their grandparent, as long as they define that key. Default
-values for nested `Opts` parents will be used, if found.
-
-##### Example
-
-```javascript
-const ReqOpts = figgyPudding({
- follow: {}
-})
-
-const opts = ReqOpts({
- follow: true,
- log: require('npmlog')
-})
-
-opts.get('follow') // => true
-opts.get('log') // => Error: ReqOpts does not define `log`
-
-const MoreOpts = figgyPudding({
- log: {}
-})
-MoreOpts(opts).get('log') // => npmlog object (passed in from original plain obj)
-MoreOpts(opts).get('follow') // => Error: MoreOpts does not define `follow`
-```
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/index.js
deleted file mode 100644
index c13d143862..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/index.js
+++ /dev/null
@@ -1,60 +0,0 @@
-'use strict'
-
-class FiggyPudding {
- constructor (specs, opts, providers) {
- this.specs = specs || {}
- this.opts = opts || (() => false)
- this.providers = providers
- this.isFiggyPudding = true
- }
- get (key) {
- return pudGet(this, key, true)
- }
-}
-
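-// Resolve `key` against the spec (following string aliases), walk the
-// providers in order for the first defined value, then fall back to the
-// spec's `default` (calling it first if it is a function).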
-function pudGet (pud, key, validate) {
- let spec = pud.specs[key]
- if (typeof spec === 'string') {
- key = spec
- spec = pud.specs[key]
- }
- if (validate && !spec && (!pud.opts.other || !pud.opts.other(key))) {
- throw new Error(`invalid config key requested: ${key}`)
- } else {
- if (!spec) { spec = {} }
- let ret
- for (let p of pud.providers) {
- if (p.isFiggyPudding) {
- ret = pudGet(p, key, false)
- } else if (typeof p.get === 'function') {
- ret = p.get(key)
- } else {
- ret = p[key]
- }
- if (ret !== undefined) {
- break
- }
- }
- if (ret === undefined && spec.default !== undefined) {
- if (typeof spec.default === 'function') {
- return spec.default()
- } else {
- return spec.default
- }
- } else {
- return ret
- }
- }
-}
-
-module.exports = figgyPudding
-function figgyPudding (specs, opts) {
- function factory () {
- return new FiggyPudding(
- specs,
- opts,
- [].slice.call(arguments).filter(x => x != null && typeof x === 'object')
- )
- }
- return factory
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/package.json
deleted file mode 100644
index 7e71124643..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/figgy-pudding/package.json
+++ /dev/null
@@ -1,70 +0,0 @@
-{
- "_from": "figgy-pudding@^2.0.1",
- "_id": "figgy-pudding@2.0.1",
- "_inBundle": false,
- "_integrity": "sha512-yIJPhIBi/oFdU/P+GSXjmk/rmGjuZkm7A5LTXZxNrEprXJXRK012FiI1BR1Pga+0d/d6taWWD+B5d2ozqaxHig==",
- "_location": "/npm-registry-fetch/figgy-pudding",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "figgy-pudding@^2.0.1",
- "name": "figgy-pudding",
- "escapedName": "figgy-pudding",
- "rawSpec": "^2.0.1",
- "saveSpec": null,
- "fetchSpec": "^2.0.1"
- },
- "_requiredBy": [
- "/npm-registry-fetch"
- ],
- "_resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-2.0.1.tgz",
- "_shasum": "56c8fc878e06e1090799b9bcc91cbd85c2c92278",
- "_spec": "figgy-pudding@^2.0.1",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/figgy-pudding/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Delicious, festive, cascading config/opts definitions",
- "devDependencies": {
- "standard": "^11.0.1",
- "standard-version": "^4.3.0",
- "tap": "^11.1.2",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/zkat/figgy-pudding#readme",
- "keywords": [
- "config",
- "options",
- "yummy"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "figgy-pudding",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/figgy-pudding.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "2.0.1"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/CHANGELOG.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/CHANGELOG.md
deleted file mode 100644
index 123133fe61..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/CHANGELOG.md
+++ /dev/null
@@ -1,525 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/make-fetch-happen/compare/v2.6.0...v3.0.0) (2018-03-12)
-
-
-### Bug Fixes
-
-* **license:** switch to ISC ([#49](https://github.com/zkat/make-fetch-happen/issues/49)) ([bf90c6d](https://github.com/zkat/make-fetch-happen/commit/bf90c6d))
-* **standard:** standard@11 update ([ff0aa70](https://github.com/zkat/make-fetch-happen/commit/ff0aa70))
-
-
-### BREAKING CHANGES
-
-* **license:** license changed from CC0 to ISC.
-
-
-
-<a name="2.6.0"></a>
-# [2.6.0](https://github.com/zkat/make-fetch-happen/compare/v2.5.0...v2.6.0) (2017-11-14)
-
-
-### Bug Fixes
-
-* **integrity:** disable node-fetch compress when checking integrity (#42) ([a7cc74c](https://github.com/zkat/make-fetch-happen/commit/a7cc74c))
-
-
-### Features
-
-* **onretry:** Add `options.onRetry` (#48) ([f90ccff](https://github.com/zkat/make-fetch-happen/commit/f90ccff))
-
-
-
-<a name="2.5.0"></a>
-# [2.5.0](https://github.com/zkat/make-fetch-happen/compare/v2.4.13...v2.5.0) (2017-08-24)
-
-
-### Bug Fixes
-
-* **agent:** support timeout durations greater than 30 seconds ([04875ae](https://github.com/zkat/make-fetch-happen/commit/04875ae)), closes [#35](https://github.com/zkat/make-fetch-happen/issues/35)
-
-
-### Features
-
-* **cache:** export cache deletion functionality (#40) ([3da4250](https://github.com/zkat/make-fetch-happen/commit/3da4250))
-
-
-
-<a name="2.4.13"></a>
-## [2.4.13](https://github.com/zkat/make-fetch-happen/compare/v2.4.12...v2.4.13) (2017-06-29)
-
-
-### Bug Fixes
-
-* **deps:** bump other deps for bugfixes ([eab8297](https://github.com/zkat/make-fetch-happen/commit/eab8297))
-* **proxy:** bump proxy deps with bugfixes (#32) ([632f860](https://github.com/zkat/make-fetch-happen/commit/632f860)), closes [#32](https://github.com/zkat/make-fetch-happen/issues/32)
-
-
-
-<a name="2.4.12"></a>
-## [2.4.12](https://github.com/zkat/make-fetch-happen/compare/v2.4.11...v2.4.12) (2017-06-06)
-
-
-### Bug Fixes
-
-* **cache:** encode x-local-cache-etc headers to be header-safe ([dc9fb1b](https://github.com/zkat/make-fetch-happen/commit/dc9fb1b))
-
-
-
-<a name="2.4.11"></a>
-## [2.4.11](https://github.com/zkat/make-fetch-happen/compare/v2.4.10...v2.4.11) (2017-06-05)
-
-
-### Bug Fixes
-
-* **deps:** bump deps with ssri fix ([bef1994](https://github.com/zkat/make-fetch-happen/commit/bef1994))
-
-
-
-<a name="2.4.10"></a>
-## [2.4.10](https://github.com/zkat/make-fetch-happen/compare/v2.4.9...v2.4.10) (2017-05-31)
-
-
-### Bug Fixes
-
-* **deps:** bump dep versions with bugfixes ([0af4003](https://github.com/zkat/make-fetch-happen/commit/0af4003))
-* **proxy:** use auth parameter for proxy authentication (#30) ([c687306](https://github.com/zkat/make-fetch-happen/commit/c687306))
-
-
-
-<a name="2.4.9"></a>
-## [2.4.9](https://github.com/zkat/make-fetch-happen/compare/v2.4.8...v2.4.9) (2017-05-25)
-
-
-### Bug Fixes
-
-* **cache:** use the passed-in promise for resolving cache stuff ([4c46257](https://github.com/zkat/make-fetch-happen/commit/4c46257))
-
-
-
-<a name="2.4.8"></a>
-## [2.4.8](https://github.com/zkat/make-fetch-happen/compare/v2.4.7...v2.4.8) (2017-05-25)
-
-
-### Bug Fixes
-
-* **cache:** pass uid/gid/Promise through to cache ([a847c92](https://github.com/zkat/make-fetch-happen/commit/a847c92))
-
-
-
-<a name="2.4.7"></a>
-## [2.4.7](https://github.com/zkat/make-fetch-happen/compare/v2.4.6...v2.4.7) (2017-05-24)
-
-
-### Bug Fixes
-
-* **deps:** pull in various fixes from deps ([fc2a587](https://github.com/zkat/make-fetch-happen/commit/fc2a587))
-
-
-
-<a name="2.4.6"></a>
-## [2.4.6](https://github.com/zkat/make-fetch-happen/compare/v2.4.5...v2.4.6) (2017-05-24)
-
-
-### Bug Fixes
-
-* **proxy:** choose agent for http(s)-proxy by protocol of destUrl ([ea4832a](https://github.com/zkat/make-fetch-happen/commit/ea4832a))
-* **proxy:** make socks proxy working ([1de810a](https://github.com/zkat/make-fetch-happen/commit/1de810a))
-* **proxy:** revert previous proxy solution ([563b0d8](https://github.com/zkat/make-fetch-happen/commit/563b0d8))
-
-
-
-<a name="2.4.5"></a>
-## [2.4.5](https://github.com/zkat/make-fetch-happen/compare/v2.4.4...v2.4.5) (2017-05-24)
-
-
-### Bug Fixes
-
-* **proxy:** use the destination url when determining agent ([1a714e7](https://github.com/zkat/make-fetch-happen/commit/1a714e7))
-
-
-
-<a name="2.4.4"></a>
-## [2.4.4](https://github.com/zkat/make-fetch-happen/compare/v2.4.3...v2.4.4) (2017-05-23)
-
-
-### Bug Fixes
-
-* **redirect:** handle redirects explicitly (#27) ([4c4af54](https://github.com/zkat/make-fetch-happen/commit/4c4af54))
-
-
-
-<a name="2.4.3"></a>
-## [2.4.3](https://github.com/zkat/make-fetch-happen/compare/v2.4.2...v2.4.3) (2017-05-06)
-
-
-### Bug Fixes
-
-* **redirect:** redirects now delete authorization if hosts fail to match ([c071805](https://github.com/zkat/make-fetch-happen/commit/c071805))
-
-
-
-<a name="2.4.2"></a>
-## [2.4.2](https://github.com/zkat/make-fetch-happen/compare/v2.4.1...v2.4.2) (2017-05-04)
-
-
-### Bug Fixes
-
-* **cache:** reduce race condition window by checking for content ([24544b1](https://github.com/zkat/make-fetch-happen/commit/24544b1))
-* **match:** Rewrite the conditional stream logic (#25) ([66bba4b](https://github.com/zkat/make-fetch-happen/commit/66bba4b))
-
-
-
-<a name="2.4.1"></a>
-## [2.4.1](https://github.com/zkat/make-fetch-happen/compare/v2.4.0...v2.4.1) (2017-04-28)
-
-
-### Bug Fixes
-
-* **memoization:** missed spots + allow passthrough of memo objs ([ac0cd12](https://github.com/zkat/make-fetch-happen/commit/ac0cd12))
-
-
-
-<a name="2.4.0"></a>
-# [2.4.0](https://github.com/zkat/make-fetch-happen/compare/v2.3.0...v2.4.0) (2017-04-28)
-
-
-### Bug Fixes
-
-* **memoize:** cacache had a broken memoizer ([8a9ed4c](https://github.com/zkat/make-fetch-happen/commit/8a9ed4c))
-
-
-### Features
-
-* **memoization:** only slurp stuff into memory if opts.memoize is not false ([0744adc](https://github.com/zkat/make-fetch-happen/commit/0744adc))
-
-
-
-<a name="2.3.0"></a>
-# [2.3.0](https://github.com/zkat/make-fetch-happen/compare/v2.2.6...v2.3.0) (2017-04-27)
-
-
-### Features
-
-* **agent:** added opts.strictSSL and opts.localAddress ([c35015a](https://github.com/zkat/make-fetch-happen/commit/c35015a))
-* **proxy:** Added opts.noProxy and NO_PROXY support ([f45c915](https://github.com/zkat/make-fetch-happen/commit/f45c915))
-
-
-
-<a name="2.2.6"></a>
-## [2.2.6](https://github.com/zkat/make-fetch-happen/compare/v2.2.5...v2.2.6) (2017-04-26)
-
-
-### Bug Fixes
-
-* **agent:** check uppercase & lowercase proxy env (#24) ([acf2326](https://github.com/zkat/make-fetch-happen/commit/acf2326)), closes [#22](https://github.com/zkat/make-fetch-happen/issues/22)
-* **deps:** switch to node-fetch-npm and stop bundling ([3db603b](https://github.com/zkat/make-fetch-happen/commit/3db603b))
-
-
-
-<a name="2.2.5"></a>
-## [2.2.5](https://github.com/zkat/make-fetch-happen/compare/v2.2.4...v2.2.5) (2017-04-23)
-
-
-### Bug Fixes
-
-* **deps:** bump cacache and use its size feature ([926c1d3](https://github.com/zkat/make-fetch-happen/commit/926c1d3))
-
-
-
-<a name="2.2.4"></a>
-## [2.2.4](https://github.com/zkat/make-fetch-happen/compare/v2.2.3...v2.2.4) (2017-04-18)
-
-
-### Bug Fixes
-
-* **integrity:** hash verification issues fixed ([07f9402](https://github.com/zkat/make-fetch-happen/commit/07f9402))
-
-
-
-<a name="2.2.3"></a>
-## [2.2.3](https://github.com/zkat/make-fetch-happen/compare/v2.2.2...v2.2.3) (2017-04-18)
-
-
-### Bug Fixes
-
-* **staleness:** responses older than 8h were never stale :< ([b54dd75](https://github.com/zkat/make-fetch-happen/commit/b54dd75))
-* **warning:** remove spurious warning, make format more spec-compliant ([2e4f6bb](https://github.com/zkat/make-fetch-happen/commit/2e4f6bb))
-
-
-
-<a name="2.2.2"></a>
-## [2.2.2](https://github.com/zkat/make-fetch-happen/compare/v2.2.1...v2.2.2) (2017-04-12)
-
-
-### Bug Fixes
-
-* **retry:** stop retrying 404s ([6fafd53](https://github.com/zkat/make-fetch-happen/commit/6fafd53))
-
-
-
-<a name="2.2.1"></a>
-## [2.2.1](https://github.com/zkat/make-fetch-happen/compare/v2.2.0...v2.2.1) (2017-04-10)
-
-
-### Bug Fixes
-
-* **deps:** move test-only deps to devDeps ([2daaf80](https://github.com/zkat/make-fetch-happen/commit/2daaf80))
-
-
-
-<a name="2.2.0"></a>
-# [2.2.0](https://github.com/zkat/make-fetch-happen/compare/v2.1.0...v2.2.0) (2017-04-09)
-
-
-### Bug Fixes
-
-* **cache:** treat caches as private ([57b7dc2](https://github.com/zkat/make-fetch-happen/commit/57b7dc2))
-
-
-### Features
-
-* **retry:** accept shorthand retry settings ([dfed69d](https://github.com/zkat/make-fetch-happen/commit/dfed69d))
-
-
-
-<a name="2.1.0"></a>
-# [2.1.0](https://github.com/zkat/make-fetch-happen/compare/v2.0.4...v2.1.0) (2017-04-09)
-
-
-### Features
-
-* **cache:** cache now obeys Age and a variety of other things (#13) ([7b9652d](https://github.com/zkat/make-fetch-happen/commit/7b9652d))
-
-
-
-<a name="2.0.4"></a>
-## [2.0.4](https://github.com/zkat/make-fetch-happen/compare/v2.0.3...v2.0.4) (2017-04-09)
-
-
-### Bug Fixes
-
-* **agent:** accept Request as fetch input, not just strings ([b71669a](https://github.com/zkat/make-fetch-happen/commit/b71669a))
-
-
-
-<a name="2.0.3"></a>
-## [2.0.3](https://github.com/zkat/make-fetch-happen/compare/v2.0.2...v2.0.3) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** seriously ([c29e7e7](https://github.com/zkat/make-fetch-happen/commit/c29e7e7))
-
-
-
-<a name="2.0.2"></a>
-## [2.0.2](https://github.com/zkat/make-fetch-happen/compare/v2.0.1...v2.0.2) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** use bundleDeps instead ([c36ebf0](https://github.com/zkat/make-fetch-happen/commit/c36ebf0))
-
-
-
-<a name="2.0.1"></a>
-## [2.0.1](https://github.com/zkat/make-fetch-happen/compare/v2.0.0...v2.0.1) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** make sure node-fetch tarball included in release ([3bf49d1](https://github.com/zkat/make-fetch-happen/commit/3bf49d1))
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/make-fetch-happen/compare/v1.7.0...v2.0.0) (2017-04-09)
-
-
-### Bug Fixes
-
-* **deps:** manually pull in newer node-fetch to avoid babel prod dep ([66e5e87](https://github.com/zkat/make-fetch-happen/commit/66e5e87))
-* **retry:** be more specific about when we retry ([a47b782](https://github.com/zkat/make-fetch-happen/commit/a47b782))
-
-
-### Features
-
-* **agent:** add ca/cert/key support to auto-agent (#15) ([57585a7](https://github.com/zkat/make-fetch-happen/commit/57585a7))
-
-
-### BREAKING CHANGES
-
-* **agent:** pac proxies are no longer supported.
-* **retry:** Retry logic has changed.
-
-* 404s, 420s, and 429s all retry now.
-* ENOTFOUND no longer retries.
-* Only ECONNRESET, ECONNREFUSED, EADDRINUSE, ETIMEDOUT, and `request-timeout` errors are retried.
-
-
-
-<a name="1.7.0"></a>
-# [1.7.0](https://github.com/zkat/make-fetch-happen/compare/v1.6.0...v1.7.0) (2017-04-08)
-
-
-### Features
-
-* **cache:** add useful headers to inform users about cached data ([9bd7b00](https://github.com/zkat/make-fetch-happen/commit/9bd7b00))
-
-
-
-<a name="1.6.0"></a>
-# [1.6.0](https://github.com/zkat/make-fetch-happen/compare/v1.5.1...v1.6.0) (2017-04-06)
-
-
-### Features
-
-* **agent:** better, keepalive-supporting, default http agents ([16277f6](https://github.com/zkat/make-fetch-happen/commit/16277f6))
-
-
-
-<a name="1.5.1"></a>
-## [1.5.1](https://github.com/zkat/make-fetch-happen/compare/v1.5.0...v1.5.1) (2017-04-05)
-
-
-### Bug Fixes
-
-* **cache:** bump cacache for its fixed error messages ([2f2b916](https://github.com/zkat/make-fetch-happen/commit/2f2b916))
-* **cache:** fix handling of errors in cache reads ([5729222](https://github.com/zkat/make-fetch-happen/commit/5729222))
-
-
-
-<a name="1.5.0"></a>
-# [1.5.0](https://github.com/zkat/make-fetch-happen/compare/v1.4.0...v1.5.0) (2017-04-04)
-
-
-### Features
-
-* **retry:** retry requests on 408 timeouts, too ([8d8b5bd](https://github.com/zkat/make-fetch-happen/commit/8d8b5bd))
-
-
-
-<a name="1.4.0"></a>
-# [1.4.0](https://github.com/zkat/make-fetch-happen/compare/v1.3.1...v1.4.0) (2017-04-04)
-
-
-### Bug Fixes
-
-* **cache:** stop relying on BB.catch ([2b04494](https://github.com/zkat/make-fetch-happen/commit/2b04494))
-
-
-### Features
-
-* **retry:** report retry attempt number as extra header ([fd50927](https://github.com/zkat/make-fetch-happen/commit/fd50927))
-
-
-
-<a name="1.3.1"></a>
-## [1.3.1](https://github.com/zkat/make-fetch-happen/compare/v1.3.0...v1.3.1) (2017-04-04)
-
-
-### Bug Fixes
-
-* **cache:** pretend cache entry is missing on ENOENT ([9c2bb26](https://github.com/zkat/make-fetch-happen/commit/9c2bb26))
-
-
-
-<a name="1.3.0"></a>
-# [1.3.0](https://github.com/zkat/make-fetch-happen/compare/v1.2.1...v1.3.0) (2017-04-04)
-
-
-### Bug Fixes
-
-* **cache:** if metadata is missing for some odd reason, ignore the entry ([a021a6b](https://github.com/zkat/make-fetch-happen/commit/a021a6b))
-
-
-### Features
-
-* **cache:** add special headers when request was loaded straight from cache ([8a7dbd1](https://github.com/zkat/make-fetch-happen/commit/8a7dbd1))
-* **cache:** allow configuring algorithms to be calculated on insertion ([bf4a0f2](https://github.com/zkat/make-fetch-happen/commit/bf4a0f2))
-
-
-
-<a name="1.2.1"></a>
-## [1.2.1](https://github.com/zkat/make-fetch-happen/compare/v1.2.0...v1.2.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** update cacache and ssri and change EBADCHECKSUM -> EINTEGRITY ([b6cf6f6](https://github.com/zkat/make-fetch-happen/commit/b6cf6f6))
-
-
-
-<a name="1.2.0"></a>
-# [1.2.0](https://github.com/zkat/make-fetch-happen/compare/v1.1.0...v1.2.0) (2017-04-03)
-
-
-### Features
-
-* **integrity:** full Subresource Integrity support (#10) ([a590159](https://github.com/zkat/make-fetch-happen/commit/a590159))
-
-
-
-<a name="1.1.0"></a>
-# [1.1.0](https://github.com/zkat/make-fetch-happen/compare/v1.0.1...v1.1.0) (2017-04-01)
-
-
-### Features
-
-* **opts:** fetch.defaults() for default options ([522a65e](https://github.com/zkat/make-fetch-happen/commit/522a65e))
-
-
-
-<a name="1.0.1"></a>
-## [1.0.1](https://github.com/zkat/make-fetch-happen/compare/v1.0.0...v1.0.1) (2017-04-01)
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-04-01)
-
-
-### Bug Fixes
-
-* **cache:** default on cache-control header ([b872a2c](https://github.com/zkat/make-fetch-happen/commit/b872a2c))
-* standard stuff and cache matching ([753f2c2](https://github.com/zkat/make-fetch-happen/commit/753f2c2))
-* **agent:** nudge around things with opts.agent ([ed62b57](https://github.com/zkat/make-fetch-happen/commit/ed62b57))
-* **agent:** {agent: false} has special behavior ([b8cc923](https://github.com/zkat/make-fetch-happen/commit/b8cc923))
-* **cache:** invalidation on non-GET ([fe78fac](https://github.com/zkat/make-fetch-happen/commit/fe78fac))
-* **cache:** make force-cache and only-if-cached work as expected ([f50e9df](https://github.com/zkat/make-fetch-happen/commit/f50e9df))
-* **cache:** more spec compliance ([d5a56db](https://github.com/zkat/make-fetch-happen/commit/d5a56db))
-* **cache:** only cache 200 gets ([0abb25a](https://github.com/zkat/make-fetch-happen/commit/0abb25a))
-* **cache:** only load cache code if cache opt is a string ([250fcd5](https://github.com/zkat/make-fetch-happen/commit/250fcd5))
-* **cache:** oops ([e3fa15a](https://github.com/zkat/make-fetch-happen/commit/e3fa15a))
-* **cache:** refactored warning removal into main file ([5b0a9f9](https://github.com/zkat/make-fetch-happen/commit/5b0a9f9))
-* **cache:** req constructor no longer needed in Cache ([5b74cbc](https://github.com/zkat/make-fetch-happen/commit/5b74cbc))
-* **cache:** standard fetch api calls cacheMode "cache" ([6fba805](https://github.com/zkat/make-fetch-happen/commit/6fba805))
-* **cache:** was using wrong method for non-GET/HEAD cache invalidation ([810763a](https://github.com/zkat/make-fetch-happen/commit/810763a))
-* **caching:** a bunch of cache-related fixes ([8ebda1d](https://github.com/zkat/make-fetch-happen/commit/8ebda1d))
-* **deps:** `cacache@6.3.0` - race condition fixes ([9528442](https://github.com/zkat/make-fetch-happen/commit/9528442))
-* **freshness:** fix regex for cacheControl matching ([070db86](https://github.com/zkat/make-fetch-happen/commit/070db86))
-* **freshness:** fixed default freshness heuristic value ([5d29e88](https://github.com/zkat/make-fetch-happen/commit/5d29e88))
-* **logging:** remove console.log calls ([a1d0a47](https://github.com/zkat/make-fetch-happen/commit/a1d0a47))
-* **method:** node-fetch guarantees uppercase ([a1d68d6](https://github.com/zkat/make-fetch-happen/commit/a1d68d6))
-* **opts:** simplified opts handling ([516fd6e](https://github.com/zkat/make-fetch-happen/commit/516fd6e))
-* **proxy:** pass proxy option directly to ProxyAgent ([3398460](https://github.com/zkat/make-fetch-happen/commit/3398460))
-* **retry:** false -> {retries: 0} ([297fbb6](https://github.com/zkat/make-fetch-happen/commit/297fbb6))
-* **retry:** only retry put if body is not a stream ([a24e599](https://github.com/zkat/make-fetch-happen/commit/a24e599))
-* **retry:** skip retries if body is a stream for ANY method ([780c0f8](https://github.com/zkat/make-fetch-happen/commit/780c0f8))
-
-
-### Features
-
-* **api:** initial implementation -- can make and cache requests ([7d55b49](https://github.com/zkat/make-fetch-happen/commit/7d55b49))
-* **fetch:** injectable cache, and retry support ([87b84bf](https://github.com/zkat/make-fetch-happen/commit/87b84bf))
-
-
-### BREAKING CHANGES
-
-* **cache:** opts.cache -> opts.cacheManager; opts.cacheMode -> opts.cache
-* **fetch:** opts.cache accepts a Cache-like obj or a path. Requests are now retried.
-* **api:** actual api implemented
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 8d28acf866..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/README.md
deleted file mode 100644
index 4d12d8dae7..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/README.md
+++ /dev/null
@@ -1,404 +0,0 @@
-# make-fetch-happen [![npm version](https://img.shields.io/npm/v/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![license](https://img.shields.io/npm/l/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![Travis](https://img.shields.io/travis/zkat/make-fetch-happen.svg)](https://travis-ci.org/zkat/make-fetch-happen) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/make-fetch-happen?svg=true)](https://ci.appveyor.com/project/zkat/make-fetch-happen) [![Coverage Status](https://coveralls.io/repos/github/zkat/make-fetch-happen/badge.svg?branch=latest)](https://coveralls.io/github/zkat/make-fetch-happen?branch=latest)
-
-
-[`make-fetch-happen`](https://github.com/zkat/make-fetch-happen) is a Node.js
-library that wraps [`node-fetch-npm`](https://github.com/npm/node-fetch-npm) with additional
-features [`node-fetch`](https://github.com/bitinn/node-fetch) doesn't intend to include, such as HTTP Cache support, request
-pooling, proxies, retries, [and more](#features)!
-
-## Install
-
-`$ npm install --save make-fetch-happen`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * [`fetch`](#fetch)
- * [`fetch.defaults`](#fetch-defaults)
- * [`node-fetch` options](#node-fetch-options)
- * [`make-fetch-happen` options](#extra-options)
- * [`opts.cacheManager`](#opts-cache-manager)
- * [`opts.cache`](#opts-cache)
- * [`opts.proxy`](#opts-proxy)
- * [`opts.noProxy`](#opts-no-proxy)
- * [`opts.ca, opts.cert, opts.key`](#https-opts)
- * [`opts.maxSockets`](#opts-max-sockets)
- * [`opts.retry`](#opts-retry)
- * [`opts.onRetry`](#opts-onretry)
- * [`opts.integrity`](#opts-integrity)
-* [Message From Our Sponsors](#wow)
-
-### Example
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
- cacheManager: './my-cache' // path where cache will be written (and read)
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen').then(res => {
- return res.json() // download the body as JSON
-}).then(body => {
- console.log(`got ${body.name} from web`)
- return fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'no-cache' // forces a conditional request
- })
-}).then(res => {
- console.log(res.status) // 304! cache validated!
- return res.json().then(body => {
- console.log(`got ${body.name} from cache`)
- })
-})
-```
-
-### Features
-
-* Builds around [`node-fetch`](https://npm.im/node-fetch) for the core [`fetch` API](https://fetch.spec.whatwg.org) implementation
-* Request pooling out of the box
-* Quite fast, really
-* Automatic HTTP-semantics-aware request retries
-* Cache-fallback automatic "offline mode"
-* Proxy support (http, https, socks, socks4, socks5)
-* Built-in request caching following full HTTP caching rules (`Cache-Control`, `ETag`, `304`s, cache fallback on error, etc).
-* Customize cache storage with any [Cache API](https://developer.mozilla.org/en-US/docs/Web/API/Cache)-compliant `Cache` instance. Cache to Redis!
-* Node.js Stream support
-* Transparent gzip and deflate support
-* [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) support
-* Literally punches nazis
-* (PENDING) Range request caching and resuming
-
-### Contributing
-
-The make-fetch-happen team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
-
-All participants and maintainers in this project are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
-
-Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
-
-Happy hacking!
-
-### API
-
-#### <a name="fetch"></a> `> fetch(uriOrRequest, [opts]) -> Promise<Response>`
-
-This function implements most of the [`fetch` API](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch): given a `uri` string or a `Request` instance, it will fire off an http request and return a Promise containing the relevant response.
-
-If `opts` is provided, the [`node-fetch`-specific options](#node-fetch-options) will be passed to that library. There are also [additional options](#extra-options) specific to make-fetch-happen that add various features, such as HTTP caching, integrity verification, proxy support, and more.
-
-##### Example
-
-```javascript
-fetch('https://google.com').then(res => res.buffer())
-```
-
-#### <a name="fetch-defaults"></a> `> fetch.defaults([defaultUrl], [defaultOpts])`
-
-Returns a new `fetch` function that will call `make-fetch-happen` using `defaultUrl` and `defaultOpts` as default values to any calls.
-
-A defaulted `fetch` will also have a `.defaults()` method, so they can be chained.
-
-##### Example
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
- cacheManager: './my-local-cache'
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen') // will always use the cache
-```
-
-#### <a name="node-fetch-options"></a> `> node-fetch options`
-
-The following options for `node-fetch` are used as-is:
-
-* method
-* body
-* redirect
-* follow
-* timeout
-* compress
-* size
-
-These other options are modified or augmented by make-fetch-happen:
-
-* headers - Default `User-Agent` set to make-fetch-happen. `Connection` is set to `keep-alive` or `close` automatically depending on `opts.agent`.
-* agent
- * If agent is null, an http or https Agent will be automatically used. By default, these will be `http.globalAgent` and `https.globalAgent`.
- * If [`opts.proxy`](#opts-proxy) is provided and `opts.agent` is null, the agent will be set to an appropriate proxy-handling agent.
- * If `opts.agent` is an object, it will be used as the request-pooling agent argument for this request.
- * If `opts.agent` is `false`, it will be passed as-is to the underlying request library. This causes a new Agent to be spawned for every request.
-
-For more details, see [the documentation for `node-fetch` itself](https://github.com/bitinn/node-fetch#options).
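-
-##### Example
-
-A minimal sketch of the `opts.agent` cases above (the registry URL is only a placeholder):
-
-```javascript
-const https = require('https')
-
-// An explicit Agent object becomes the request-pooling agent for this request.
-fetch('https://registry.npmjs.org/make-fetch-happen', {
-  agent: new https.Agent({ keepAlive: true, maxSockets: 25 })
-})
-
-// `agent: false` is passed through as-is, so a fresh agent is spawned per request.
-fetch('https://registry.npmjs.org/make-fetch-happen', { agent: false })
-```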
-
-#### <a name="extra-options"></a> `> make-fetch-happen options`
-
-make-fetch-happen augments the `node-fetch` API with additional features available through extra options. The following extra options are available:
-
-* [`opts.cacheManager`](#opts-cache-manager) - Cache target to read/write
-* [`opts.cache`](#opts-cache) - `fetch` cache mode. Controls cache *behavior*.
-* [`opts.proxy`](#opts-proxy) - Proxy agent
-* [`opts.noProxy`](#opts-no-proxy) - Domain segments to disable proxying for.
-* [`opts.ca, opts.cert, opts.key, opts.strictSSL`](#https-opts)
-* [`opts.localAddress`](#opts-local-address)
-* [`opts.maxSockets`](#opts-max-sockets)
-* [`opts.retry`](#opts-retry) - Request retry settings
-* [`opts.onRetry`](#opts-onretry) - a function called whenever a retry is attempted
-* [`opts.integrity`](#opts-integrity) - [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata.
-
-#### <a name="opts-cache-manager"></a> `> opts.cacheManager`
-
-Either a `String` or a `Cache`. If the former, it will be assumed to be a `Path` to be used as the cache root for [`cacache`](https://npm.im/cacache).
-
-If an object is provided, it will be assumed to be a compliant [`Cache` instance](https://developer.mozilla.org/en-US/docs/Web/API/Cache). Only `Cache.match()`, `Cache.put()`, and `Cache.delete()` are required. Options objects will not be passed in to `match()` or `delete()`.
-
-By implementing this API, you can customize the storage backend for make-fetch-happen itself -- for example, you could implement a cache that uses `redis` for caching, or simply keeps everything in memory. Most of the caching logic exists entirely on the make-fetch-happen side, so the only thing you need to worry about is reading, writing, and deleting, as well as making sure `fetch.Response` objects are what gets returned.
-
-You can refer to `cache.js` in the make-fetch-happen source code for a reference implementation.
-
-**NOTE**: Requests will not be cached unless their response bodies are consumed. You will need to use one of the `res.json()`, `res.buffer()`, etc methods on the response, or drain the `res.body` stream, in order for it to be written.
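-
-A minimal sketch: the entry is only written once the body has been consumed.
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
-  cacheManager: './my-cache'
-}).then(res => res.json()) // draining the body lets the cache entry be written
-```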
-
-The default cache manager also adds the following headers to cached responses:
-
-* `X-Local-Cache`: Path to the cache the content was found in
-* `X-Local-Cache-Key`: Unique cache entry key for this response
-* `X-Local-Cache-Hash`: Specific integrity hash for the cached entry
-* `X-Local-Cache-Time`: UTCString of the cache insertion time for the entry
-
-Using [`cacache`](https://npm.im/cacache), a call like this may be used to
-manually fetch the cached entry:
-
-```javascript
-const h = response.headers
-cacache.get(h.get('x-local-cache'), h.get('x-local-cache-key'))
-
-// grab content only, directly:
-cacache.get.byDigest(h.get('x-local-cache'), h.get('x-local-cache-hash'))
-```
-
-##### Example
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cacheManager: './my-local-cache'
-}) // -> 200-level response will be written to disk
-
-fetch('https://npm.im/cacache', {
- cacheManager: new MyCustomRedisCache(process.env.PORT)
-}) // -> 200-level response will be written to redis
-```
-
-A possible (minimal) implementation for `MyCustomRedisCache`:
-
-```javascript
-const bluebird = require('bluebird')
-const redis = require("redis")
-bluebird.promisifyAll(redis.RedisClient.prototype)
-class MyCustomRedisCache {
- constructor (opts) {
- this.redis = redis.createClient(opts)
- }
- match (req) {
- return this.redis.getAsync(req.url).then(res => {
- if (res) {
- const parsed = JSON.parse(res)
- return new fetch.Response(parsed.body, {
- url: req.url,
- headers: parsed.headers,
- status: 200
- })
- }
- })
- }
- put (req, res) {
- return res.buffer().then(body => {
- return this.redis.setAsync(req.url, JSON.stringify({
- body: body,
- headers: res.headers.raw()
- }))
- }).then(() => {
- // return the response itself
- return res
- })
- }
- 'delete' (req) {
- return this.redis.unlinkAsync(req.url)
- }
-}
-```
-
-#### <a name="opts-cache"></a> `> opts.cache`
-
-This option follows the standard `fetch` API cache option. This option will do nothing if [`opts.cacheManager`](#opts-cache-manager) is null. The following values are accepted (as strings):
-
-* `default` - Fetch will inspect the HTTP cache on the way to the network. If there is a fresh response it will be used. If there is a stale response a conditional request will be created, and a normal request otherwise. It then updates the HTTP cache with the response. If the revalidation request fails (for example, on a 500 or if you're offline), the stale response will be returned.
-* `no-store` - Fetch behaves as if there is no HTTP cache at all.
-* `reload` - Fetch behaves as if there is no HTTP cache on the way to the network. Ergo, it creates a normal request and updates the HTTP cache with the response.
-* `no-cache` - Fetch creates a conditional request if there is a response in the HTTP cache and a normal request otherwise. It then updates the HTTP cache with the response.
-* `force-cache` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it creates a normal request and updates the HTTP cache with the response.
-* `only-if-cached` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it returns a network error. (Can only be used when request’s mode is "same-origin". Any cached redirects will be followed assuming request’s redirect mode is "follow" and the redirects do not violate request’s mode.)
-
-(Note: option descriptions are taken from https://fetch.spec.whatwg.org/#http-network-or-cache-fetch)
-
-##### Example
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
- cacheManager: './my-cache'
-})
-
-// Will error with ENOTCACHED if we haven't already cached this url
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'only-if-cached'
-})
-
-// Will refresh any local content and cache the new response
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'reload'
-})
-
-// Will use any local data, even if stale. Otherwise, will hit network.
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- cache: 'force-cache'
-})
-```
-
-#### <a name="opts-proxy"></a> `> opts.proxy`
-
-A string or `url.parse`-d URI to proxy through. Different proxy handlers will be
-used depending on the proxy's protocol.
-
-Additionally, `process.env.HTTP_PROXY`, `process.env.HTTPS_PROXY`, and
-`process.env.PROXY` are used if present and no `opts.proxy` value is provided.
-
-`process.env.NO_PROXY` may also be configured to skip proxying requests for all, or specific, domains.
-
-##### Example
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- proxy: 'https://corporate.yourcompany.proxy:4445'
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen', {
- proxy: {
- protocol: 'https:',
- hostname: 'corporate.yourcompany.proxy',
- port: 4445
- }
-})
-```
-
-#### <a name="opts-no-proxy"></a> `> opts.noProxy`
-
-If present, should be a comma-separated string or an array of domain extensions
-that a proxy should _not_ be used for.
-
-This option may also be provided through `process.env.NO_PROXY`.
-
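-##### Example
-
-A minimal sketch, assuming an internal hostname that should bypass the corporate proxy:
-
-```javascript
-fetch('https://registry.internal.example.com/thing', {
-  proxy: 'https://corporate.yourcompany.proxy:4445',
-  noProxy: ['internal.example.com'] // a comma-separated string also works
-})
-```
-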
-#### <a name="https-opts"></a> `> opts.ca, opts.cert, opts.key, opts.strictSSL`
-
-These values are passed in directly to the HTTPS agent and will be used for both
-proxied and unproxied outgoing HTTPS requests. They mostly correspond to the
-same options the `https` module accepts, which will be themselves passed to
-`tls.connect()`. `opts.strictSSL` corresponds to `rejectUnauthorized`.
-
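-##### Example
-
-A minimal sketch, assuming an internal CA certificate read from disk (the path is a placeholder):
-
-```javascript
-const fs = require('fs')
-
-fetch('https://registry.internal.example.com/thing', {
-  ca: fs.readFileSync('/etc/ssl/my-internal-ca.pem'), // placeholder path
-  strictSSL: true // passed to the agent as `rejectUnauthorized`
-})
-```
-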
-#### <a name="opts-local-address"></a> `> opts.localAddress`
-
-Passed directly to `http` and `https` request calls. Determines the local
-address to bind to.
-
-#### <a name="opts-max-sockets"></a> `> opts.maxSockets`
-
-Default: 15
-
-Maximum number of active concurrent sockets to use for the underlying
-HTTP/HTTPS/proxy agents. This setting applies once per spawned agent.
-
-15 is probably a _pretty good value_ for most use-cases, and balances speed
-with, uh, not knocking out people's routers. 🤓
-
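-##### Example
-
-A minimal sketch raising the per-agent socket limit through `.defaults()`:
-
-```javascript
-const fetch = require('make-fetch-happen').defaults({
-  maxSockets: 30 // applies to each agent this instance spawns
-})
-
-fetch('https://registry.npmjs.org/make-fetch-happen')
-```
-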
-#### <a name="opts-retry"></a> `> opts.retry`
-
-An object that can be used to tune request retry settings. Retries will only be attempted on the following conditions:
-
-* The request method is NOT `POST`, AND
-* The response status is one of `408`, `420`, `429`, or any status in the 500-range, OR
-* The request errored with `ECONNRESET`, `ECONNREFUSED`, `EADDRINUSE`, `ETIMEDOUT`, or the `fetch` error `request-timeout`.
-
-The following are worth noting as explicitly not retried:
-
-* `getaddrinfo ENOTFOUND`, which is assumed to mean either the domain is unreachable or the user is offline. If a response is cached, it will be returned immediately.
-* `ECONNRESET` currently has no support for restarting. It will eventually be supported but requires a bit more juggling due to streaming.
-
-If `opts.retry` is `false`, it is equivalent to `{retries: 0}`
-
-If `opts.retry` is a number, it is equivalent to `{retries: num}`
-
-The following retry options are available if you want more control over it:
-
-* retries
-* factor
-* minTimeout
-* maxTimeout
-* randomize
-
-For details on what each of these do, refer to the [`retry`](https://npm.im/retry) documentation.
-
-##### Example
-
-```javascript
-fetch('https://flaky.site.com', {
- retry: {
- retries: 10,
- randomize: true
- }
-})
-
-fetch('http://reliable.site.com', {
- retry: false
-})
-
-fetch('http://one-more.site.com', {
- retry: 3
-})
-```
-
-#### <a name="opts-onretry"></a> `> opts.onRetry`
-
-A function called whenever a retry is attempted.
-
-##### Example
-
-```javascript
-fetch('https://flaky.site.com', {
- onRetry() {
- console.log('we will retry!')
- }
-})
-```
-
-#### <a name="opts-integrity"></a> `> opts.integrity`
-
-Matches the response body against the given [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. If verification fails, the request will fail with an `EINTEGRITY` error.
-
-`integrity` may either be a string or an [`ssri`](https://npm.im/ssri) `Integrity`-like.
-
-##### Example
-
-```javascript
-fetch('https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', {
- integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q='
-}) // -> ok
-
-fetch('https://malicious-registry.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', {
- integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q='
-}) // Error: EINTEGRITY
-```
-
-### <a name="wow"></a> Message From Our Sponsors
-
-![](stop.gif)
-
-![](happening.gif)
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/agent.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/agent.js
deleted file mode 100644
index 55675946ad..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/agent.js
+++ /dev/null
@@ -1,171 +0,0 @@
-'use strict'
-const LRU = require('lru-cache')
-const url = require('url')
-
-let AGENT_CACHE = new LRU({ max: 50 })
-let HttpsAgent
-let HttpAgent
-
-module.exports = getAgent
-
-function getAgent (uri, opts) {
- const parsedUri = url.parse(typeof uri === 'string' ? uri : uri.url)
- const isHttps = parsedUri.protocol === 'https:'
- const pxuri = getProxyUri(uri, opts)
-
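- // The cache key covers every option that can change how an agent is built,
- // so distinct proxy/TLS/local-address configurations get their own agents.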
- const key = [
- `https:${isHttps}`,
- pxuri
- ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
- : '>no-proxy<',
- `local-address:${opts.localAddress || '>no-local-address<'}`,
- `strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`,
- `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
- `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
- `key:${(isHttps && opts.key) || '>no-key<'}`
- ].join(':')
-
- if (opts.agent != null) { // `agent: false` has special behavior!
- return opts.agent
- }
-
- if (AGENT_CACHE.peek(key)) {
- return AGENT_CACHE.get(key)
- }
-
- if (pxuri) {
- const proxy = getProxy(pxuri, opts, isHttps)
- AGENT_CACHE.set(key, proxy)
- return proxy
- }
-
- if (isHttps && !HttpsAgent) {
- HttpsAgent = require('agentkeepalive').HttpsAgent
- } else if (!isHttps && !HttpAgent) {
- HttpAgent = require('agentkeepalive')
- }
-
- // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
- // of zero disables the timeout behavior (OS limits still apply). Else, if
- // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
- // the node-fetch-npm timeout will always fire first, giving us more
- // consistent errors.
- const agentTimeout = opts.timeout === 0 ? 0 : opts.timeout + 1
-
- const agent = isHttps ? new HttpsAgent({
- maxSockets: opts.maxSockets || 15,
- ca: opts.ca,
- cert: opts.cert,
- key: opts.key,
- localAddress: opts.localAddress,
- rejectUnauthorized: opts.strictSSL,
- timeout: agentTimeout
- }) : new HttpAgent({
- maxSockets: opts.maxSockets || 15,
- localAddress: opts.localAddress,
- timeout: agentTimeout
- })
- AGENT_CACHE.set(key, agent)
- return agent
-}
-
-function checkNoProxy (uri, opts) {
- const host = url.parse(uri).hostname.split('.').reverse()
- let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
- if (typeof noproxy === 'string') {
- noproxy = noproxy.split(/\s*,\s*/g)
- }
- return noproxy && noproxy.some(no => {
- const noParts = no.split('.').filter(x => x).reverse()
- if (!noParts.length) { return false }
- for (let i = 0; i < noParts.length; i++) {
- if (host[i] !== noParts[i]) {
- return false
- }
- }
- return true
- })
-}
-
-module.exports.getProcessEnv = getProcessEnv
-
-function getProcessEnv (env) {
- if (!env) { return }
-
- let value
-
- if (Array.isArray(env)) {
- for (let e of env) {
- value = process.env[e] ||
- process.env[e.toUpperCase()] ||
- process.env[e.toLowerCase()]
- if (typeof value !== 'undefined') { break }
- }
- }
-
- if (typeof env === 'string') {
- value = process.env[env] ||
- process.env[env.toUpperCase()] ||
- process.env[env.toLowerCase()]
- }
-
- return value
-}
-
-function getProxyUri (uri, opts) {
- const protocol = url.parse(uri).protocol
-
- const proxy = opts.proxy || (
- protocol === 'https:' && getProcessEnv('https_proxy')
- ) || (
- protocol === 'http:' && getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
- )
- if (!proxy) { return null }
-
- const parsedProxy = (typeof proxy === 'string') ? url.parse(proxy) : proxy
-
- return !checkNoProxy(uri, opts) && parsedProxy
-}
-
-let HttpProxyAgent
-let HttpsProxyAgent
-let SocksProxyAgent
-function getProxy (proxyUrl, opts, isHttps) {
- let popts = {
- host: proxyUrl.hostname,
- port: proxyUrl.port,
- protocol: proxyUrl.protocol,
- path: proxyUrl.path,
- auth: proxyUrl.auth,
- ca: opts.ca,
- cert: opts.cert,
- key: opts.key,
- timeout: opts.timeout === 0 ? 0 : opts.timeout + 1,
- localAddress: opts.localAddress,
- maxSockets: opts.maxSockets || 15,
- rejectUnauthorized: opts.strictSSL
- }
-
- if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
- if (!isHttps) {
- if (!HttpProxyAgent) {
- HttpProxyAgent = require('http-proxy-agent')
- }
-
- return new HttpProxyAgent(popts)
- } else {
- if (!HttpsProxyAgent) {
- HttpsProxyAgent = require('https-proxy-agent')
- }
-
- return new HttpsProxyAgent(popts)
- }
- }
- if (proxyUrl.protocol.startsWith('socks')) {
- if (!SocksProxyAgent) {
- SocksProxyAgent = require('socks-proxy-agent')
- }
-
- return new SocksProxyAgent(popts)
- }
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/cache.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/cache.js
deleted file mode 100644
index edb9b3d036..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/cache.js
+++ /dev/null
@@ -1,257 +0,0 @@
-'use strict'
-
-const cacache = require('cacache')
-const fetch = require('node-fetch-npm')
-const pipe = require('mississippi').pipe
-const ssri = require('ssri')
-const through = require('mississippi').through
-const to = require('mississippi').to
-const url = require('url')
-const stream = require('stream')
-
-const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB
-
-function cacheKey (req) {
- const parsed = url.parse(req.url)
- return `make-fetch-happen:request-cache:${
- url.format({
- protocol: parsed.protocol,
- slashes: parsed.slashes,
- host: parsed.host,
- hostname: parsed.hostname,
- pathname: parsed.pathname
- })
- }`
-}
-
-// This is a cacache-based implementation of the Cache standard,
-// using node-fetch.
-// docs: https://developer.mozilla.org/en-US/docs/Web/API/Cache
-//
-module.exports = class Cache {
- constructor (path, opts) {
- this._path = path
- this._uid = opts && opts.uid
- this._gid = opts && opts.gid
- this.Promise = (opts && opts.Promise) || Promise
- }
-
- // Returns a Promise that resolves to the response associated with the first
- // matching request in the Cache object.
- match (req, opts) {
- opts = opts || {}
- const key = cacheKey(req)
- return cacache.get.info(this._path, key).then(info => {
- return info && cacache.get.hasContent(
- this._path, info.integrity, opts
- ).then(exists => exists && info)
- }).then(info => {
- if (info && info.metadata && matchDetails(req, {
- url: info.metadata.url,
- reqHeaders: new fetch.Headers(info.metadata.reqHeaders),
- resHeaders: new fetch.Headers(info.metadata.resHeaders),
- cacheIntegrity: info.integrity,
- integrity: opts && opts.integrity
- })) {
- const resHeaders = new fetch.Headers(info.metadata.resHeaders)
- addCacheHeaders(resHeaders, this._path, key, info.integrity, info.time)
- if (req.method === 'HEAD') {
- return new fetch.Response(null, {
- url: req.url,
- headers: resHeaders,
- status: 200
- })
- }
- let body
- const cachePath = this._path
- // avoid opening cache file handles until a user actually tries to
- // read from it.
- if (opts.memoize !== false && info.size > MAX_MEM_SIZE) {
- body = new stream.PassThrough()
- const realRead = body._read
- body._read = function (size) {
- body._read = realRead
- pipe(
- cacache.get.stream.byDigest(cachePath, info.integrity, {
- memoize: opts.memoize
- }),
- body,
- err => err && body.emit('error', err))
- return realRead.call(this, size)
- }
- } else {
- let readOnce = false
- // cacache is much faster at bulk reads
- body = new stream.Readable({
- read () {
- if (readOnce) return this.push(null)
- readOnce = true
- cacache.get.byDigest(cachePath, info.integrity, {
- memoize: opts.memoize
- }).then(data => {
- this.push(data)
- this.push(null)
- }, err => this.emit('error', err))
- }
- })
- }
- return this.Promise.resolve(new fetch.Response(body, {
- url: req.url,
- headers: resHeaders,
- status: 200,
- size: info.size
- }))
- }
- })
- }
-
- // Takes both a request and its response and adds it to the given cache.
- put (req, response, opts) {
- opts = opts || {}
- const size = response.headers.get('content-length')
- const fitInMemory = !!size && opts.memoize !== false && size < MAX_MEM_SIZE
- const ckey = cacheKey(req)
- const cacheOpts = {
- algorithms: opts.algorithms,
- metadata: {
- url: req.url,
- reqHeaders: req.headers.raw(),
- resHeaders: response.headers.raw()
- },
- uid: this._uid,
- gid: this._gid,
- size,
- memoize: fitInMemory && opts.memoize
- }
- if (req.method === 'HEAD' || response.status === 304) {
- // Update metadata without writing
- return cacache.get.info(this._path, ckey).then(info => {
- // Providing these will bypass content write
- cacheOpts.integrity = info.integrity
- addCacheHeaders(
- response.headers, this._path, ckey, info.integrity, info.time
- )
- return new this.Promise((resolve, reject) => {
- pipe(
- cacache.get.stream.byDigest(this._path, info.integrity, cacheOpts),
- cacache.put.stream(this._path, cacheKey(req), cacheOpts),
- err => err ? reject(err) : resolve(response)
- )
- })
- }).then(() => response)
- }
- let buf = []
- let bufSize = 0
- let cacheTargetStream = false
- const cachePath = this._path
- let cacheStream = to((chunk, enc, cb) => {
- if (!cacheTargetStream) {
- if (fitInMemory) {
- cacheTargetStream =
- to({highWaterMark: MAX_MEM_SIZE}, (chunk, enc, cb) => {
- buf.push(chunk)
- bufSize += chunk.length
- cb()
- }, done => {
- cacache.put(
- cachePath,
- cacheKey(req),
- Buffer.concat(buf, bufSize),
- cacheOpts
- ).then(
- () => done(),
- done
- )
- })
- } else {
- cacheTargetStream =
- cacache.put.stream(cachePath, cacheKey(req), cacheOpts)
- }
- }
- cacheTargetStream.write(chunk, enc, cb)
- }, done => {
- cacheTargetStream ? cacheTargetStream.end(done) : done()
- })
- const oldBody = response.body
- const newBody = through({highWaterMark: fitInMemory && MAX_MEM_SIZE})
- response.body = newBody
- oldBody.once('error', err => newBody.emit('error', err))
- newBody.once('error', err => oldBody.emit('error', err))
- cacheStream.once('error', err => newBody.emit('error', err))
- pipe(oldBody, to((chunk, enc, cb) => {
- cacheStream.write(chunk, enc, () => {
- newBody.write(chunk, enc, cb)
- })
- }, done => {
- cacheStream.end(() => {
- newBody.end(() => {
- done()
- })
- })
- }), err => err && newBody.emit('error', err))
- return response
- }
-
- // Finds the Cache entry whose key is the request, and if found, deletes the
- // Cache entry and returns a Promise that resolves to true. If no Cache entry
- // is found, it returns false.
- 'delete' (req, opts) {
- opts = opts || {}
- if (typeof opts.memoize === 'object') {
- if (opts.memoize.reset) {
- opts.memoize.reset()
- } else if (opts.memoize.clear) {
- opts.memoize.clear()
- } else {
- Object.keys(opts.memoize).forEach(k => {
- opts.memoize[k] = null
- })
- }
- }
- return cacache.rm.entry(
- this._path,
- cacheKey(req)
- // TODO - true/false
- ).then(() => false)
- }
-}
-
-function matchDetails (req, cached) {
- const reqUrl = url.parse(req.url)
- const cacheUrl = url.parse(cached.url)
- const vary = cached.resHeaders.get('Vary')
- // https://tools.ietf.org/html/rfc7234#section-4.1
- if (vary) {
- if (vary.match(/\*/)) {
- return false
- } else {
- const fieldsMatch = vary.split(/\s*,\s*/).every(field => {
- return cached.reqHeaders.get(field) === req.headers.get(field)
- })
- if (!fieldsMatch) {
- return false
- }
- }
- }
- if (cached.integrity) {
- const cachedSri = ssri.parse(cached.cacheIntegrity)
- const sri = ssri.parse(cached.integrity)
- const algo = sri.pickAlgorithm()
- if (cachedSri[algo] && !sri[algo].some(hash => {
- // cachedSri always has exactly one item per algorithm
- return cachedSri[algo][0].digest === hash.digest
- })) {
- return false
- }
- }
- reqUrl.hash = null
- cacheUrl.hash = null
- return url.format(reqUrl) === url.format(cacheUrl)
-}
-
-function addCacheHeaders (resHeaders, path, key, hash, time) {
- resHeaders.set('X-Local-Cache', encodeURIComponent(path))
- resHeaders.set('X-Local-Cache-Key', encodeURIComponent(key))
- resHeaders.set('X-Local-Cache-Hash', encodeURIComponent(hash))
- resHeaders.set('X-Local-Cache-Time', new Date(time).toUTCString())
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/index.js
deleted file mode 100644
index 0f2c164e19..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/index.js
+++ /dev/null
@@ -1,482 +0,0 @@
-'use strict'
-
-let Cache
-const url = require('url')
-const CachePolicy = require('http-cache-semantics')
-const fetch = require('node-fetch-npm')
-const pkg = require('./package.json')
-const retry = require('promise-retry')
-let ssri
-const Stream = require('stream')
-const getAgent = require('./agent')
-const setWarning = require('./warning')
-
-const isURL = /^https?:/
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
- 'ECONNRESET', // remote socket closed on us
- 'ECONNREFUSED', // remote host refused to open connection
- 'EADDRINUSE', // failed to bind to a local port (proxy?)
- 'ETIMEDOUT' // someone in the transaction is WAY TOO SLOW
- // Known codes we do NOT retry on:
- // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-]
-
-const RETRY_TYPES = [
- 'request-timeout'
-]
-
-// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch
-module.exports = cachingFetch
-cachingFetch.defaults = function (_uri, _opts) {
- const fetch = this
- if (typeof _uri === 'object') {
- _opts = _uri
- _uri = null
- }
-
- function defaultedFetch (uri, opts) {
- const finalOpts = Object.assign({}, _opts || {}, opts || {})
- return fetch(uri || _uri, finalOpts)
- }
-
- defaultedFetch.defaults = fetch.defaults
- defaultedFetch.delete = fetch.delete
- return defaultedFetch
-}
-
-cachingFetch.delete = cacheDelete
-function cacheDelete (uri, opts) {
- opts = configureOptions(opts)
- if (opts.cacheManager) {
- const req = new fetch.Request(uri, {
- method: opts.method,
- headers: opts.headers
- })
- return opts.cacheManager.delete(req, opts)
- }
-}
-
-function initializeCache (opts) {
- if (typeof opts.cacheManager === 'string') {
- if (!Cache) {
- // Default cacache-based cache
- Cache = require('./cache')
- }
-
- opts.cacheManager = new Cache(opts.cacheManager, opts)
- }
-
- opts.cache = opts.cache || 'default'
-
- if (opts.cache === 'default' && isHeaderConditional(opts.headers)) {
- // If header list contains `If-Modified-Since`, `If-None-Match`,
- // `If-Unmodified-Since`, `If-Match`, or `If-Range`, fetch will set cache
- // mode to "no-store" if it is "default".
- opts.cache = 'no-store'
- }
-}
-
-function configureOptions (_opts) {
- const opts = Object.assign({}, _opts || {})
- opts.method = (opts.method || 'GET').toUpperCase()
-
- if (opts.retry && typeof opts.retry === 'number') {
- opts.retry = { retries: opts.retry }
- }
-
- if (opts.retry === false) {
- opts.retry = { retries: 0 }
- }
-
- if (opts.cacheManager) {
- initializeCache(opts)
- }
-
- return opts
-}
-
-function initializeSsri () {
- if (!ssri) {
- ssri = require('ssri')
- }
-}
-
-function cachingFetch (uri, _opts) {
- const opts = configureOptions(_opts)
-
- if (opts.integrity) {
- initializeSsri()
- // if verifying integrity, node-fetch must not decompress
- opts.compress = false
- }
-
- const isCachable = (opts.method === 'GET' || opts.method === 'HEAD') &&
- opts.cacheManager &&
- opts.cache !== 'no-store' &&
- opts.cache !== 'reload'
-
- if (isCachable) {
- const req = new fetch.Request(uri, {
- method: opts.method,
- headers: opts.headers
- })
-
- return opts.cacheManager.match(req, opts).then(res => {
- if (res) {
- const warningCode = (res.headers.get('Warning') || '').match(/^\d+/)
- if (warningCode && +warningCode >= 100 && +warningCode < 200) {
- // https://tools.ietf.org/html/rfc7234#section-4.3.4
- //
- // If a stored response is selected for update, the cache MUST:
- //
- // * delete any Warning header fields in the stored response with
- // warn-code 1xx (see Section 5.5);
- //
- // * retain any Warning header fields in the stored response with
- // warn-code 2xx;
- //
- res.headers.delete('Warning')
- }
-
- if (opts.cache === 'default' && !isStale(req, res)) {
- return res
- }
-
- if (opts.cache === 'default' || opts.cache === 'no-cache') {
- return conditionalFetch(req, res, opts)
- }
-
- if (opts.cache === 'force-cache' || opts.cache === 'only-if-cached') {
- // 112 Disconnected operation
- // SHOULD be included if the cache is intentionally disconnected from
- // the rest of the network for a period of time.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(res, 112, 'Disconnected operation')
- return res
- }
- }
-
- if (!res && opts.cache === 'only-if-cached') {
- const errorMsg = `request to ${
- uri
- } failed: cache mode is 'only-if-cached' but no cached response available.`
-
- const err = new Error(errorMsg)
- err.code = 'ENOTCACHED'
- throw err
- }
-
- // Missing cache entry, or mode is default (if stale), reload, no-store
- return remoteFetch(req.url, opts)
- })
- }
-
- return remoteFetch(uri, opts)
-}
-
-function iterableToObject (iter) {
- const obj = {}
- for (let k of iter.keys()) {
- obj[k] = iter.get(k)
- }
- return obj
-}
-
-function makePolicy (req, res) {
- const _req = {
- url: req.url,
- method: req.method,
- headers: iterableToObject(req.headers)
- }
- const _res = {
- status: res.status,
- headers: iterableToObject(res.headers)
- }
-
- return new CachePolicy(_req, _res, { shared: false })
-}
-
-// https://tools.ietf.org/html/rfc7234#section-4.2
-function isStale (req, res) {
- if (!res) {
- return null
- }
-
- const _req = {
- url: req.url,
- method: req.method,
- headers: iterableToObject(req.headers)
- }
-
- const policy = makePolicy(req, res)
-
- const responseTime = res.headers.get('x-local-cache-time') ||
- res.headers.get('date') ||
- 0
-
- policy._responseTime = new Date(responseTime)
-
- const bool = !policy.satisfiesWithoutRevalidation(_req)
- return bool
-}
-
-function mustRevalidate (res) {
- return (res.headers.get('cache-control') || '').match(/must-revalidate/i)
-}
-
-function conditionalFetch (req, cachedRes, opts) {
- const _req = {
- url: req.url,
- method: req.method,
- headers: Object.assign({}, opts.headers || {})
- }
-
- const policy = makePolicy(req, cachedRes)
- opts.headers = policy.revalidationHeaders(_req)
-
- return remoteFetch(req.url, opts)
- .then(condRes => {
- const revalidatedPolicy = policy.revalidatedPolicy(_req, {
- status: condRes.status,
- headers: iterableToObject(condRes.headers)
- })
-
- if (condRes.status >= 500 && !mustRevalidate(cachedRes)) {
- // 111 Revalidation failed
- // MUST be included if a cache returns a stale response because an
- // attempt to revalidate the response failed, due to an inability to
- // reach the server.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(cachedRes, 111, 'Revalidation failed')
- return cachedRes
- }
-
- if (condRes.status === 304) { // 304 Not Modified
- condRes.body = cachedRes.body
- return opts.cacheManager.put(req, condRes, opts)
- .then(newRes => {
- newRes.headers = new fetch.Headers(revalidatedPolicy.policy.responseHeaders())
- return newRes
- })
- }
-
- return condRes
- })
- .then(res => res)
- .catch(err => {
- if (mustRevalidate(cachedRes)) {
- throw err
- } else {
- // 111 Revalidation failed
- // MUST be included if a cache returns a stale response because an
- // attempt to revalidate the response failed, due to an inability to
- // reach the server.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(cachedRes, 111, 'Revalidation failed')
- // 199 Miscellaneous warning
- // The warning text MAY include arbitrary information to be presented to
- // a human user, or logged. A system receiving this warning MUST NOT take
- // any automated action, besides presenting the warning to the user.
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- setWarning(
- cachedRes,
- 199,
- `Miscellaneous Warning ${err.code}: ${err.message}`
- )
-
- return cachedRes
- }
- })
-}
-
-function remoteFetchHandleIntegrity (res, integrity) {
- const oldBod = res.body
- const newBod = ssri.integrityStream({
- integrity
- })
- oldBod.pipe(newBod)
- res.body = newBod
- oldBod.once('error', err => {
- newBod.emit('error', err)
- })
- newBod.once('error', err => {
- oldBod.emit('error', err)
- })
-}
-
-function remoteFetch (uri, opts) {
- const agent = getAgent(uri, opts)
- const headers = Object.assign({
- 'connection': agent ? 'keep-alive' : 'close',
- 'user-agent': USER_AGENT
- }, opts.headers || {})
-
- const reqOpts = {
- agent,
- body: opts.body,
- compress: opts.compress,
- follow: opts.follow,
- headers: new fetch.Headers(headers),
- method: opts.method,
- redirect: 'manual',
- size: opts.size,
- counter: opts.counter,
- timeout: opts.timeout
- }
-
- return retry(
- (retryHandler, attemptNum) => {
- const req = new fetch.Request(uri, reqOpts)
- return fetch(req)
- .then(res => {
- res.headers.set('x-fetch-attempts', attemptNum)
-
- if (opts.integrity) {
- remoteFetchHandleIntegrity(res, opts.integrity)
- }
-
- const isStream = req.body instanceof Stream
-
- if (opts.cacheManager) {
- const isMethodGetHead = req.method === 'GET' ||
- req.method === 'HEAD'
-
- const isCachable = opts.cache !== 'no-store' &&
- isMethodGetHead &&
- makePolicy(req, res).storable() &&
- res.status === 200 // No other statuses should be stored!
-
- if (isCachable) {
- return opts.cacheManager.put(req, res, opts)
- }
-
- if (!isMethodGetHead) {
- return opts.cacheManager.delete(req).then(() => {
- if (res.status >= 500 && req.method !== 'POST' && !isStream) {
- if (typeof opts.onRetry === 'function') {
- opts.onRetry(res)
- }
-
- return retryHandler(res)
- }
-
- return res
- })
- }
- }
-
- const isRetriable = req.method !== 'POST' &&
- !isStream && (
- res.status === 408 || // Request Timeout
- res.status === 420 || // Enhance Your Calm (usually Twitter rate-limit)
- res.status === 429 || // Too Many Requests ("standard" rate-limiting)
- res.status >= 500 // Assume server errors are momentary hiccups
- )
-
- if (isRetriable) {
- if (typeof opts.onRetry === 'function') {
- opts.onRetry(res)
- }
-
- return retryHandler(res)
- }
-
- if (!fetch.isRedirect(res.status) || opts.redirect === 'manual') {
- return res
- }
-
-          // handle redirects - matches behavior of node-fetch: https://github.com/bitinn/node-fetch
- if (opts.redirect === 'error') {
- const err = new Error(`redirect mode is set to error: ${uri}`)
- err.code = 'ENOREDIRECT'
- throw err
- }
-
- if (!res.headers.get('location')) {
- const err = new Error(`redirect location header missing at: ${uri}`)
- err.code = 'EINVALIDREDIRECT'
- throw err
- }
-
- if (req.counter >= req.follow) {
- const err = new Error(`maximum redirect reached at: ${uri}`)
- err.code = 'EMAXREDIRECT'
- throw err
- }
-
- const resolvedUrl = url.resolve(req.url, res.headers.get('location'))
- let redirectURL = url.parse(resolvedUrl)
-
- if (isURL.test(res.headers.get('location'))) {
- redirectURL = url.parse(res.headers.get('location'))
- }
-
- // Remove authorization if changing hostnames (but not if just
- // changing ports or protocols). This matches the behavior of request:
- // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
- if (url.parse(req.url).hostname !== redirectURL.hostname) {
- req.headers.delete('authorization')
- }
-
- // for POST request with 301/302 response, or any request with 303 response,
- // use GET when following redirect
- if (res.status === 303 ||
- ((res.status === 301 || res.status === 302) && req.method === 'POST')) {
- opts.method = 'GET'
- opts.body = null
- req.headers.delete('content-length')
- }
-
- opts.headers = {}
- req.headers.forEach((value, name) => {
- opts.headers[name] = value
- })
-
- opts.counter = ++req.counter
- return cachingFetch(resolvedUrl, opts)
- })
- .catch(err => {
- const code = err.code === 'EPROMISERETRY' ? err.retried.code : err.code
-
- const isRetryError = RETRY_ERRORS.indexOf(code) === -1 &&
- RETRY_TYPES.indexOf(err.type) === -1
-
- if (req.method === 'POST' || isRetryError) {
- throw err
- }
-
- if (typeof opts.onRetry === 'function') {
- opts.onRetry(err)
- }
-
- return retryHandler(err)
- })
- },
- opts.retry
- ).catch(err => {
- if (err.status >= 400) {
- return err
- }
-
- throw err
- })
-}
-
-function isHeaderConditional (headers) {
- if (!headers || typeof headers !== 'object') {
- return false
- }
-
- const modifiers = [
- 'if-modified-since',
- 'if-none-match',
- 'if-unmodified-since',
- 'if-match',
- 'if-range'
- ]
-
- return Object.keys(headers)
- .some(h => modifiers.indexOf(h.toLowerCase()) !== -1)
-}
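For orientation, here is a minimal sketch of how this deleted make-fetch-happen entry point is typically consumed; the cache path, URL, and option values below are illustrative assumptions and are not taken from this diff:

```js
// Hypothetical usage sketch for make-fetch-happen@3 (options chosen for illustration).
const fetch = require('make-fetch-happen').defaults({
  cacheManager: './fetch-cache', // string path: configureOptions() wraps it in the cacache-backed Cache
  retry: 2,                      // normalized to { retries: 2 } by configureOptions()
  cache: 'default'               // standard fetch cache mode; conditional headers flip it to 'no-store'
})

fetch('https://registry.npmjs.org/npm')
  .then(res => res.json())
  .then(meta => console.log(meta.name, Object.keys(meta.versions).length))
  .catch(err => console.error(err.code, err.message))
```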
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index ff28e528a7..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,95 +0,0 @@
-{
- "_from": "make-fetch-happen@^3.0.0",
- "_id": "make-fetch-happen@3.0.0",
- "_inBundle": false,
- "_integrity": "sha512-FmWY7gC0mL6Z4N86vE14+m719JKE4H0A+pyiOH18B025gF/C113pyfb4gHDDYP5cqnRMHOz06JGdmffC/SES+w==",
- "_location": "/npm-registry-fetch/make-fetch-happen",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "make-fetch-happen@^3.0.0",
- "name": "make-fetch-happen",
- "escapedName": "make-fetch-happen",
- "rawSpec": "^3.0.0",
- "saveSpec": null,
- "fetchSpec": "^3.0.0"
- },
- "_requiredBy": [
- "/npm-registry-fetch"
- ],
- "_resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-3.0.0.tgz",
- "_shasum": "7b661d2372fc4710ab5cc8e1fa3c290eea69a961",
- "_spec": "make-fetch-happen@^3.0.0",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/make-fetch-happen/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "agentkeepalive": "^3.4.1",
- "cacache": "^10.0.4",
- "http-cache-semantics": "^3.8.1",
- "http-proxy-agent": "^2.1.0",
- "https-proxy-agent": "^2.2.0",
- "lru-cache": "^4.1.2",
- "mississippi": "^3.0.0",
- "node-fetch-npm": "^2.0.2",
- "promise-retry": "^1.1.1",
- "socks-proxy-agent": "^3.0.1",
- "ssri": "^5.2.4"
- },
- "deprecated": false,
- "description": "Opinionated, caching, retrying fetch client",
- "devDependencies": {
- "bluebird": "^3.5.1",
- "mkdirp": "^0.5.1",
- "nock": "^9.2.3",
- "npmlog": "^4.1.2",
- "nyc": "^11.4.1",
- "require-inject": "^1.4.2",
- "rimraf": "^2.6.2",
- "safe-buffer": "^5.1.1",
- "standard": "^11.0.0",
- "standard-version": "^4.3.0",
- "tacks": "^1.2.6",
- "tap": "^11.1.2",
- "weallbehave": "^1.0.0",
- "weallcontribute": "^1.0.7"
- },
- "files": [
- "*.js",
- "lib"
- ],
- "homepage": "https://github.com/zkat/make-fetch-happen#readme",
- "keywords": [
- "http",
- "request",
- "fetch",
- "mean girls",
- "caching",
- "cache",
- "subresource integrity"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "make-fetch-happen",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/make-fetch-happen.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "nyc --all -- tap --timeout=35 -J test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "3.0.0"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/warning.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/warning.js
deleted file mode 100644
index b8f13cf831..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/warning.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const url = require('url')
-
-module.exports = setWarning
-
-function setWarning (reqOrRes, code, message, replace) {
- // Warning = "Warning" ":" 1#warning-value
- // warning-value = warn-code SP warn-agent SP warn-text [SP warn-date]
- // warn-code = 3DIGIT
- // warn-agent = ( host [ ":" port ] ) | pseudonym
- // ; the name or pseudonym of the server adding
- // ; the Warning header, for use in debugging
- // warn-text = quoted-string
- // warn-date = <"> HTTP-date <">
- // (https://tools.ietf.org/html/rfc2616#section-14.46)
- const host = url.parse(reqOrRes.url).host
- const jsonMessage = JSON.stringify(message)
- const jsonDate = JSON.stringify(new Date().toUTCString())
- const header = replace ? 'set' : 'append'
-
- reqOrRes.headers[header](
- 'Warning',
- `${code} ${host} ${jsonMessage} ${jsonDate}`
- )
-}
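As a rough illustration of the header value this deleted helper appends, the snippet below re-creates the same warning-value grammar outside of a fetch Response object; the URL and message are made-up examples:

```js
const url = require('url')

// Mirrors the string built by setWarning() above, without needing a Headers object.
function formatWarning (resUrl, code, message) {
  const host = url.parse(resUrl).host
  return `${code} ${host} ${JSON.stringify(message)} ${JSON.stringify(new Date().toUTCString())}`
}

console.log(formatWarning('https://registry.npmjs.org/npm', 111, 'Revalidation failed'))
// e.g. 111 registry.npmjs.org "Revalidation failed" "Tue, 01 Jan 2019 00:00:00 GMT"
```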
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/.travis.yml b/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/.travis.yml
deleted file mode 100644
index 17f94330e7..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/.travis.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-language: node_js
-node_js:
- - "0.10"
-
-script: "npm test"
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/LICENSE b/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/LICENSE
deleted file mode 100644
index 757562ec59..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Mathias Buus
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE. \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/README.md
deleted file mode 100644
index 5029b27d68..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/README.md
+++ /dev/null
@@ -1,56 +0,0 @@
-# pump
-
-pump is a small node module that pipes streams together and destroys all of them if one of them closes.
-
-```
-npm install pump
-```
-
-[![build status](http://img.shields.io/travis/mafintosh/pump.svg?style=flat)](http://travis-ci.org/mafintosh/pump)
-
-## What problem does it solve?
-
-When using standard `source.pipe(dest)`, the source will _not_ be destroyed if dest emits close or an error.
-You are also not able to provide a callback to tell when the pipe has finished.
-
-pump does these two things for you.
-
-## Usage
-
-Simply pass the streams you want to pipe together to pump and add an optional callback
-
-``` js
-var pump = require('pump')
-var fs = require('fs')
-
-var source = fs.createReadStream('/dev/random')
-var dest = fs.createWriteStream('/dev/null')
-
-pump(source, dest, function(err) {
- console.log('pipe finished', err)
-})
-
-setTimeout(function() {
- dest.destroy() // when dest is closed pump will destroy source
-}, 1000)
-```
-
-You can use pump to pipe more than two streams together as well
-
-``` js
-var transform = someTransformStream()
-
-pump(source, transform, anotherTransform, dest, function(err) {
- console.log('pipe finished', err)
-})
-```
-
-If `source`, `transform`, `anotherTransform` or `dest` closes all of them will be destroyed.
-
-## License
-
-MIT
-
-## Related
-
-`pump` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
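A small, hedged sketch of a pattern commonly layered on top of the callback API documented above: wrapping pump in a Promise. The file paths are placeholders:

```js
const pump = require('pump')
const fs = require('fs')

// Promise wrapper over pump's error-first callback.
function pumpAsync (...streams) {
  return new Promise((resolve, reject) => {
    pump(...streams, err => (err ? reject(err) : resolve()))
  })
}

pumpAsync(
  fs.createReadStream('input.txt'),   // placeholder source
  fs.createWriteStream('output.txt')  // placeholder destination
)
  .then(() => console.log('pipe finished'))
  .catch(err => console.error('pipe failed:', err.message))
```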
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/index.js
deleted file mode 100644
index d9ca0335cb..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/index.js
+++ /dev/null
@@ -1,82 +0,0 @@
-var once = require('once')
-var eos = require('end-of-stream')
-var fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes
-
-var noop = function () {}
-var ancient = /^v?\.0/.test(process.version)
-
-var isFn = function (fn) {
- return typeof fn === 'function'
-}
-
-var isFS = function (stream) {
-  if (!ancient) return false // newer node versions do not need to treat fs in a special way
- if (!fs) return false // browser
- return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
-}
-
-var isRequest = function (stream) {
- return stream.setHeader && isFn(stream.abort)
-}
-
-var destroyer = function (stream, reading, writing, callback) {
- callback = once(callback)
-
- var closed = false
- stream.on('close', function () {
- closed = true
- })
-
- eos(stream, {readable: reading, writable: writing}, function (err) {
- if (err) return callback(err)
- closed = true
- callback()
- })
-
- var destroyed = false
- return function (err) {
- if (closed) return
- if (destroyed) return
- destroyed = true
-
- if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
-    if (isRequest(stream)) return stream.abort() // request.destroy just does .end - .abort is what we want
-
- if (isFn(stream.destroy)) return stream.destroy()
-
- callback(err || new Error('stream was destroyed'))
- }
-}
-
-var call = function (fn) {
- fn()
-}
-
-var pipe = function (from, to) {
- return from.pipe(to)
-}
-
-var pump = function () {
- var streams = Array.prototype.slice.call(arguments)
- var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
-
- if (Array.isArray(streams[0])) streams = streams[0]
- if (streams.length < 2) throw new Error('pump requires two streams per minimum')
-
- var error
- var destroys = streams.map(function (stream, i) {
- var reading = i < streams.length - 1
- var writing = i > 0
- return destroyer(stream, reading, writing, function (err) {
- if (!error) error = err
- if (err) destroys.forEach(call)
- if (reading) return
- destroys.forEach(call)
- callback(error)
- })
- })
-
- streams.reduce(pipe)
-}
-
-module.exports = pump
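To make the destroyer logic above concrete, here is a hedged sketch of what happens when the destination fails mid-pipe; the streams and the error are synthetic:

```js
const pump = require('pump')
const { Readable, Writable } = require('stream')

const source = new Readable({ read () { this.push('some data') } })
const dest = new Writable({
  write (chunk, enc, cb) { cb(new Error('disk full')) } // simulated write failure
})

pump(source, dest, err => {
  // The first error wins, and every stream in the chain is torn down.
  console.log('pipe finished with:', err && err.message) // 'disk full'
  console.log('source destroyed too:', source.destroyed) // true on Node versions that expose .destroyed
})
```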
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/package.json
deleted file mode 100644
index 4a3008e488..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/package.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
- "_from": "pump@^2.0.1",
- "_id": "pump@2.0.1",
- "_inBundle": false,
- "_integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==",
- "_location": "/npm-registry-fetch/pump",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "pump@^2.0.1",
- "name": "pump",
- "escapedName": "pump",
- "rawSpec": "^2.0.1",
- "saveSpec": null,
- "fetchSpec": "^2.0.1"
- },
- "_requiredBy": [
- "/npm-registry-fetch/cacache/mississippi"
- ],
- "_resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz",
- "_shasum": "12399add6e4cf7526d973cbc8b5ce2e2908b3909",
- "_spec": "pump@^2.0.1",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch/node_modules/cacache/node_modules/mississippi",
- "author": {
- "name": "Mathias Buus Madsen",
- "email": "mathiasbuus@gmail.com"
- },
- "browser": {
- "fs": false
- },
- "bugs": {
- "url": "https://github.com/mafintosh/pump/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "end-of-stream": "^1.1.0",
- "once": "^1.3.1"
- },
- "deprecated": false,
- "description": "pipe streams together and close all of them if one of them closes",
- "homepage": "https://github.com/mafintosh/pump#readme",
- "keywords": [
- "streams",
- "pipe",
- "destroy",
- "callback"
- ],
- "license": "MIT",
- "name": "pump",
- "repository": {
- "type": "git",
- "url": "git://github.com/mafintosh/pump.git"
- },
- "scripts": {
- "test": "node test-browser.js && node test-node.js"
- },
- "version": "2.0.1"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-browser.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-browser.js
deleted file mode 100644
index 75ea4a292e..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-browser.js
+++ /dev/null
@@ -1,62 +0,0 @@
-var stream = require('stream')
-var pump = require('./index')
-
-var rs = new stream.Readable()
-var ws = new stream.Writable()
-
-rs._read = function (size) {
- this.push(Buffer(size).fill('abc'))
-}
-
-ws._write = function (chunk, encoding, cb) {
- setTimeout(function () {
- cb()
- }, 100)
-}
-
-var toHex = function () {
- var reverse = new (require('stream').Transform)()
-
- reverse._transform = function (chunk, enc, callback) {
- reverse.push(chunk.toString('hex'))
- callback()
- }
-
- return reverse
-}
-
-var wsClosed = false
-var rsClosed = false
-var callbackCalled = false
-
-var check = function () {
- if (wsClosed && rsClosed && callbackCalled) {
- console.log('test-browser.js passes')
- clearTimeout(timeout)
- }
-}
-
-ws.on('finish', function () {
- wsClosed = true
- check()
-})
-
-rs.on('end', function () {
- rsClosed = true
- check()
-})
-
-pump(rs, toHex(), toHex(), toHex(), ws, function () {
- callbackCalled = true
- check()
-})
-
-setTimeout(function () {
- rs.push(null)
- rs.emit('close')
-}, 1000)
-
-var timeout = setTimeout(function () {
- check()
- throw new Error('timeout')
-}, 5000)
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-node.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-node.js
deleted file mode 100644
index 034a65414d..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/pump/test-node.js
+++ /dev/null
@@ -1,53 +0,0 @@
-var pump = require('./index')
-
-var rs = require('fs').createReadStream('/dev/random')
-var ws = require('fs').createWriteStream('/dev/null')
-
-var toHex = function () {
- var reverse = new (require('stream').Transform)()
-
- reverse._transform = function (chunk, enc, callback) {
- reverse.push(chunk.toString('hex'))
- callback()
- }
-
- return reverse
-}
-
-var wsClosed = false
-var rsClosed = false
-var callbackCalled = false
-
-var check = function () {
- if (wsClosed && rsClosed && callbackCalled) {
- console.log('test-node.js passes')
- clearTimeout(timeout)
- }
-}
-
-ws.on('close', function () {
- wsClosed = true
- check()
-})
-
-rs.on('close', function () {
- rsClosed = true
- check()
-})
-
-var res = pump(rs, toHex(), toHex(), toHex(), ws, function () {
- callbackCalled = true
- check()
-})
-
-if (res) {
- process.exit(1)
-}
-
-setTimeout(function () {
- rs.destroy()
-}, 1000)
-
-var timeout = setTimeout(function () {
- throw new Error('timeout')
-}, 5000)
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.npmignore b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.npmignore
deleted file mode 100644
index 6bcbf6f77d..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.npmignore
+++ /dev/null
@@ -1,5 +0,0 @@
-.git*
-.idea*
-node_modules/
-npm-debug.log
-coverage \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.travis.yml b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.travis.yml
deleted file mode 100644
index 9d67de5878..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/.travis.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-language: node_js
-node_js:
- - 0.10
- - 0.12
- - 4
- - 6
- - stable
-
-script: "npm run coverage"
-# Send coverage data to Coveralls
-after_script: "cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js" \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/LICENSE b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/LICENSE
deleted file mode 100644
index b2442a9e71..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2013 Josh Glazebrook
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/README.md
deleted file mode 100644
index 178ce8827f..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/README.md
+++ /dev/null
@@ -1,307 +0,0 @@
-smart-buffer [![Build Status](https://travis-ci.org/JoshGlazebrook/smart-buffer.svg?branch=master)](https://travis-ci.org/JoshGlazebrook/smart-buffer) [![Coverage Status](https://coveralls.io/repos/github/JoshGlazebrook/smart-buffer/badge.svg?branch=master)](https://coveralls.io/github/JoshGlazebrook/smart-buffer?branch=master)
-=============
-
-smart-buffer is a light Buffer wrapper that takes away the need to keep track of what position to read and write data to and from the underlying Buffer. It also adds null terminating string operations and **grows** as you add more data.
-
-
-### What it's useful for:
-
-I created smart-buffer because I wanted to simplify the process of using Buffer for building and reading network packets to send over a socket. Rather than having to keep track of which position I need to write a UInt16 to after adding a string of variable length, I simply don't have to.
-
-Key Features:
-* Proxies all of the Buffer write and read functions.
-* Keeps track of read and write positions for you.
-* Grows the internal Buffer as you add data to it.
-* Useful string operations. (Null terminating strings)
-* Allows for inserting values at specific points in the internal Buffer.
-
-#### Note:
-smart-buffer can be used for writing to an underlying buffer as well as reading from it. It does not, however, function correctly if you mix read and write operations with each other.
-
-## Installing:
-
-`npm install smart-buffer`
-
-## Using smart-buffer
-
-### Example
-
-Say you were building a packet that had to conform to the following protocol:
-
-`[PacketType:2][PacketLength:2][Data:XX]`
-
-To build this packet using the vanilla Buffer class, you would have to count up the length of the data payload beforehand. You would also need to keep track of the current "cursor" position in your Buffer so you write everything in the right places. With smart-buffer you don't have to do either of those things.
-
-```javascript
-function createLoginPacket(username, password, age, country) {
- var packet = new SmartBuffer();
- packet.writeUInt16LE(0x0060); // Login Packet Type/ID
- packet.writeStringNT(username);
- packet.writeStringNT(password);
- packet.writeUInt8(age);
- packet.writeStringNT(country);
- packet.writeUInt16LE(packet.length - 2, 2);
-
- return packet.toBuffer();
-}
-```
-With the above function, you now can do this:
-```javascript
-var login = createLoginPacket("Josh", "secret123", 22, "United States");
-
-// <Buffer 60 00 1e 00 4a 6f 73 68 00 73 65 63 72 65 74 31 32 33 00 16 55 6e 69 74 65 64 20 53 74 61 74 65 73 00>
-```
-Notice that the `[PacketLength:2]` part of the packet was inserted after we had added everything else, and as shown in the Buffer dump above, is in the correct location along with everything else.
-
-Reading back the packet we created above is just as easy:
-```javascript
-
-var reader = new SmartBuffer(login);
-
-var logininfo = {
- packetType: reader.readUInt16LE(),
- packetLength: reader.readUInt16LE(),
- username: reader.readStringNT(),
- password: reader.readStringNT(),
- age: reader.readUInt8(),
- country: reader.readStringNT()
-};
-
-/*
-{
- packetType: 96, (0x0060)
- packetLength: 30,
- username: 'Josh',
- password: 'secret123',
- age: 22,
- country: 'United States'
-};
-*/
-```
-
-# Api Reference:
-
-### Constructing a smart-buffer
-
-smart-buffer has a few different constructor signatures you can use. By default, utf8 encoding is used, and the internal Buffer length will be 4096. When reading from a Buffer, smart-buffer does NOT make a copy of the Buffer. It reads from the Buffer it was given.
-
-```javascript
-var SmartBuffer = require('smart-buffer');
-
-// Reading from an existing Buffer:
-var reader = new SmartBuffer(buffer);
-var reader = new SmartBuffer(buffer, 'ascii');
-
-// Writing to a new Buffer:
-var writer = new SmartBuffer(); // Defaults to utf8, 4096 length internal Buffer.
-var writer = new SmartBuffer(1024); // Defaults to utf8, 1024 length internal Buffer.
-var writer = new SmartBuffer('ascii'); // Sets to ascii encoding, 4096 length internal buffer.
-var writer = new SmartBuffer(1024, 'ascii'); // Sets to ascii encoding, 1024 length internal buffer.
-```
-
-## Reading Data
-
-smart-buffer supports all of the common read functions you will find in the vanilla Buffer class. The only difference is, you do not need to specify which location to start reading from. This is possible because as you read data out of a smart-buffer, it automatically progresses an internal read offset/position to know where to pick up from on the next read.
-
-## Reading Numeric Values
-
-When reading numeric values, you simply call the function you want, and the data is returned.
-
-Supported Operations:
-* readInt8
-* readInt16BE
-* readInt16LE
-* readInt32BE
-* readInt32LE
-* readUInt8
-* readUInt16BE
-* readUInt16LE
-* readUInt32BE
-* readUInt32LE
-* readFloatBE
-* readFloatLE
-* readDoubleBE
-* readDoubleLE
-
-```javascript
-var reader = new SmartBuffer(somebuffer);
-var num = reader.readInt8();
-```
-
-## Reading String Values
-
-When reading String values, you can either choose to read a null terminated string, or a string of a specified length.
-
-### SmartBuffer.readStringNT( [encoding] )
-> `String` **String encoding to use** - Defaults to the encoding set in the constructor, or utf8.
-
-returns `String`
-
-> Note: When readStringNT is called and there is no null character found, smart-buffer will read to the end of the internal Buffer.
-
-### SmartBuffer.readString( [length], [encoding] )
-### SmartBuffer.readString( [length] )
-### SmartBuffer.readString( [encoding] )
-> `Number` **Length of the string to read**
-
-> `String` **String encoding to use** - Defaults to the encoding set in the constructor, or utf8.
-
-returns `String`
-
-> Note: When readString is called without a specified length, smart-buffer will read to the end of the internal Buffer.
-
-
-
-## Reading Buffer Values
-
-### SmartBuffer.readBuffer( length )
-> `Number` **Length of data to read into a Buffer**
-
-returns `Buffer`
-
-> Note: This function uses `slice` to retrieve the Buffer.
-
-
-### SmartBuffer.readBufferNT()
-
-returns `Buffer`
-
-> Note: This reads the next sequence of bytes in the buffer until a null (0x00) value is found. (Null terminated buffer)
-> Note: This function uses `slice` to retrieve the Buffer.
-
-
-## Writing Data
-
-smart-buffer supports all of the common write functions you will find in the vanilla Buffer class. The only difference is, you do not need to specify which location to write to in your Buffer by default. You do however have the option of **inserting** a piece of data into your smart-buffer at a given location.
-
-
-## Writing Numeric Values
-
-
-For numeric values, you simply call the function you want, and the data is written at the internal Buffer's current write position. You can specify an offset/position to **insert** the given value at, but keep in mind this does not override data at the given position. This feature also does not work properly when inserting a value beyond the current internal length of the smart-buffer (length being the `.length` property of the smart-buffer instance you're writing to).
-
-Supported Operations:
-* writeInt8
-* writeInt16BE
-* writeInt16LE
-* writeInt32BE
-* writeInt32LE
-* writeUInt8
-* writeUInt16BE
-* writeUInt16LE
-* writeUInt32BE
-* writeUInt32LE
-* writeFloatBE
-* writeFloatLE
-* writeDoubleBE
-* writeDoubleLE
-
-The following signature is the same for all the above functions:
-
-### SmartBuffer.writeInt8( value, [offset] )
-> `Number` **A valid Int8 number**
-
-> `Number` **The position to insert this value at**
-
-returns this
-
-> Note: All write operations return `this` to allow for chaining.
-
-## Writing String Values
-
-When writing String values, you can choose to write either a null-terminated string or a non-null-terminated string.
-
-### SmartBuffer.writeStringNT( value, [offset], [encoding] )
-### SmartBuffer.writeStringNT( value, [offset] )
-### SmartBuffer.writeStringNT( value, [encoding] )
-> `String` **String value to write**
-
-> `Number` **The position to insert this String at**
-
-> `String` **The String encoding to use.** - Defaults to the encoding set in the constructor, or utf8.
-
-returns this
-
-### SmartBuffer.writeString( value, [offset], [encoding] )
-### SmartBuffer.writeString( value, [offset] )
-### SmartBuffer.writeString( value, [encoding] )
-> `String` **String value to write**
-
-> `Number` **The position to insert this String at**
-
-> `String` **The String encoding to use** - Defaults to the encoding set in the constructor, or utf8.
-
-returns this
-
-
-## Writing Buffer Values
-
-### SmartBuffer.writeBuffer( value, [offset] )
-> `Buffer` **Buffer value to write**
-
-> `Number` **The position to insert this Buffer's content at**
-
-returns this
-
-### SmartBuffer.writeBufferNT( value, [offset] )
-> `Buffer` **Buffer value to write**
-
-> `Number` **The position to insert this Buffer's content at**
-
-returns this
-
-
-## Utility Functions
-
-### SmartBuffer.clear()
-Resets the SmartBuffer to its default state where it can be reused for reading or writing.
-
-### SmartBuffer.remaining()
-
-returns `Number` The amount of data left to read based on the current read Position.
-
-### SmartBuffer.skip( value )
-> `Number` **The amount of bytes to skip ahead**
-
-Skips the read position ahead by the given value.
-
-returns this
-
-### SmartBuffer.rewind( value )
-> `Number` **The amount of bytes to rewind backwards**
-
-Rewinds the read position backwards by the given value.
-
-returns this
-
-### SmartBuffer.skipTo( position )
-> `Number` **The point to skip the read position to**
-
-Moves the read position to the given point.
-returns this
-
-### SmartBuffer.toBuffer()
-
-returns `Buffer` A Buffer containing the contents of the internal Buffer.
-
-> Note: This uses the slice function.
-
-### SmartBuffer.toString( [encoding] )
-> `String` **The String encoding to use** - Defaults to the encoding set in the constructor, or utf8.
-
-returns `String` The internal Buffer in String representation.
-
-### SmartBuffer.destroy()
-Attempts to destroy the smart-buffer.
-
-returns this
-
-## Properties
-
-### SmartBuffer.length
-
-returns `Number` **The length of the data that is being tracked in the internal Buffer** - Does NOT return the absolute length of the internal Buffer being written to.
-
-## License
-
-This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License). \ No newline at end of file
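To round out the API reference above, a brief hedged sketch exercising the positional helpers (`skip`, `rewind`, `remaining`) together with the null-terminated writers; the values are arbitrary:

```js
var SmartBuffer = require('smart-buffer');

var writer = new SmartBuffer();
writer.writeUInt16LE(0x0060);                    // packet type
writer.writeStringNT('Josh');                    // null-terminated string
writer.writeBufferNT(Buffer.from([0xde, 0xad])); // null-terminated buffer

var reader = new SmartBuffer(writer.toBuffer());
reader.skip(2);                       // jump past the packet type
console.log(reader.readStringNT());   // 'Josh'
console.log(reader.remaining());      // 3 bytes left (0xde 0xad 0x00)
reader.rewind(5);                     // back up over 'Josh' plus its null terminator
console.log(reader.readStringNT());   // 'Josh' again
```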
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js
deleted file mode 100644
index ef53b9fd4d..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js
+++ /dev/null
@@ -1,726 +0,0 @@
-"use strict";
-// The default Buffer size if one is not provided.
-const DEFAULT_SMARTBUFFER_SIZE = 4096;
-// The default string encoding to use for reading/writing strings.
-const DEFAULT_SMARTBUFFER_ENCODING = 'utf8';
-class SmartBuffer {
- /**
- * Creates a new SmartBuffer instance.
- *
- * @param arg1 { Number | BufferEncoding | Buffer | SmartBufferOptions }
- * @param arg2 { BufferEncoding }
- */
- constructor(arg1, arg2) {
- this.length = 0;
- this.encoding = DEFAULT_SMARTBUFFER_ENCODING;
- this.writeOffset = 0;
- this.readOffset = 0;
- // Initial buffer size provided
- if (typeof arg1 === 'number') {
- if (Number.isFinite(arg1) && Number.isInteger(arg1) && arg1 > 0) {
- this.buff = Buffer.allocUnsafe(arg1);
- }
- else {
- throw new Error('Invalid size provided. Size must be a valid integer greater than zero.');
- }
- }
- else if (typeof arg1 === 'string') {
- if (Buffer.isEncoding(arg1)) {
- this.buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
- this.encoding = arg1;
- }
- else {
- throw new Error('Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.');
- }
- }
- else if (arg1 instanceof Buffer) {
- this.buff = arg1;
- this.length = arg1.length;
- }
- else if (SmartBuffer.isSmartBufferOptions(arg1)) {
- // Checks for encoding
- if (arg1.encoding) {
- if (Buffer.isEncoding(arg1.encoding)) {
- this.encoding = arg1.encoding;
- }
- else {
- throw new Error('Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.');
- }
- }
- // Checks for initial size length
- if (arg1.size) {
- if (Number.isFinite(arg1.size) && Number.isInteger(arg1.size) && arg1.size > 0) {
- this.buff = Buffer.allocUnsafe(arg1.size);
- }
- else {
- throw new Error('Invalid size provided. Size must be a valid integer greater than zero.');
- }
- }
- else if (arg1.buff) {
- if (arg1.buff instanceof Buffer) {
- this.buff = arg1.buff;
- this.length = arg1.buff.length;
- }
- else {
- throw new Error('Invalid buffer provided in SmartBufferOptions.');
- }
- }
- else {
- this.buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
- }
- }
- else if (typeof arg1 === 'object') {
- throw new Error('Invalid object supplied to SmartBuffer constructor.');
- }
- else {
- this.buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
- }
- // Check for encoding (Buffer, Encoding) constructor.
- if (typeof arg2 === 'string') {
- if (Buffer.isEncoding(arg2)) {
- this.encoding = arg2;
- }
- else {
- throw new Error('Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.');
- }
- }
- }
- /**
- * Creates a new SmartBuffer instance with the provided internal Buffer size and optional encoding.
- *
- * @param size { Number } The size of the internal Buffer.
- * @param encoding { String } The BufferEncoding to use for strings.
- *
- * @return { SmartBuffer }
- */
- static fromSize(size, encoding) {
- return new this({
- size: size,
- encoding: encoding
- });
- }
- /**
- * Creates a new SmartBuffer instance with the provided Buffer and optional encoding.
- *
- * @param buffer { Buffer } The Buffer to use as the internal Buffer value.
- * @param encoding { String } The BufferEncoding to use for strings.
- *
- * @return { SmartBuffer }
- */
- static fromBuffer(buff, encoding) {
- return new this({
- buff: buff,
- encoding: encoding
- });
- }
- /**
- * Creates a new SmartBuffer instance with the provided SmartBufferOptions options.
- *
- * @param options { SmartBufferOptions } The options to use when creating the SmartBuffer instance.
- */
- static fromOptions(options) {
- return new this(options);
- }
- /**
- * Ensures that the internal Buffer is large enough to write data.
- *
- * @param minLength { Number } The minimum length of the data that needs to be written.
- * @param offset { Number } The offset of the data to be written.
- */
- ensureWriteable(minLength, offset) {
- const offsetVal = typeof offset === 'number' ? offset : 0;
- // Ensure there is enough internal Buffer capacity.
- this.ensureCapacity(this.length + minLength + offsetVal);
- // If offset is provided, copy data into appropriate location in regards to the offset.
- if (typeof offset === 'number') {
- this.buff.copy(this.buff, offsetVal + minLength, offsetVal, this.buff.length);
- }
- // Adjust instance length.
- this.length = Math.max(this.length + minLength, offsetVal + minLength);
- }
- /**
- * Ensures that the internal Buffer is large enough to write at least the given amount of data.
- *
- * @param minLength { Number } The minimum length of the data needs to be written.
- */
- ensureCapacity(minLength) {
- const oldLength = this.buff.length;
- if (minLength > oldLength) {
- let data = this.buff;
- let newLength = (oldLength * 3) / 2 + 1;
- if (newLength < minLength) {
- newLength = minLength;
- }
- this.buff = Buffer.allocUnsafe(newLength);
- data.copy(this.buff, 0, 0, oldLength);
- }
- }
- /**
- * Reads a numeric number value using the provided function.
- *
- * @param func { Function(offset: number) => number } The function to read data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes read.
- *
-     * @return { Number }
- */
- readNumberValue(func, byteSize) {
- // Call Buffer.readXXXX();
- const value = func.call(this.buff, this.readOffset);
- // Adjust internal read offset
- this.readOffset += byteSize;
- return value;
- }
- /**
- * Writes a numeric number value using the provided function.
- *
- * @param func { Function(offset: number, offset?) => number} The function to write data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes written.
- * @param value { Number } The number value to write.
- * @param offset { Number } the offset to write the number at.
- *
- */
- writeNumberValue(func, byteSize, value, offset) {
- const offsetVal = typeof offset === 'number' ? offset : this.writeOffset;
- // Ensure there is enough internal Buffer capacity. (raw offset is passed)
- this.ensureWriteable(byteSize, offset);
- // Call buffer.writeXXXX();
- func.call(this.buff, value, offsetVal);
- // Adjusts internal write offset
- this.writeOffset += byteSize;
- }
- // Signed integers
- /**
- * Reads an Int8 value from the current read position.
- *
- * @return { Number }
- */
- readInt8() {
- return this.readNumberValue(Buffer.prototype.readUInt8, 1);
- }
- /**
- * Reads an Int16BE value from the current read position.
- *
- * @return { Number }
- */
- readInt16BE() {
- return this.readNumberValue(Buffer.prototype.readUInt16BE, 2);
- }
- /**
- * Reads an Int16LE value from the current read position.
- *
- * @return { Number }
- */
- readInt16LE() {
- return this.readNumberValue(Buffer.prototype.readUInt16LE, 2);
- }
- /**
- * Reads an Int32BE value from the current read position.
- *
- * @return { Number }
- */
- readInt32BE() {
- return this.readNumberValue(Buffer.prototype.readUInt32BE, 4);
- }
- /**
- * Reads an Int32LE value from the current read position.
- *
- * @return { Number }
- */
- readInt32LE() {
- return this.readNumberValue(Buffer.prototype.readUInt32LE, 4);
- }
- /**
- * Writes an Int8 value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt8(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeInt8, 1, value, offset);
- return this;
- }
- /**
- * Writes an Int16BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt16BE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset);
- return this;
- }
- /**
- * Writes an Int16LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt16LE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset);
- return this;
- }
- /**
- * Writes an Int32BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt32BE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset);
- return this;
- }
- /**
- * Writes an Int32LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt32LE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset);
- return this;
- }
- // Unsigned Integers
- /**
- * Reads an UInt8 value from the current read position.
- *
- * @return { Number }
- */
- readUInt8() {
- return this.readNumberValue(Buffer.prototype.readUInt8, 1);
- }
- /**
- * Reads an UInt16BE value from the current read position.
- *
- * @return { Number }
- */
- readUInt16BE() {
- return this.readNumberValue(Buffer.prototype.readUInt16BE, 2);
- }
- /**
- * Reads an UInt16LE value from the current read position.
- *
- * @return { Number }
- */
- readUInt16LE() {
- return this.readNumberValue(Buffer.prototype.readUInt16LE, 2);
- }
- /**
- * Reads an UInt32BE value from the current read position.
- *
- * @return { Number }
- */
- readUInt32BE() {
- return this.readNumberValue(Buffer.prototype.readUInt32BE, 4);
- }
- /**
- * Reads an UInt32LE value from the current read position.
- *
- * @return { Number }
- */
- readUInt32LE() {
- return this.readNumberValue(Buffer.prototype.readUInt32LE, 4);
- }
- /**
- * Writes an UInt8 value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt8(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeUInt8, 1, value, offset);
- return this;
- }
- /**
- * Writes an UInt16BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt16BE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset);
- return this;
- }
- /**
- * Writes an UInt16LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt16LE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset);
- return this;
- }
- /**
- * Writes an UInt32BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt32BE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset);
- return this;
- }
- /**
- * Writes an UInt32LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt32LE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset);
- return this;
- }
- // Floating Point
- /**
- * Reads an FloatBE value from the current read position.
- *
- * @return { Number }
- */
- readFloatBE() {
- return this.readNumberValue(Buffer.prototype.readFloatBE, 4);
- }
- /**
- * Reads an FloatLE value from the current read position.
- *
- * @return { Number }
- */
- readFloatLE() {
- return this.readNumberValue(Buffer.prototype.readFloatLE, 4);
- }
- /**
- * Writes a FloatBE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeFloatBE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset);
- return this;
- }
- /**
- * Writes a FloatLE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeFloatLE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset);
- return this;
- }
- // Double Floating Point
- /**
-     * Reads a DoubleBE value from the current read position.
- *
- * @return { Number }
- */
- readDoubleBE() {
- return this.readNumberValue(Buffer.prototype.readDoubleBE, 8);
- }
- /**
- * Reads an DoubleLE value from the current read position.
- *
- * @return { Number }
- */
- readDoubleLE() {
- return this.readNumberValue(Buffer.prototype.readDoubleLE, 8);
- }
- /**
- * Writes a DoubleBE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeDoubleBE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset);
- return this;
- }
- /**
- * Writes a DoubleLE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeDoubleLE(value, offset) {
- this.writeNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset);
- return this;
- }
- // Strings
- /**
- * Reads a String from the current read position.
- *
- * @param length { Number } The number of bytes to read as a String.
- * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
- *
- * @return { String }
- */
- readString(length, encoding) {
- const lengthVal = Math.min(length, this.length - this.readOffset) || this.length - this.readOffset;
- const value = this.buff.slice(this.readOffset, this.readOffset + lengthVal).toString(encoding || this.encoding);
- this.readOffset += lengthVal;
- return value;
- }
- /**
- * Writes a String to the current write position.
- *
- * @param value { String } The String value to write.
- * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- */
- writeString(value, arg2, encoding) {
- let offsetVal = this.writeOffset;
- let encodingVal = this.encoding;
- // Check for offset
- if (typeof arg2 === 'number') {
- offsetVal = arg2;
- }
- else if (typeof arg2 === 'string') {
- if (Buffer.isEncoding(arg2)) {
- encodingVal = arg2;
- }
- else {
- throw new Error('Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.');
- }
- }
- // Check for encoding (third param)
- if (typeof encoding === 'string') {
- if (Buffer.isEncoding(encoding)) {
- encodingVal = encoding;
- }
- else {
- throw new Error('Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.');
- }
- }
- // Calculate bytelength of string.
- const byteLength = Buffer.byteLength(value, encodingVal);
- // Ensure there is enough internal Buffer capacity.
- this.ensureWriteable(byteLength, offsetVal);
- // Write value
- this.buff.write(value, offsetVal, byteLength, encodingVal);
- // Increment internal Buffer write offset;
- this.writeOffset += byteLength;
- return this;
- }
- /**
- * Reads a null-terminated String from the current read position.
- *
- * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
- *
- * @return { String }
- */
- readStringNT(encoding) {
-        // Set null character position to the end of the SmartBuffer instance.
- let nullPos = this.length;
- // Find next null character (if one is not found, default from above is used)
- for (let i = this.readOffset; i < this.length; i++) {
- if (this.buff[i] === 0x00) {
- nullPos = i;
- break;
- }
- }
- // Read string value
- const value = this.buff.slice(this.readOffset, nullPos);
- // Increment internal Buffer read offset
- this.readOffset = nullPos + 1;
- return value.toString(encoding || this.encoding);
- }
- /**
- * Writes a null-terminated String to the current write position.
- *
- * @param value { String } The String value to write.
-     * @param offset { Number | String } The offset to write the string to, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- */
- writeStringNT(value, offset, encoding) {
- // Write Values
- this.writeString(value, offset, encoding);
- this.writeUInt8(0x00, (typeof offset === 'number' ? offset + value.length : this.writeOffset));
- }
- // Buffers
- /**
- * Reads a Buffer from the internal read position.
- *
- * @param length { Number } The length of data to read as a Buffer.
- *
- * @return { Buffer }
- */
- readBuffer(length) {
- const lengthVal = typeof length === 'number' ? length : this.length;
- const endPoint = Math.min(this.length, this.readOffset + lengthVal);
- // Read buffer value
- const value = this.buff.slice(this.readOffset, endPoint);
- // Increment internal Buffer read offset
- this.readOffset = endPoint;
- return value;
- }
- /**
- * Writes a Buffer to the current write position.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- */
- writeBuffer(value, offset) {
- const offsetVal = typeof offset === 'number' ? offset : this.writeOffset;
- // Ensure there is enough internal Buffer capacity.
- this.ensureWriteable(value.length, offsetVal);
- // Write buffer value
- value.copy(this.buff, offsetVal);
- // Increment internal Buffer write offset
- this.writeOffset += value.length;
- return this;
- }
- /**
-     * Reads a null-terminated Buffer from the current read position.
- *
- * @return { Buffer }
- */
- readBufferNT() {
-        // Set null character position to the end of the SmartBuffer instance.
- let nullPos = this.length;
- // Find next null character (if one is not found, default from above is used)
- for (let i = this.readOffset; i < this.length; i++) {
- if (this.buff[i] === 0x00) {
- nullPos = i;
- break;
- }
- }
- // Read value
- const value = this.buff.slice(this.readOffset, nullPos);
- // Increment internal Buffer read offset
- this.readOffset = nullPos + 1;
- return value;
- }
- /**
- * Writes a null-terminated Buffer to the current write position.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- */
- writeBufferNT(value, offset) {
- // Write Values
- this.writeBuffer(value, offset);
- this.writeUInt8(0, (typeof offset === 'number' ? offset + value.length : this.writeOffset));
- return this;
- }
- /**
- * Clears the SmartBuffer instance to its original empty state.
- */
- clear() {
- this.writeOffset = 0;
- this.readOffset = 0;
- this.length = 0;
- }
- /**
- * Gets the remaining data left to be read from the SmartBuffer instance.
- *
- * @return { Number }
- */
- remaining() {
- return this.length - this.readOffset;
- }
- /**
- * Moves the read offset forward.
- *
- * @param amount { Number } The amount to move the read offset forward by.
- */
- skip(amount) {
- if (this.readOffset + amount > this.length) {
- throw new Error('Target position is beyond the bounds of the SmartBuffer size.');
- }
- this.readOffset += amount;
- }
- /**
- * Moves the read offset backwards.
- *
- * @param amount { Number } The amount to move the read offset backwards by.
- */
- rewind(amount) {
- if (this.readOffset - amount < 0) {
- throw new Error('Target position is beyond the bounds of the SmartBuffer size.');
- }
- this.readOffset -= amount;
- }
- /**
- * Moves the read offset to a specific position.
- *
- * @param position { Number } The position to move the read offset to.
- */
- skipTo(position) {
- this.moveTo(position);
- }
- /**
- * Moves the read offset to a specific position.
- *
- * @param position { Number } The position to move the read offset to.
- */
- moveTo(position) {
- if (position > this.length) {
- throw new Error('Target position is beyond the bounds of the SmartBuffer size.');
- }
- this.readOffset = position;
- }
- /**
- * Gets the value of the internal managed Buffer
- *
-     * @return { Buffer }
- */
- toBuffer() {
- return this.buff.slice(0, this.length);
- }
- /**
- * Gets the String value of the internal managed Buffer
- *
- * @param encoding { String } The BufferEncoding to display the Buffer as (defaults to instance level encoding).
- */
- toString(encoding) {
- const encodingVal = typeof encoding === 'string' ? encoding : this.encoding;
- if (Buffer.isEncoding(encodingVal)) {
- return this.buff.toString(encodingVal, 0, this.length);
- }
- else {
- throw new Error('Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.');
- }
- }
- /**
- * Destroys the SmartBuffer instance.
- */
- destroy() {
- this.clear();
- }
- /**
- * Type checking function that determines if an object is a SmartBufferOptions object.
- */
- static isSmartBufferOptions(options) {
- const castOptions = options;
- return castOptions && (castOptions.encoding !== undefined || castOptions.size !== undefined || castOptions.buff !== undefined);
- }
-}
-module.exports = SmartBuffer;
-//# sourceMappingURL=smartbuffer.js.map \ No newline at end of file
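For orientation, a minimal usage sketch of the SmartBuffer API whose compiled build is removed above; the install step is an assumption, and the snippet only exercises methods visible in the deleted source (writeStringNT, writeUInt16LE, readStringNT, readUInt16LE, remaining):

``` js
// Illustrative only: assumes `smart-buffer` is still installed locally.
var SmartBuffer = require('smart-buffer');

var writer = new SmartBuffer();            // defaults to utf8 and a 4096-byte internal Buffer
writer.writeStringNT('hello');             // null-terminated string
writer.writeUInt16LE(6699);                // unsigned 16-bit integer, little endian

var reader = new SmartBuffer(writer.toBuffer());
console.log(reader.readStringNT());        // 'hello'
console.log(reader.readUInt16LE());        // 6699
console.log(reader.remaining());           // 0 bytes left to read
```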
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js.map b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js.map
deleted file mode 100644
index 60af067a2c..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/build/smartbuffer.js.map
+++ /dev/null
@@ -1 +0,0 @@
-{"version":3,"file":"smartbuffer.js","sourceRoot":"","sources":["../src/smartbuffer.ts"],"names":[],"mappings":";AAaA,kDAAkD;AAClD,MAAM,wBAAwB,GAAG,IAAI,CAAC;AAEtC,mEAAmE;AACnE,MAAM,4BAA4B,GAAG,MAAM,CAAC;AAE5C;IASI;;;;;OAKG;IACH,YAAY,IAA4D,EAAE,IAAqB;QAbxF,WAAM,GAAW,CAAC,CAAC;QACnB,aAAQ,GAAmB,4BAA4B,CAAC;QAEvD,gBAAW,GAAW,CAAC,CAAC;QACxB,eAAU,GAAW,CAAC,CAAC;QAW3B,+BAA+B;QAC/B,EAAE,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YAE3B,EAAE,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC;gBAC9D,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;YACzC,CAAC;YAAC,IAAI,CAAC,CAAC;gBACJ,MAAM,IAAI,KAAK,CAAC,wEAAwE,CAAC,CAAC;YAC9F,CAAC;QAEL,CAAC;QAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YAClC,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;gBAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,wBAAwB,CAAC,CAAC;gBACzD,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;YACzB,CAAC;YAAC,IAAI,CAAC,CAAC;gBACJ,MAAM,IAAI,KAAK,CAAC,kGAAkG,CAAC,CAAC;YACxH,CAAC;QAEL,CAAC;QAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,YAAY,MAAM,CAAC,CAAC,CAAC;YAChC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YACjB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QAC9B,CAAC;QAAC,IAAI,CAAC,EAAE,CAAC,CAAC,WAAW,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YAEhD,sBAAsB;YACtB,EAAE,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAChB,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;oBACnC,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;gBAClC,CAAC;gBAAC,IAAI,CAAC,CAAC;oBACJ,MAAM,IAAK,KAAK,CAAC,kGAAkG,CAAC,CAAC;gBACzH,CAAC;YACL,CAAC;YAED,iCAAiC;YACjC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;gBACZ,EAAE,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC;oBAC7E,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBAC9C,CAAC;gBAAC,IAAI,CAAC,CAAC;oBACJ,MAAM,IAAI,KAAK,CAAC,wEAAwE,CAAC,CAAC;gBAC9F,CAAC;YAEL,CAAC;YAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;gBACnB,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,YAAY,MAAM,CAAC,CAAC,CAAC;oBAC9B,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;oBACtB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC;gBACnC,CAAC;gBAAC,IAAI,CAAC,CAAC;oBACJ,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;gBACtE,CAAC;YACL,CAAC;YAAC,IAAI,CAAC,CAAC;gBACJ,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,wBAAwB,CAAC,CAAC;YAC7D,CAAC;QACL,CAAC;QAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YAClC,MAAM,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;QAC3E,CAAC;QAAC,IAAI,CAAC,CAAC;YACJ,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,wBAAwB,CAAC,CAAC;QAC7D,CAAC;QAED,qDAAqD;QACrD,EAAE,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YAC3B,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC;YACzB,CAAC;YAAC,IAAI,CAAC,CAAC;gBACJ,MAAM,IAAI,KAAK,CAAC,kGAAkG,CAAC,CAAC;YACxH,CAAC;QACL,CAAC;IACL,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,QAAQ,CAAC,IAAY,EAAE,QAAyB;QAC1D,MAAM,CAAC,IAAI,IAAI,CAAC;YACZ,IAAI,EAAE,IAAI;YACV,QAAQ,EAAE,QAAQ;SACrB,CAAC,CAAC;IACP,CAAC;IAED;;;;;;;OAOG;IACI,MAAM,CAAC,UAAU,CAAC,IAAY,EAAE,QAAyB;QAC5D,MAAM,CAAC,IAAI,IAAI,CAAC;YACZ,IAAI,EAAE,IAAI;YACV,QAAQ,EAAE,QAAQ;SACrB,CAAC,CAAC;IACP,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,WAAW,CAAC,OAA2B;QACjD,MAAM,CAAC,IAAI,IAAI,CAAC,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;;;;OAKG;IACK,eAAe,CAAC,SAAiB,EAAE,MAAe;QACtD,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;QAE1D,mDAAmD;QACnD,IAAI,CAAC,cAAc,CAAC,IAAI,CAAC,MAAM,GAAG,SAAS,GAAG,SAAS,CAAC,CAAC;QAEzD,uFAAuF;QACvF,EAAE,CAAC,CAAC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC;YAC7B,IAAI,CAAC,IAAI,CAAC,IAAI,C
AAC,IAAI,CAAC,IAAI,EAAE,SAAS,GAAG,SAAS,EAAE,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAClF,CAAC;QAED,0BAA0B;QAC1B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,GAAG,SAAS,EAAE,SAAS,GAAG,SAAS,CAAC,CAAC;IAC3E,CAAC;IAGD;;;;OAIG;IACK,cAAc,CAAC,SAAiB;QACpC,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC;QAEnC,EAAE,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC;YACxB,IAAI,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC;YACrB,IAAI,SAAS,GAAG,CAAC,SAAS,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACxC,EAAE,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC;gBACxB,SAAS,GAAG,SAAS,CAAC;YAC1B,CAAC;YACD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YAE1C,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,SAAS,CAAC,CAAC;QAC1C,CAAC;IACL,CAAC;IAED;;;;;;;OAOG;IACK,eAAe,CAAC,IAAgC,EAAE,QAAgB;QACtE,0BAA0B;QAC1B,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;QAEpD,8BAA8B;QAC9B,IAAI,CAAC,UAAU,IAAI,QAAQ,CAAC;QAE5B,MAAM,CAAC,KAAK,CAAC;IACjB,CAAC;IAED;;;;;;;;OAQG;IACK,gBAAgB,CAAC,IAAgD,EAAE,QAAgB,EAAE,KAAa,EAAE,MAAe;QACvH,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC;QAEzE,0EAA0E;QAC1E,IAAI,CAAC,eAAe,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAEvC,2BAA2B;QAC3B,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,EAAE,SAAS,CAAC,CAAC;QAEvC,gCAAgC;QAChC,IAAI,CAAC,WAAW,IAAI,QAAQ,CAAC;IACjC,CAAC;IAGD,kBAAkB;IAElB;;;;OAIG;IACH,QAAQ;QACJ,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;IAC/D,CAAC;IAED;;;;OAIG;IACH,WAAW;QACP,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;OAIG;IACH,WAAW;QACP,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;OAIG;IACH,WAAW;QACP,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;OAIG;IACH,WAAW;QACP,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;;OAOG;IACH,SAAS,CAAC,KAAa,EAAE,MAAe;QACpC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACpE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACvC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACvE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACvC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACvE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACvC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACvE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACvC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACvE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED,oBAAoB;IAEpB;;;;OAIG;IACH,SAAS;QACL,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;IAC/D,CAAC;IAED;;;;OAIG;IACH,YAAY;QACR,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;OAIG;IACH,YAAY;QACR,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;OAIG;IACH,YAAY;QACR,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;OAIG;IACH,YAAY;QACR,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;;OAOG;IACH,UAAU,CAAC,KAAa,EAAE,MAAe;QACrC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACrE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,a
AAa,CAAC,KAAa,EAAE,MAAe;QACxC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QACxC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QACxC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QACxC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED,iBAAiB;IAGjB;;;;OAIG;IACH,WAAW;QACP,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;IACjE,CAAC;IAED;;;;OAIG;IACH,WAAW;QACP,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;IACjE,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACvC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACvE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,YAAY,CAAC,KAAa,EAAE,MAAe;QACvC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACvE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAGD,wBAAwB;IAExB;;;;OAIG;IACH,YAAY;QACR,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;OAIG;IACH,YAAY;QACR,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,SAAS,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QACxC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QACxC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,SAAS,CAAC,aAAa,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;QACxE,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAGD,UAAU;IAEV;;;;;;;OAOG;IACH,UAAU,CAAC,MAAe,EAAE,QAAyB;QACjD,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC;QACnG,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC,CAAC,QAAQ,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC;QAEhH,IAAI,CAAC,UAAU,IAAI,SAAS,CAAC;QAC7B,MAAM,CAAC,KAAK,CAAC;IACjB,CAAC;IAED;;;;;;OAMG;IACH,WAAW,CAAC,KAAa,EAAE,IAA8B,EAAE,QAAyB;QAChF,IAAI,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC;QACjC,IAAI,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC;QAEhC,mBAAmB;QACnB,EAAE,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YAC3B,SAAS,GAAG,IAAI,CAAC;QAErB,CAAC;QAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC;YAClC,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;gBAC1B,WAAW,GAAG,IAAI,CAAC;YACvB,CAAC;YAAC,IAAI,CAAC,CAAC;gBACJ,MAAM,IAAI,KAAK,CAAC,kGAAkG,CAAC,CAAC;YACxH,CAAC;QACL,CAAC;QAED,mCAAmC;QACnC,EAAE,CAAC,CAAC,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC;YAC/B,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;gBAC9B,WAAW,GAAG,QAAQ,CAAC;YAC3B,CAAC;YAAC,IAAI,CAAC,CAAC;gBACJ,MAAM,IAAI,KAAK,CAAC,kGAAkG,CAAC,CAAC;YACxH,CAAC;QACL,CAAC;QAED,kCAAkC;QAClC,MAAM,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;QAEzD,mDAAmD;QACnD,IAAI,CAAC,eAAe,CAAC,UAAU,EAAE,SAAS,CAAC,CAAC;QAE5C,cAAc;QACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,EAAE,UAAU,EAAE,WAAW,CAAC,CAAC;QAE3D,0CAA0C;QAC1C,IAAI,CAAC,WAAW,IAAI,UAAU,CAAC;QAC/B,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;;;OAMG;IACH,YAAY,CAAC,QAAyB;QAElC,+DAA+D;QAC/D,IAAI,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC;QAE1B,6EAA6E;QAC7E,GAAG,CAAA,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAA
E,EAAE,CAAC;YAChD,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC;gBACxB,OAAO,GAAG,CAAC,CAAC;gBACZ,KAAK,CAAC;YACV,CAAC;QACL,CAAC;QAED,oBAAoB;QACpB,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAExD,wCAAwC;QACxC,IAAI,CAAC,UAAU,GAAG,OAAO,GAAG,CAAC,CAAC;QAE9B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,aAAa,CAAC,KAAa,EAAE,MAAgC,EAAE,QAAyB;QACpF,eAAe;QACf,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;QAC1C,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,CAAC,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;IACnG,CAAC;IAED,UAAU;IAEV;;;;;;OAMG;IACH,UAAU,CAAC,MAAe;QACtB,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QACpE,MAAM,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC,CAAC;QAEpE,oBAAoB;QACpB,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;QAEzD,wCAAwC;QACxC,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC;QAC3B,MAAM,CAAC,KAAK,CAAC;IACjB,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,KAAa,EAAE,MAAe;QACtC,MAAM,SAAS,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC;QAEzE,mDAAmD;QACnD,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;QAE9C,qBAAqB;QACrB,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;QAEjC,yCAAyC;QACzC,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAC;QACjC,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;;;OAIG;IACH,YAAY;QACR,+DAA+D;QAC/D,IAAI,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC;QAE1B,6EAA6E;QAC7E,GAAG,CAAA,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YAChD,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC;gBACxB,OAAO,GAAG,CAAC,CAAC;gBACZ,KAAK,CAAC;YACV,CAAC;QACL,CAAC;QAED,aAAa;QACb,MAAM,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAExD,wCAAwC;QACxC,IAAI,CAAC,UAAU,GAAG,OAAO,GAAG,CAAC,CAAC;QAC9B,MAAM,CAAC,KAAK,CAAC;IACjB,CAAC;IAED;;;;;OAKG;IACH,aAAa,CAAC,KAAa,EAAE,MAAe;QACxC,eAAe;QACf,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;QAChC,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC;QAE5F,MAAM,CAAC,IAAI,CAAC;IAChB,CAAC;IAED;;OAEG;IACH,KAAK;QACD,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;QACrB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;QACpB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;IACpB,CAAC;IAED;;;;OAIG;IACH,SAAS;QACL,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,IAAI,CAAC,MAAc;QACf,EAAE,CAAC,CAAC,IAAI,CAAC,UAAU,GAAG,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;YACzC,MAAM,IAAI,KAAK,CAAC,+DAA+D,CAAC,CAAC;QACrF,CAAC;QAED,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC;IAC9B,CAAC;IAED;;;;OAIG;IACH,MAAM,CAAC,MAAc;QACjB,EAAE,CAAC,CAAC,IAAI,CAAC,UAAU,GAAG,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;YAC/B,MAAM,IAAI,KAAK,CAAC,+DAA+D,CAAC,CAAC;QACrF,CAAC;QAED,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC;IAC9B,CAAC;IAED;;;;OAIG;IACH,MAAM,CAAC,QAAgB;QACnB,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAC1B,CAAC;IAED;;;;OAIG;IACH,MAAM,CAAC,QAAgB;QACnB,EAAE,CAAC,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;YACzB,MAAM,IAAI,KAAK,CAAC,+DAA+D,CAAC,CAAC;QACrF,CAAC;QAED,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACH,QAAQ;QACJ,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;IAC3C,CAAC;IAED;;;;OAIG;IACH,QAAQ,CAAC,QAAyB;QAC9B,MAAM,WAAW,GAAG,OAAO,QAAQ,KAAK,QAAQ,GAAG,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;QAE5E,EAAE,CAAC,CAAC,MAAM,CAAC,UAAU,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;YACjC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3D,CAAC;QAAC,IAAI,CAAC,CAAC;YAC
J,MAAM,IAAI,KAAK,CAAC,kGAAkG,CAAC,CAAC;QACxH,CAAC;IACL,CAAC;IAED;;OAEG;IACH,OAAO;QACH,IAAI,CAAC,KAAK,EAAE,CAAC;IACjB,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,oBAAoB,CAAC,OAA2B;QACnD,MAAM,WAAW,GAAwB,OAAQ,CAAC;QAElD,MAAM,CAAC,WAAW,IAAI,CAAC,WAAW,CAAC,QAAQ,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC;IACnI,CAAC;CACJ;AAGD,iBAAS,WAAW,CAAC"} \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/lib/smart-buffer.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/lib/smart-buffer.js
deleted file mode 100644
index ea69cfc09a..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/lib/smart-buffer.js
+++ /dev/null
@@ -1,371 +0,0 @@
-var SmartBuffer = (function () {
-
- /**
- * Constructor for SmartBuffer.
- * @param arg1 {Buffer || Number || String} Buffer to read from, or expected size to write to, or encoding to use.
- * @param arg2 {String} Encoding to use for writing and reading strings. Defaults to utf8. If encoding is given in arg1, this is ignored.
- * @constructor
- *
- * There are a few ways to construct a SmartBuffer:
- *
- * SmartBuffer() - Defaults to utf8, 4096 pre-set internal Buffer length.
- * SmartBuffer(size) - Defaults to utf8, sets internal Buffer length to the size given.
- * SmartBuffer(encoding) - Sets the given encoding, defaults to 4096 pre-set internal Buffer length.
- * SmartBuffer(Buffer) - Defaults to utf8, sets the internal Buffer to the given buffer (same memory).
- * SmartBuffer(Buffer, encoding) - Sets the given encoding, sets the internal Buffer to the given buffer (same memory).
- *
- */
- function SmartBuffer(arg1, arg2) {
- var type;
- switch (type = typeof arg1) {
- case 'number':
- if (isFinite(arg1) && arg1 > 0) {
- this.buff = new Buffer(Math.ceil(arg1));
- this.length = 0;
- } else {
- throw new Error('When specifying a size, it must be a valid number above zero.');
- }
- break;
-
- case 'string':
- if (Buffer.isEncoding(arg1)) {
- this.buff = new Buffer(4096);
- this.length = 0;
- this.encoding = arg1;
- } else {
- throw new Error('Invalid Encoding');
- }
- break;
-
- case 'object':
- if (Buffer.isBuffer(arg1)) {
- this.buff = arg1;
- this.length = arg1.length;
- } else {
- throw new TypeError('First argument must be a Buffer, Number representing the size, or a String representing the encoding.');
- }
- break;
-
- default:
- this.buff = new Buffer(4096);
- this.length = 0;
- break;
- }
-
- if (typeof this.encoding === 'undefined') {
- if (typeof arg2 === 'string') {
- if (Buffer.isEncoding(arg2)) {
- this.encoding = arg2;
- } else {
- throw new Error('Invalid Encoding');
- }
- }
- }
-
- this._readOffset = 0;
- this._writeOffset = 0;
- }
-
-
- SmartBuffer.prototype._ensureWritable = function (len, offset) {
- this._ensureCapacity(this.length + len + (typeof offset === 'number' ? offset : 0));
-
- if (typeof offset === 'number') {
- this.buff.copy(this.buff, offset + len, offset, this.buff.length);
- }
- this.length = Math.max(this.length + len, (typeof offset === 'number' ? offset : 0) + len);
- };
-
- SmartBuffer.prototype._ensureCapacity = function (minlen) {
- var oldlen = this.buff.length;
-
- if (minlen > oldlen) {
- var data = this.buff;
- var newlen = (oldlen * 3) / 2 + 1;
- if (newlen < minlen)
- newlen = minlen;
- this.buff = new Buffer(newlen);
- data.copy(this.buff, 0, 0, oldlen);
- }
- };
-
-
- var makeReader = function (func, size) {
- return function () {
- var ret = func.call(this.buff, this._readOffset);
- this._readOffset += size;
- return ret;
- }
- };
-
- var makeWriter = function (func, size) {
- return function (value, offset) {
- this._ensureWritable(size, offset);
- func.call(this.buff, value, typeof offset === 'number' ? offset : this._writeOffset);
- this._writeOffset += size;
- return this;
- }
- };
-
-
- /*
- Read Operations
- */
-
- SmartBuffer.prototype.readInt8 = makeReader(Buffer.prototype.readInt8, 1);
- SmartBuffer.prototype.readInt16BE = makeReader(Buffer.prototype.readInt16BE, 2);
- SmartBuffer.prototype.readInt16LE = makeReader(Buffer.prototype.readInt16LE, 2);
- SmartBuffer.prototype.readInt32BE = makeReader(Buffer.prototype.readInt32BE, 4);
- SmartBuffer.prototype.readInt32LE = makeReader(Buffer.prototype.readInt32LE, 4);
-
- SmartBuffer.prototype.readUInt8 = makeReader(Buffer.prototype.readUInt8, 1);
- SmartBuffer.prototype.readUInt16BE = makeReader(Buffer.prototype.readUInt16BE, 2);
- SmartBuffer.prototype.readUInt16LE = makeReader(Buffer.prototype.readUInt16LE, 2);
- SmartBuffer.prototype.readUInt32BE = makeReader(Buffer.prototype.readUInt32BE, 4);
- SmartBuffer.prototype.readUInt32LE = makeReader(Buffer.prototype.readUInt32LE, 4);
-
- SmartBuffer.prototype.readFloatBE = makeReader(Buffer.prototype.readFloatBE, 4);
- SmartBuffer.prototype.readFloatLE = makeReader(Buffer.prototype.readFloatLE, 4);
-
- SmartBuffer.prototype.readDoubleBE = makeReader(Buffer.prototype.readDoubleBE, 8);
- SmartBuffer.prototype.readDoubleLE = makeReader(Buffer.prototype.readDoubleLE, 8);
-
-
- /**
- * Reads a string of the given length.
- * @param length {Number} The length of the string to read. (Defaults to the length of the remaining data)
- * @param encoding {String} The encoding to use. (Defaults to encoding set in constructor, or utf8)
- * @returns {string} The string.
- */
- SmartBuffer.prototype.readString = function (length, encoding) {
- var len = Math.min(length, this.length - this._readOffset) || (this.length - this._readOffset);
- var ret = this.buff.slice(this._readOffset, this._readOffset + len).toString(encoding || this.encoding);
- this._readOffset += len;
- return ret;
- };
-
- /**
- * Reads a null terminated string from the underlying buffer.
- * @param encoding {String} Encoding to use. Defaults to encoding set in constructor, or utf8.
- * @returns {string}
- */
- SmartBuffer.prototype.readStringNT = function (encoding) {
- var nullpos = this.length;
- for (var i = this._readOffset; i < this.length; i++) {
- if (this.buff[i] == 0x00) {
- nullpos = i;
- break;
- }
- }
-
- var result = this.buff.slice(this._readOffset, nullpos);
- this._readOffset = nullpos + 1;
-
- return result.toString(encoding || this.encoding);
- };
-
-
- /**
- * Reads a specified number of bytes.
- * @param len {Number} Numbers of bytes to read. (Defaults to the remaining data length)
- * @returns {Buffer} Buffer containing the read bytes.
- */
- SmartBuffer.prototype.readBuffer = function (len) {
- var endpoint = Math.min(this.length, this._readOffset + (typeof len === 'number' ? len : this.length));
- var ret = this.buff.slice(this._readOffset, endpoint);
- this._readOffset = endpoint;
- return ret;
- };
-
- /**
- * Reads a null terminated sequence of bytes from the underlying buffer.
- * @returns {Buffer} Buffer containing the read bytes.
- */
- SmartBuffer.prototype.readBufferNT = function () {
- var nullpos = this.length;
- for (var i = this._readOffset; i < this.length; i++) {
- if (this.buff[i] == 0x00) {
- nullpos = i;
- break;
- }
- }
-
- var ret = this.buff.slice(this._readOffset, nullpos);
- this._readOffset = nullpos + 1;
-
- return ret;
- };
-
-
- /*
- Write Operations
- */
-
-
- SmartBuffer.prototype.writeInt8 = makeWriter(Buffer.prototype.writeInt8, 1);
- SmartBuffer.prototype.writeInt16BE = makeWriter(Buffer.prototype.writeInt16BE, 2);
- SmartBuffer.prototype.writeInt16LE = makeWriter(Buffer.prototype.writeInt16LE, 2);
- SmartBuffer.prototype.writeInt32BE = makeWriter(Buffer.prototype.writeInt32BE, 4);
- SmartBuffer.prototype.writeInt32LE = makeWriter(Buffer.prototype.writeInt32LE, 4);
-
- SmartBuffer.prototype.writeUInt8 = makeWriter(Buffer.prototype.writeUInt8, 1);
- SmartBuffer.prototype.writeUInt16BE = makeWriter(Buffer.prototype.writeUInt16BE, 2);
- SmartBuffer.prototype.writeUInt16LE = makeWriter(Buffer.prototype.writeUInt16LE, 2);
- SmartBuffer.prototype.writeUInt32BE = makeWriter(Buffer.prototype.writeUInt32BE, 4);
- SmartBuffer.prototype.writeUInt32LE = makeWriter(Buffer.prototype.writeUInt32LE, 4);
-
- SmartBuffer.prototype.writeFloatBE = makeWriter(Buffer.prototype.writeFloatBE, 4);
- SmartBuffer.prototype.writeFloatLE = makeWriter(Buffer.prototype.writeFloatLE, 4);
-
- SmartBuffer.prototype.writeDoubleBE = makeWriter(Buffer.prototype.writeDoubleBE, 8);
- SmartBuffer.prototype.writeDoubleLE = makeWriter(Buffer.prototype.writeDoubleLE, 8);
-
-
- /**
- * Writes a string to the underlying buffer.
- * @param value {String} The string to write.
- * @param offset {Number} The offset to write the string to. (Encoding can also be set here in place of offset)
- * @param encoding {String} The encoding to use. (Defaults to encoding set in constructor, or to utf8)
- * @returns {*}
- */
- SmartBuffer.prototype.writeString = function (value, offset, encoding) {
- var len, _offset, type = typeof offset;
-
- if (type === 'number') {
- _offset = offset;
- } else if (type === 'string') {
- encoding = offset;
- offset = this._writeOffset;
- } else {
- encoding = undefined;
- offset = this._writeOffset;
- }
-
- len = Buffer.byteLength(value, encoding || this.encoding);
- this._ensureWritable(len, _offset);
-
- this.buff.write(value, offset, len, encoding || this.encoding);
- this._writeOffset += len;
- return this;
- };
-
- /**
- * Writes a null terminated string to the underlying buffer.
- * @param value {String} The string to write.
- * @param offset {Number} The offset to write the string to. (Encoding can also be set here in place of offset)
- * @param encoding {String} The encoding to use. (Defaults to encoding set in constructor, or to utf8)
- * @returns {*}
- */
- SmartBuffer.prototype.writeStringNT = function (value, offset, encoding) {
- this.writeString(value, offset, encoding);
- this.writeUInt8(0x00, (typeof offset === 'number' ? offset + value.length : this._writeOffset));
- return this;
- };
-
- /**
- * Writes a Buffer to the underlying buffer.
- * @param value {Buffer} The buffer to write.
- * @param offset {Number} The offset to write the Buffer to.
- * @returns {*}
- */
- SmartBuffer.prototype.writeBuffer = function (value, offset) {
- var len = value.length;
- this._ensureWritable(len, offset);
- value.copy(this.buff, typeof offset === 'number' ? offset : this._writeOffset);
- this._writeOffset += len;
- return this;
- };
-
- /**
- * Writes a null terminated Buffer to the underlying buffer.
- * @param value {Buffer} The buffer to write.
- * @param offset {Number} The offset to write the Buffer to.
- * @returns {*}
- */
- SmartBuffer.prototype.writeBufferNT = function (value, offset) {
- this.writeBuffer(value, offset);
- this.writeUInt8(0x00, (typeof offset === 'number' ? offset + value.length : this._writeOffset));
-
- return this;
- };
-
-
- /**
-     * Resets the SmartBuffer to its original empty state.
- */
- SmartBuffer.prototype.clear = function () {
- this._writeOffset = 0;
- this._readOffset = 0;
- this.length = 0;
- };
-
- /**
- * Gets the remaining number of bytes to be read from the existing Buffer.
- * @returns {number} The number of bytes remaining.
- */
- SmartBuffer.prototype.remaining = function () {
- return this.length - this._readOffset;
- };
-
- /**
-     * Skips the read position forward by the amount given.
- * @param amount {Number} The amount of bytes to skip forward.
- */
- SmartBuffer.prototype.skip = function (amount) {
- if (this._readOffset + amount > this.length)
- throw new Error('Target position is beyond the bounds of the data.');
-
- this._readOffset += amount;
- };
-
- /**
- * Rewinds the read position backward by the amount given.
- * @param amount {Number} The amount of bytes to reverse backward.
- */
- SmartBuffer.prototype.rewind = function (amount) {
- if (this._readOffset - amount < 0)
- throw new Error('Target position is beyond the bounds of the data.');
-
- this._readOffset -= amount;
- };
-
- /**
- * Skips the read position to the given position.
- * @param position {Number} The position to skip to.
- */
- SmartBuffer.prototype.skipTo = function (position) {
- if (position < 0 || position > this.length)
- throw new Error('Target position is beyond the bounds of the data.');
-
- this._readOffset = position;
- };
-
- /**
- * Gets the underlying Buffer.
- * @returns {*}
- */
- SmartBuffer.prototype.toBuffer = function () {
- return this.buff.slice(0, this.length);
- };
-
- /**
- * Gets a string representation of the underlying Buffer.
- * @param encoding {String} Encoding to use. (Defaults to encoding set in constructor, or utf8.)
- * @returns {*}
- */
- SmartBuffer.prototype.toString = function (encoding) {
- return this.buff.toString(encoding || this.encoding, 0, this.length);
- };
-
- /**
- * Destroys the underlying Buffer, and resets the SmartBuffer.
- */
- SmartBuffer.prototype.destroy = function () {
- delete this.buff;
- this.clear();
- };
-
- return SmartBuffer;
-})();
-
-module.exports = SmartBuffer; \ No newline at end of file
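A short, hedged sketch of the insert-at-offset behaviour implemented by `_ensureWritable` above: passing a numeric offset to a write shifts the existing data right rather than overwriting it, which the deleted test file uses to patch a size field in after the payload length is known. Assumes a local install of smart-buffer; the field layout is illustrative:

``` js
var SmartBuffer = require('smart-buffer');

var packet = new SmartBuffer();
packet.writeUInt16LE(0x0060);                 // header word
packet.writeStringNT('something');            // payload
packet.writeUInt32LE(8485934);                // more payload
packet.writeUInt16LE(packet.length - 2, 2);   // insert a size field right after the header

packet.readUInt16LE();                        // consume the header word
var size = packet.readUInt16LE();             // the inserted size field
console.log(size === packet.remaining());     // true: size covers the rest of the data
```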
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/package.json
deleted file mode 100644
index f6e183c589..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/package.json
+++ /dev/null
@@ -1,70 +0,0 @@
-{
- "_from": "smart-buffer@^1.0.13",
- "_id": "smart-buffer@1.1.15",
- "_inBundle": false,
- "_integrity": "sha1-fxFLW2X6s+KjWqd1uxLw0cZJvxY=",
- "_location": "/npm-registry-fetch/smart-buffer",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "smart-buffer@^1.0.13",
- "name": "smart-buffer",
- "escapedName": "smart-buffer",
- "rawSpec": "^1.0.13",
- "saveSpec": null,
- "fetchSpec": "^1.0.13"
- },
- "_requiredBy": [
- "/npm-registry-fetch/socks"
- ],
- "_resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-1.1.15.tgz",
- "_shasum": "7f114b5b65fab3e2a35aa775bb12f0d1c649bf16",
- "_spec": "smart-buffer@^1.0.13",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch/node_modules/socks",
- "author": {
- "name": "Josh Glazebrook"
- },
- "bugs": {
- "url": "https://github.com/JoshGlazebrook/smart-buffer/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "A smarter Buffer that keeps track of its own read and write positions while growing endlessly.",
- "devDependencies": {
- "chai": "^3.5.0",
- "coveralls": "^2.11.15",
- "istanbul": "^0.4.3",
- "mocha": "^3.2.0",
- "mocha-lcov-reporter": "^1.2.0"
- },
- "engines": {
- "node": ">= 0.10.15",
- "npm": ">= 1.3.5"
- },
- "homepage": "https://github.com/JoshGlazebrook/smart-buffer/",
- "keywords": [
- "buffer",
- "smart",
- "serialize",
- "packet",
- "network",
- "cursor",
- "simple"
- ],
- "license": "MIT",
- "main": "lib/smart-buffer.js",
- "name": "smart-buffer",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/JoshGlazebrook/smart-buffer.git"
- },
- "scripts": {
- "coverage": "istanbul cover node_modules/mocha/bin/_mocha recursive test",
- "fullcoverage": "istanbul -include-all-sources cover node_modules/mocha/bin/_mocha recursive test",
- "test": "mocha test/smart-buffer.test.js"
- },
- "typings": "typings/index",
- "version": "1.1.15"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/test/smart-buffer.test.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/test/smart-buffer.test.js
deleted file mode 100644
index 0897d54c2f..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/test/smart-buffer.test.js
+++ /dev/null
@@ -1,410 +0,0 @@
-var SmartBuffer = require('../lib/smart-buffer.js');
-var assert = require('chai').assert;
-
-
-describe('Constructing a SmartBuffer', function () {
- describe('Constructing with an existing Buffer', function () {
- var buff = new Buffer([0xAA, 0xBB, 0xCC, 0xDD, 0xFF, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99]);
- var reader = new SmartBuffer(buff);
-
- it('should have the exact same internal Buffer when constructed with a Buffer', function () {
- assert.strictEqual(reader.buff, buff);
- });
-
- it('should return a buffer with the same content', function () {
- assert.deepEqual(reader.toBuffer(), buff);
- });
- });
-
- describe('Constructing with an existing Buffer and setting the encoding', function () {
- var buff = new Buffer([0xAA, 0xBB, 0xCC, 0xDD, 0xFF, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99]);
- var reader = new SmartBuffer(buff, 'ascii');
-
- it('should have the exact same internal Buffer', function () {
- assert.strictEqual(reader.buff, buff);
- });
-
- it('should have the same encoding that was set', function () {
- assert.strictEqual(reader.encoding, 'ascii');
- });
- });
-
- describe('Constructing with a specified size', function () {
- var size = 128;
- var reader = new SmartBuffer(size);
-
- it('should have an internal Buffer with the same length as the size defined in the constructor', function () {
- assert.strictEqual(reader.buff.length, size);
- });
- });
-
- describe('Constructing with a specified encoding', function () {
- var encoding = 'utf8';
-
- it('should have an internal encoding with the encoding given to the constructor (1st argument)', function () {
- var reader = new SmartBuffer(encoding);
- assert.strictEqual(reader.encoding, encoding);
- });
-
- it('should have an internal encoding with the encoding given to the constructor (2nd argument)', function () {
- var reader = new SmartBuffer(1024, encoding);
- assert.strictEqual(reader.encoding, encoding);
- });
-
- });
-
- describe('Constructing with invalid parameters', function () {
- it('should throw an exception when given an invalid number size', function () {
- assert.throws(function () {
- var reader = new SmartBuffer(-100);
- }, Error);
- });
-
-        it('should throw an exception when given an invalid encoding', function () {
- assert.throws(function () {
- var reader = new SmartBuffer('invalid');
- }, Error);
-
- assert.throws(function () {
- var reader = new SmartBuffer(1024, 'invalid');
- }, Error);
- });
-
-        it('should throw an exception when given an object that is not a Buffer', function () {
- assert.throws(function () {
- var reader = new SmartBuffer(null);
- }, TypeError);
- });
- });
-});
-
-
-describe('Reading/Writing To/From SmartBuffer', function () {
- /**
- * Technically, if one of these works, they all should. But they're all here anyways.
- */
-
- describe('Numeric Values', function () {
- var reader = new SmartBuffer();
- reader.writeInt8(0x44);
- reader.writeUInt8(0xFF);
- reader.writeInt16BE(0x6699);
- reader.writeInt16LE(0x6699);
- reader.writeUInt16BE(0xFFDD);
- reader.writeUInt16LE(0xFFDD);
- reader.writeInt32BE(0x77889900);
- reader.writeInt32LE(0x77889900);
- reader.writeUInt32BE(0xFFDDCCBB);
- reader.writeUInt32LE(0xFFDDCCBB);
- reader.writeFloatBE(1.234);
- reader.writeFloatLE(1.234);
- reader.writeDoubleBE(1.234567890);
- reader.writeDoubleLE(1.234567890);
-
- it('should equal the correct values that were written above', function () {
- assert.strictEqual(reader.readInt8(), 0x44);
- assert.strictEqual(reader.readUInt8(), 0xFF);
- assert.strictEqual(reader.readInt16BE(), 0x6699);
- assert.strictEqual(reader.readInt16LE(), 0x6699);
- assert.strictEqual(reader.readUInt16BE(), 0xFFDD);
- assert.strictEqual(reader.readUInt16LE(), 0xFFDD);
- assert.strictEqual(reader.readInt32BE(), 0x77889900);
- assert.strictEqual(reader.readInt32LE(), 0x77889900);
- assert.strictEqual(reader.readUInt32BE(), 0xFFDDCCBB);
- assert.strictEqual(reader.readUInt32LE(), 0xFFDDCCBB);
- assert.closeTo(reader.readFloatBE(), 1.234, 0.001);
- assert.closeTo(reader.readFloatLE(), 1.234, 0.001);
- assert.closeTo(reader.readDoubleBE(), 1.234567890, 0.001);
- assert.closeTo(reader.readDoubleLE(), 1.234567890, 0.001);
- });
-
- });
-
- describe('Basic String Values', function () {
- var reader = new SmartBuffer();
- reader.writeStringNT('hello');
- reader.writeString('world');
- reader.writeStringNT('✎✏✎✏✎✏');
-
- it('should equal the correct strings that were written above', function () {
- assert.strictEqual(reader.readStringNT(), 'hello');
- assert.strictEqual(reader.readString(5), 'world');
- assert.strictEqual(reader.readStringNT(), '✎✏✎✏✎✏');
- });
- });
-
- describe('Mixed Encoding Strings', function () {
- var reader = new SmartBuffer('ascii');
- reader.writeStringNT('some ascii text');
- reader.writeStringNT('ѕσмє υтƒ8 тєχт', 'utf8');
-
- it('should equal the correct strings that were written above', function () {
- assert.strictEqual(reader.readStringNT(), 'some ascii text');
- assert.strictEqual(reader.readStringNT('utf8'), 'ѕσмє υтƒ8 тєχт');
- });
- });
-
- describe('Null/non-null terminating strings', function () {
- var reader = new SmartBuffer();
- reader.writeString('hello\0test\0bleh');
-
- it('should equal hello', function () {
- assert.strictEqual(reader.readStringNT(), 'hello');
- });
-
- it('should equal: test', function () {
- assert.strictEqual(reader.readString(4), 'test');
- });
-
- it('should have a length of zero', function () {
- assert.strictEqual(reader.readStringNT().length, 0);
- });
-
- it('should equal: bleh', function () {
- assert.strictEqual(reader.readStringNT(), 'bleh');
- });
-
-
- });
-
- describe('Reading string without specifying length', function () {
- var str = 'hello123';
- var writer = new SmartBuffer();
- writer.writeString(str);
-
- var reader = new SmartBuffer(writer.toBuffer());
-
- assert.strictEqual(reader.readString(), str);
- });
-
-    describe('Write string at a specific position', function () {
- var str = 'hello123';
- var writer = new SmartBuffer();
- writer.writeString(str, 10);
-
- var reader = new SmartBuffer(writer.toBuffer());
-
- reader.skipTo(10);
- it('Should read the correct string from the original position it was written to.', function () {
- assert.strictEqual(reader.readString(), str);
- });
-
-
- });
-
- describe('Buffer Values', function () {
- describe('Writing buffer to position 0', function () {
- var buff = new SmartBuffer();
- var frontBuff = new Buffer([1, 2, 3, 4, 5, 6]);
- buff.writeStringNT('hello');
- buff.writeBuffer(frontBuff, 0);
-
- it('should write the buffer to the front of the smart buffer instance', function () {
- var readBuff = buff.readBuffer(frontBuff.length);
- assert.deepEqual(readBuff, frontBuff);
- });
- });
-
- describe('Writing null terminated buffer to position 0', function () {
- var buff = new SmartBuffer();
- var frontBuff = new Buffer([1, 2, 3, 4, 5, 6]);
- buff.writeStringNT('hello');
- buff.writeBufferNT(frontBuff, 0);
-
- console.log(buff);
-
- it('should write the buffer to the front of the smart buffer instance', function () {
- var readBuff = buff.readBufferNT();
- console.log(readBuff);
- assert.deepEqual(readBuff, frontBuff);
- });
- });
-
- describe('Explicit lengths', function () {
- var buff = new Buffer([0x01, 0x02, 0x04, 0x08, 0x16, 0x32, 0x64]);
- var reader = new SmartBuffer();
- reader.writeBuffer(buff);
-
- it('should equal the buffer that was written above.', function () {
- assert.deepEqual(reader.readBuffer(7), buff);
- });
- });
-
- describe('Implicit lengths', function () {
- var buff = new Buffer([0x01, 0x02, 0x04, 0x08, 0x16, 0x32, 0x64]);
- var reader = new SmartBuffer();
- reader.writeBuffer(buff);
-
- it('should equal the buffer that was written above.', function () {
- assert.deepEqual(reader.readBuffer(), buff);
- });
- });
-
- describe('Null Terminated Buffer Reading', function () {
- var buff = new SmartBuffer();
- buff.writeBuffer(new Buffer([0x01, 0x02, 0x03, 0x04, 0x00, 0x01, 0x02, 0x03]));
-
- var read1 = buff.readBufferNT();
- var read2 = buff.readBufferNT();
-
- it('Should return a length of 4 for the four bytes before the first null in the buffer.', function () {
- assert.equal(read1.length, 4);
- });
-
- it('Should return a length of 3 for the three bytes after the first null in the buffer after reading to end.', function () {
- assert.equal(read2.length, 3);
- });
- });
-
- describe('Null Terminated Buffer Writing', function () {
- var buff = new SmartBuffer();
- buff.writeBufferNT(new Buffer([0x01, 0x02, 0x03, 0x04]));
-
- var read1 = buff.readBufferNT();
-
- it('Should read the correct null terminated buffer data.', function () {
- assert.equal(read1.length, 4);
- });
-
- })
-
- });
-
- describe('Inserting values into specific positions', function () {
- var reader = new SmartBuffer();
-
- reader.writeUInt16LE(0x0060);
- reader.writeStringNT('something');
- reader.writeUInt32LE(8485934);
- reader.writeUInt16LE(6699);
- reader.writeStringNT('else');
- reader.writeUInt16LE(reader.length - 2, 2);
-
-
- it('should equal the size of the remaining data in the buffer', function () {
- reader.readUInt16LE();
- var size = reader.readUInt16LE();
- assert.strictEqual(reader.remaining(), size);
- });
- });
-
- describe('Adding more data to the buffer than the internal buffer currently allows.', function () {
- it('Should automatically adjust internal buffer size when needed', function () {
- var writer = new SmartBuffer();
- var largeBuff = new Buffer(10000);
-
- writer.writeBuffer(largeBuff);
-
- assert.strictEqual(writer.length, largeBuff.length);
- });
- });
-
-});
-
-describe('Skipping around data', function () {
- var writer = new SmartBuffer();
- writer.writeStringNT('hello');
- writer.writeUInt16LE(6699);
- writer.writeStringNT('world!');
-
- it('Should equal the UInt16 that was written above', function () {
- var reader = new SmartBuffer(writer.toBuffer());
- reader.skip(6);
- assert.strictEqual(reader.readUInt16LE(), 6699);
- reader.skipTo(0);
- assert.strictEqual(reader.readStringNT(), 'hello');
- reader.rewind(6);
- assert.strictEqual(reader.readStringNT(), 'hello');
- });
-
- it('Should throw an error when attempting to skip more bytes than actually exist.', function () {
- var reader = new SmartBuffer(writer.toBuffer());
-
- assert.throws(function () {
- reader.skip(10000);
- });
- });
-
- it('Should throw an error when attempting to skip to a position that does not exist.', function () {
- var reader = new SmartBuffer(writer.toBuffer());
-
- assert.throws(function () {
- reader.skipTo(10000);
- });
- });
-
- it('Should throw an error when attempting to rewind past the start of the buffer.', function () {
- var buff = new SmartBuffer();
- assert.throws(function () {
- buff.rewind(10000);
- });
- });
-});
-
-describe('Automatic internal buffer resizing', function () {
- var writer;
-
- it('Should not throw an error when adding data that is larger than current buffer size (internal resize algo fails)', function () {
- var str = 'String larger than one byte';
- writer = new SmartBuffer(1);
- writer.writeString(str);
-
- assert.strictEqual(writer.buff.length, str.length);
-
- });
-
- it('Should not throw an error when adding data that is larger than current buffer size (internal resize algo succeeds)', function () {
- writer = new SmartBuffer(100);
- var buff = new Buffer(105);
-
- writer.writeBuffer(buff);
-
- // Test internal array growth algo.
- assert.strictEqual(writer.buff.length, (100 * 3 / 2 + 1));
- });
-});
-
-describe('Clearing the buffer', function () {
- var writer = new SmartBuffer();
- writer.writeString('somedata');
-
- it('Should contain some data.', function () {
- assert.notStrictEqual(writer.length, 0);
- });
-
- it('Should contain zero data after being cleared.', function () {
- writer.clear();
- assert.strictEqual(writer.length, 0);
- });
-});
-
-describe('Displaying the buffer as a string', function () {
- var buff = new Buffer([1, 2, 3, 4]);
- var sbuff = new SmartBuffer(buff);
-
- var str = buff.toString();
- var str64 = buff.toString('base64');
-
- it('Should return a valid string representing the internal buffer', function () {
- assert.strictEqual(str, sbuff.toString());
- });
-
- it('Should return a valid base64 string representing the internal buffer', function () {
- assert.strictEqual(str64, sbuff.toString('base64'));
- });
-});
-
-describe('Destroying the buffer', function () {
- var writer = new SmartBuffer();
- writer.writeString('hello123');
-
- writer.destroy();
-
- it('Should have a length of zero when buffer is destroyed', function () {
- assert.strictEqual(0, writer.length);
- });
-
- it('Should have no internal buff property when buffer is destroyed', function () {
- assert.notProperty(writer, 'buff');
- });
-}); \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/typings/index.d.ts b/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/typings/index.d.ts
deleted file mode 100644
index b567f1e97c..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/smart-buffer/typings/index.d.ts
+++ /dev/null
@@ -1,383 +0,0 @@
-// Type definitions for smart-buffer
-// Project: https://github.com/JoshGlazebrook/smart-buffer
-// Definitions by: Josh Glazebrook <https://github.com/JoshGlazebrook>
-
-
-
-declare class SmartBuffer {
- /**
- * Creates a new SmartBuffer instance (defaults to utf8 encoding)
- */
- constructor();
-
- /**
- * Creates a new SmartBuffer instance
- *
- * @param arg1 { Number } The size the underlying buffer instance should be instantiated to (defaults to 4096)
- * @param arg2 { String } The string encoding to use for reading/writing strings (defaults to utf8)
- */
- constructor(size: number, encoding?: string);
-
- /**
- * Creates a new SmartBuffer instance
- *
- * @param arg1 { String } The string encoding to use for reading/writing strings (defaults to utf8)
- */
- constructor(encoding?: string);
-
- /**
- * Creates a new SmartBuffer instance
- *
- * @param arg1 { Buffer } An existing buffer instance to copy to this smart buffer instance
- * @param arg2 { String } The string encoding to use for reading/writing strings (defaults to utf8)
- */
- constructor(buffer: Buffer, encoding?: string)
-
-
-
- // Signed number readers
-
- /**
- * Reads a 8-bit signed integer
- */
- readInt8(): number;
-
- /**
- * Reads a 16-bit signed integer (big endian)
- */
- readInt16BE(): number;
-
- /**
- * Reads a 16-bit signed integer (little endian)
- */
- readInt16LE(): number;
-
- /**
- * Reads a 32-bit signed integer (big endian)
- */
- readInt32BE(): number;
-
- /**
- * Reads a 32-bit signed integer (little endian)
- */
- readInt32LE(): number;
-
- // Unsigned number readers
-
- /**
- * Reads a 8-bit unsigned integer
- */
- readUInt8(): number;
-
- /**
- * Reads a 16-bit unsigned integer (big endian)
- */
- readUInt16BE(): number;
-
- /**
- * Reads a 16-bit unsigned integer (little endian)
- */
- readUInt16LE(): number;
-
- /**
- * Reads a 32-bit unsigned integer (big endian)
- */
- readUInt32BE(): number;
-
- /**
- * Reads a 32-bit unsigned integer (little endian)
- */
- readUInt32LE(): number;
-
- // Floating point readers
-
- /**
- * Reads a float (big endian)
- */
- readFloatBE(): number;
-
- /**
- * Reads a float (little endian)
- */
- readFloatLE(): number;
-
- /**
- * Reads a double (big endian)
- */
- readDoubleBE(): number;
-
- /**
- * Reads a double (little endian)
- */
- readDoubleLE(): number;
-
- // String readers
-
- /**
- * Reads a string
- *
- * @param length { Number } The length of the string to read
-     * @param encoding { String } The encoding to use (defaults to instance level encoding)
- */
- readString(length?: number, encoding?: string): string;
-
- /**
- * Reads a null terminated string
- *
- * @param encoding The encoding to use (defaults to instance level encoding)
- */
- readStringNT(encoding?: string): string;
-
- // Buffer readers
-
- /**
- * Reads binary data into a Buffer
- *
- * @param len { Number } The amount of data to read
- */
- readBuffer(len?: number): Buffer;
-
- /**
- * Reads null terminated binary data into a Buffer
- */
- readBufferNT(): Buffer;
-
-
- // Signed number writers
-
- /**
- * Writes a 8-bit signed integer value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeInt8(value: number, offset?: number): this;
-
- /**
- * Writes a 16-bit signed integer (big endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeInt16BE(value: number, offset?: number): this;
-
- /**
- * Writes a 16-bit signed integer (little endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeInt16LE(value: number, offset?: number): this;
-
- /**
- * Writes a 32-bit signed integer (big endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeInt32BE(value: number, offset?: number): this;
-
- /**
- * Writes a 32-bit signed integer (little endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeInt32LE(value: number, offset?: number): this;
-
- // Unsigned number writers
-
- /**
- * Writes a 8-bit unsigned integer value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeUInt8(value: number, offset?: number): this;
-
- /**
- * Writes a 16-bit unsigned integer (big endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeUInt16BE(value: number, offset?: number): this;
-
- /**
- * Writes a 16-bit unsigned integer (little endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeUInt16LE(value: number, offset?: number): this;
-
- /**
- * Writes a 32-bit unsigned integer (big endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeUInt32BE(value: number, offset?: number): this;
-
- /**
- * Writes a 32-bit unsigned integer (little endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeUInt32LE(value: number, offset?: number): this;
-
- // Floating point writers
-
- /**
- * Writes a float (big endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeFloatBE(value: number, offset?: number): this;
-
- /**
- * Writes a float (little endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeFloatLE(value: number, offset?: number): this;
-
- /**
- * Writes a double (big endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeDoubleBE(value: number, offset?: number): this;
-
- /**
- * Writes a double (little endian) value
- *
- * @param value { Number } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeDoubleLE(value: number, offset?: number): this;
-
- // String writers
-
- /**
- * Writes a string
- *
- * @param value { String } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- /**
- * Writes a string
- *
- * @param value { String } The value to write to the buffer
- * @param offset { String } The encoding to use when writing the string (defaults to instance level encoding)
- */
- /**
- * Writes a string
- *
- * @param value { String } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- * @param encoding { String } The encoding to use when writing the string (defaults to instance level encoding)
- */
- writeString(value: string, offset?: number | string, encoding?: string): this;
-
- /**
- * Writes a null terminated string
- *
- * @param value { String } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- */
- /**
- * Writes a null terminated string
- *
- * @param value { String } The value to write to the buffer
- * @param offset { String } The encoding to use when writing the string (defaults to instance level encoding)
- */
- /**
- * Writes a null terminated string
- *
- * @param value { String } The value to write to the buffer
- * @param offset { Number } The offset position to write the value to
- * @param encoding { String } The encoding to use when writing the string (defaults to instance level encoding)
- */
- writeStringNT(value: string, offset?: number | string, encoding?: string): this;
-
- // Buffer writers
-
- /**
- * Writes a Buffer
- *
- * @param value { Buffer } The Buffer to write to the smart buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeBuffer(value: Buffer, offset?: number): this;
-
- /**
- * Writes a Buffer with null termination
- *
- * @param value { Buffer } The buffer to write to the smart buffer
- * @param offset { Number } The offset position to write the value to
- */
- writeBufferNT(value: Buffer, offset?: number): this;
-
-
- // Misc Functions
-
- /**
- * Clears the smart buffer
- */
- clear();
-
- /**
- * Gets the number of bytes that remain to be read
- */
- remaining(): number;
-
- /**
- * Increases the read offset position
- *
- * @param amount { Number } The amount to increase the read offset position by
- */
- skip(amount: number);
-
- /**
- * Changes the read offset position
- *
- * @param position { Number } The position to change the read offset to
- */
- skipTo(position: number);
-
- /**
- * Decreases the read offset position
- *
- * @param amount { Number } The amount to decrease the read offset position by
- */
- rewind(amount: number);
-
- /**
- * Gets the underlying Buffer instance
- */
- toBuffer(): Buffer;
-
- /**
- * Gets the string representation of the underlying Buffer
- *
- * @param encoding { String } The encoding to use (defaults to instance level encoding)
- */
- toString(encoding?: string): string;
-
- /**
- * Destroys the smart buffer instance
- */
- destroy();
-
- /**
- * Gets the current length of the smart buffer instance
- */
- length: number;
-}
-
-export = SmartBuffer; \ No newline at end of file
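To round out the declarations removed above, a small hedged example of the null-terminated Buffer helpers (readBufferNT stops at the first 0x00 byte and advances past it); it assumes smart-buffer is installed and mirrors the behaviour exercised in the deleted tests:

``` js
var SmartBuffer = require('smart-buffer');

var sb = new SmartBuffer();
sb.writeBuffer(new Buffer([0x01, 0x02, 0x03, 0x04, 0x00, 0x05, 0x06]));

console.log(sb.readBufferNT());   // <Buffer 01 02 03 04> - bytes before the first null
console.log(sb.readBufferNT());   // <Buffer 05 06>       - the remainder, no null found
```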
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.npmignore b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.npmignore
deleted file mode 100644
index 07e6e472cc..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-/node_modules
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.travis.yml b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.travis.yml
deleted file mode 100644
index 805d3d50d2..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/.travis.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-sudo: false
-
-language: node_js
-
-node_js:
- - "4"
- - "5"
- - "6"
- - "7"
- - "8"
-
-install:
- - PATH="`npm bin`:`npm bin -g`:$PATH"
- # Install dependencies and build
- - npm install
-
-script:
- # Output useful info for debugging
- - node --version
- - npm --version
- # Run tests
- - npm test
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/History.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/History.md
deleted file mode 100644
index b0266b5e35..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/History.md
+++ /dev/null
@@ -1,96 +0,0 @@
-
-3.0.1 / 2017-09-18
-==================
-
- * update "agent-base" to v4.1.0
-
-3.0.0 / 2017-06-13
-==================
-
- * [BREAKING] drop support for Node < 4
- * update deps, remove `extend` dependency
- * rename `socks-proxy-agent.js` to `index.js`
-
-2.1.1 / 2017-06-13
-==================
-
- * fix a bug where `close` would emit before `end`
- * use "raw-body" module for tests
- * prettier
-
-2.1.0 / 2017-05-24
-==================
-
- * DRY post-lookup logic
- * Fix an error in readme (#13, @599316527)
- * travis: test node v5
- * travis: test iojs v1, 2, 3 and node.js v4
- * test: use ssl-cert-snakeoil cert files
- * Authentication support (#9, @baryshev)
-
-2.0.0 / 2015-07-10
-==================
-
- * API CHANGE! Removed `secure` boolean second argument in constructor
- * upgrade to "agent-base" v2 API
- * package: update "extend" to v3
-
-1.0.2 / 2015-07-01
-==================
-
- * remove "v4a" from description
- * socks-proxy-agent: cast `port` to a Number
- * travis: attempt to make node v0.8 work
- * travis: test node v0.12, don't test v0.11
- * test: pass `rejectUnauthorized` as a proxy opt
- * test: catch http.ClientRequest errors
- * test: add self-signed SSL server cert files
- * test: refactor to use local SOCKS, HTTP and HTTPS servers
- * README: use SVG for Travis-CI badge
-
-1.0.1 / 2015-03-01
-==================
-
- * switched from using "socks-client" to "socks" (#5, @JoshGlazebrook)
-
-1.0.0 / 2015-02-11
-==================
-
- * add client-side DNS lookup logic for 4 and 5 version socks proxies
- * remove dead `onproxyconnect()` code function
- * use a switch statement to decide the socks `version`
- * refactor to use "socks-client" instead of "rainbowsocks"
- * package: remove "rainbowsocks" dependency
- * package: allow any "mocha" v2
-
-0.1.2 / 2014-06-11
-==================
-
- * package: update "rainbowsocks" to v0.1.2
- * travis: don't test node v0.9
-
-0.1.1 / 2014-04-09
-==================
-
- * package: update outdated dependencies
- * socks-proxy-agent: pass `secure` flag when no `new`
- * socks-proxy-agent: small code cleanup
-
-0.1.0 / 2013-11-19
-==================
-
- * add .travis.yml file
- * socks-proxy-agent: properly mix in the proxy options
- * socks-proxy-agent: coerce the `secureEndpoint` into a Boolean
- * socks-proxy-agent: use "extend" module
- * socks-proxy-agent: update to "agent-base" v1 API
-
-0.0.2 / 2013-07-24
-==================
-
- * socks-proxy-agent: properly set the `defaultPort` property
-
-0.0.1 / 2013-07-11
-==================
-
- * Initial release
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/README.md
deleted file mode 100644
index 30d33500af..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/README.md
+++ /dev/null
@@ -1,134 +0,0 @@
-socks-proxy-agent
-================
-### A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS
-[![Build Status](https://travis-ci.org/TooTallNate/node-socks-proxy-agent.svg?branch=master)](https://travis-ci.org/TooTallNate/node-socks-proxy-agent)
-
-This module provides an `http.Agent` implementation that connects to a
-specified SOCKS proxy server, and can be used with the built-in `http`
-or `https` modules.
-
-It can also be used in conjunction with the `ws` module to establish a WebSocket
-connection over a SOCKS proxy. See the "Examples" section below.
-
-Installation
-------------
-
-Install with `npm`:
-
-``` bash
-$ npm install socks-proxy-agent
-```
-
-
-Examples
---------
-
-#### `http` module example
-
-``` js
-var url = require('url');
-var http = require('http');
-var SocksProxyAgent = require('socks-proxy-agent');
-
-// SOCKS proxy to connect to
-var proxy = process.env.socks_proxy || 'socks://127.0.0.1:9050';
-console.log('using proxy server %j', proxy);
-
-// HTTP endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'http://nodejs.org/api/';
-console.log('attempting to GET %j', endpoint);
-var opts = url.parse(endpoint);
-
-// create an instance of the `SocksProxyAgent` class with the proxy server information
-var agent = new SocksProxyAgent(proxy);
-opts.agent = agent;
-
-http.get(opts, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-#### `https` module example
-
-``` js
-var url = require('url');
-var https = require('https');
-var SocksProxyAgent = require('socks-proxy-agent');
-
-// SOCKS proxy to connect to
-var proxy = process.env.socks_proxy || 'socks://127.0.0.1:9050';
-console.log('using proxy server %j', proxy);
-
-// HTTP endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'https://encrypted.google.com/';
-console.log('attempting to GET %j', endpoint);
-var opts = url.parse(endpoint);
-
-// create an instance of the `SocksProxyAgent` class with the proxy server information
-// NOTE: the `true` second argument means to use TLS encryption on the socket
-var agent = new SocksProxyAgent(proxy, true);
-opts.agent = agent;
-
-https.get(opts, function (res) {
- console.log('"response" event!', res.headers);
- res.pipe(process.stdout);
-});
-```
-
-#### `ws` WebSocket connection example
-
-``` js
-var WebSocket = require('ws');
-var SocksProxyAgent = require('socks-proxy-agent');
-
-// SOCKS proxy to connect to
-var proxy = process.env.socks_proxy || 'socks://127.0.0.1:9050';
-console.log('using proxy server %j', proxy);
-
-// WebSocket endpoint for the proxy to connect to
-var endpoint = process.argv[2] || 'ws://echo.websocket.org';
-console.log('attempting to connect to WebSocket %j', endpoint);
-
-// create an instance of the `SocksProxyAgent` class with the proxy server information
-var agent = new SocksProxyAgent(proxy);
-
-// initiate the WebSocket connection
-var socket = new WebSocket(endpoint, { agent: agent });
-
-socket.on('open', function () {
- console.log('"open" event!');
- socket.send('hello world');
-});
-
-socket.on('message', function (data, flags) {
- console.log('"message" event! %j %j', data, flags);
- socket.close();
-});
-```
-
-License
--------
-
-(The MIT License)
-
-Copyright (c) 2013 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/index.js
deleted file mode 100644
index 3dac18d564..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/index.js
+++ /dev/null
@@ -1,141 +0,0 @@
-/**
- * Module dependencies.
- */
-
-var tls; // lazy-loaded...
-var url = require('url');
-var dns = require('dns');
-var Agent = require('agent-base');
-var SocksClient = require('socks');
-var inherits = require('util').inherits;
-
-/**
- * Module exports.
- */
-
-module.exports = SocksProxyAgent;
-
-/**
- * The `SocksProxyAgent`.
- *
- * @api public
- */
-
-function SocksProxyAgent(opts) {
- if (!(this instanceof SocksProxyAgent)) return new SocksProxyAgent(opts);
- if ('string' == typeof opts) opts = url.parse(opts);
- if (!opts)
- throw new Error(
- 'a SOCKS proxy server `host` and `port` must be specified!'
- );
- Agent.call(this, opts);
-
- var proxy = Object.assign({}, opts);
-
- // prefer `hostname` over `host`, because of `url.parse()`
- proxy.host = proxy.hostname || proxy.host;
-
- // SOCKS doesn't *technically* have a default port, but this is
- // the same default that `curl(1)` uses
- proxy.port = +proxy.port || 1080;
-
- if (proxy.host && proxy.path) {
- // if both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete proxy.path;
- delete proxy.pathname;
- }
-
- // figure out if we want socks v4 or v5, based on the "protocol" used.
- // Defaults to 5.
- proxy.lookup = false;
- switch (proxy.protocol) {
- case 'socks4:':
- proxy.lookup = true;
- // pass through
- case 'socks4a:':
- proxy.version = 4;
- break;
- case 'socks5:':
- proxy.lookup = true;
- // pass through
- case 'socks:': // no version specified, default to 5h
- case 'socks5h:':
- proxy.version = 5;
- break;
- default:
- throw new TypeError(
- 'A "socks" protocol must be specified! Got: ' + proxy.protocol
- );
- }
-
- if (proxy.auth) {
- var auth = proxy.auth.split(':');
- proxy.authentication = { username: auth[0], password: auth[1] };
- proxy.userid = auth[0];
- }
- this.proxy = proxy;
-}
-inherits(SocksProxyAgent, Agent);
-
-/**
- * Initiates a SOCKS connection to the specified SOCKS proxy server,
- * which in turn connects to the specified remote host and port.
- *
- * @api public
- */
-
-SocksProxyAgent.prototype.callback = function connect(req, opts, fn) {
- var proxy = this.proxy;
-
- // called once the SOCKS proxy has connected to the specified remote endpoint
- function onhostconnect(err, socket) {
- if (err) return fn(err);
- var s = socket;
- if (opts.secureEndpoint) {
- // since the proxy is connecting to an SSL server, we have
- // to upgrade this socket connection to an SSL connection
- if (!tls) tls = require('tls');
- opts.socket = socket;
- opts.servername = opts.host;
- opts.host = null;
- opts.hostname = null;
- opts.port = null;
- s = tls.connect(opts);
- }
- socket.resume();
- fn(null, s);
- }
-
- // called for the `dns.lookup()` callback
- function onlookup(err, ip) {
- if (err) return fn(err);
- options.target.host = ip;
- SocksClient.createConnection(options, onhostconnect);
- }
-
- var options = {
- proxy: {
- ipaddress: proxy.host,
- port: +proxy.port,
- type: proxy.version
- },
- target: {
- port: +opts.port
- },
- command: 'connect'
- };
- if (proxy.authentication) {
- options.proxy.authentication = proxy.authentication;
- options.proxy.userid = proxy.userid;
- }
-
- if (proxy.lookup) {
- // client-side DNS resolution for "4" and "5" socks proxy versions
- dns.lookup(opts.host, onlookup);
- } else {
- // proxy hostname DNS resolution for "4a" and "5h" socks proxy servers
- onlookup(null, opts.host);
- }
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/package.json
deleted file mode 100644
index 011f4a61f7..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/package.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "_from": "socks-proxy-agent@^3.0.1",
- "_id": "socks-proxy-agent@3.0.1",
- "_inBundle": false,
- "_integrity": "sha512-ZwEDymm204mTzvdqyUqOdovVr2YRd2NYskrYrF2LXyZ9qDiMAoFESGK8CRphiO7rtbo2Y757k2Nia3x2hGtalA==",
- "_location": "/npm-registry-fetch/socks-proxy-agent",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "socks-proxy-agent@^3.0.1",
- "name": "socks-proxy-agent",
- "escapedName": "socks-proxy-agent",
- "rawSpec": "^3.0.1",
- "saveSpec": null,
- "fetchSpec": "^3.0.1"
- },
- "_requiredBy": [
- "/npm-registry-fetch/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-3.0.1.tgz",
- "_shasum": "2eae7cf8e2a82d34565761539a7f9718c5617659",
- "_spec": "socks-proxy-agent@^3.0.1",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen",
- "author": {
- "name": "Nathan Rajlich",
- "email": "nathan@tootallnate.net",
- "url": "http://n8.io/"
- },
- "bugs": {
- "url": "https://github.com/TooTallNate/node-socks-proxy-agent/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "agent-base": "^4.1.0",
- "socks": "^1.1.10"
- },
- "deprecated": false,
- "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS",
- "devDependencies": {
- "mocha": "^3.4.2",
- "raw-body": "^2.2.0",
- "socksv5": "0.0.6"
- },
- "homepage": "https://github.com/TooTallNate/node-socks-proxy-agent#readme",
- "keywords": [
- "socks",
- "socks4",
- "socks4a",
- "proxy",
- "http",
- "https",
- "agent"
- ],
- "license": "MIT",
- "main": "./index.js",
- "name": "socks-proxy-agent",
- "repository": {
- "type": "git",
- "url": "git://github.com/TooTallNate/node-socks-proxy-agent.git"
- },
- "scripts": {
- "test": "mocha --reporter spec"
- },
- "version": "3.0.1"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key
deleted file mode 100644
index fd12501220..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICWwIBAAKBgQCzURxIqzer0ACAbX/lHdsn4Gd9PLKrf7EeDYfIdV0HZKPD8WDr
-bBx2/fBu0OW2sjnzv/SVZbJ0DAuPE/p0+eT0qb2qC10iz9iTD7ribd7gxhirVb8y
-b3fBjXsxc8V8p4Ny1LcvNSqCjwUbJqdRogfoJeTiqPM58z5sNzuv5iq7iwIDAQAB
-AoGAPMQy4olrP0UotlzlJ36bowLP70ffgHCwU+/f4NWs5fF78c3du0oSx1w820Dd
-Z7E0JF8bgnlJJTxjumPZz0RUCugrEHBKJmzEz3cxF5E3+7NvteZcjKn9D67RrM5x
-1/uSZ9cqKE9cYvY4fSuHx18diyZ4axR/wB1Pea2utjjDM+ECQQDb9ZbmmaWMiRpQ
-5Up+loxP7BZNPsEVsm+DVJmEFbaFgGfncWBqSIqnPNjMwTwj0OigTwCAEGPkfRVW
-T0pbYWCxAkEA0LK7SCTwzyDmhASUalk0x+3uCAA6ryFdwJf/wd8TRAvVOmkTEldX
-uJ7ldLvfrONYO3v56uKTU/SoNdZYzKtO+wJAX2KM4ctXYy5BXztPpr2acz4qHa1N
-Bh+vBAC34fOYhyQ76r3b1btHhWZ5jbFuZwm9F2erC94Ps5IaoqcX07DSwQJAPKGw
-h2U0EPkd/3zVIZCJJQya+vgWFIs9EZcXVtvYXQyTBkVApTN66MhBIYjzkub5205J
-bVQmOV37AKklY1DhwQJAA1wos0cYxro02edzatxd0DIR2r4qqOqLkw6BhYHhq6HJ
-ZvIcQkHqdSXzdETFc01I1znDGGIrJHcnvKWgBPoEUg==
------END RSA PRIVATE KEY-----
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem
deleted file mode 100644
index b115a5e914..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/ssl-cert-snakeoil.pem
+++ /dev/null
@@ -1,12 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIB1TCCAT4CCQDV5mPlzm9+izANBgkqhkiG9w0BAQUFADAvMS0wKwYDVQQDEyQ3
-NTI3YmQ3Ny1hYjNlLTQ3NGItYWNlNy1lZWQ2MDUzOTMxZTcwHhcNMTUwNzA2MjI0
-NTA3WhcNMjUwNzAzMjI0NTA3WjAvMS0wKwYDVQQDEyQ3NTI3YmQ3Ny1hYjNlLTQ3
-NGItYWNlNy1lZWQ2MDUzOTMxZTcwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB
-ALNRHEirN6vQAIBtf+Ud2yfgZ308sqt/sR4Nh8h1XQdko8PxYOtsHHb98G7Q5bay
-OfO/9JVlsnQMC48T+nT55PSpvaoLXSLP2JMPuuJt3uDGGKtVvzJvd8GNezFzxXyn
-g3LUty81KoKPBRsmp1GiB+gl5OKo8znzPmw3O6/mKruLAgMBAAEwDQYJKoZIhvcN
-AQEFBQADgYEACzoHUF8UV2Z6541Q2wKEA0UFUzmUjf/E1XwBO+1P15ZZ64uw34B4
-1RwMPtAo9RY/PmICTWtNxWGxkzwb2JtDWtnxVER/lF8k2XcXPE76fxTHJF/BKk9J
-QU8OTD1dd9gHCBviQB9TqntRZ5X7axjtuWjb2umY+owBYzAHZkp1HKI=
------END CERTIFICATE-----
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/test.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/test.js
deleted file mode 100644
index 968ef650fb..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent/test/test.js
+++ /dev/null
@@ -1,144 +0,0 @@
-
-/**
- * Module dependencies.
- */
-
-var fs = require('fs');
-var url = require('url');
-var http = require('http');
-var https = require('https');
-var assert = require('assert');
-var socks = require('socksv5');
-var getRawBody = require('raw-body');
-var SocksProxyAgent = require('../');
-
-describe('SocksProxyAgent', function () {
- var httpServer, httpPort;
- var httpsServer, httpsPort;
- var socksServer, socksPort;
-
- before(function (done) {
- // setup SOCKS proxy server
- socksServer = socks.createServer(function(info, accept, deny) {
- accept();
- });
- socksServer.listen(0, '127.0.0.1', function() {
- socksPort = socksServer.address().port;
- //console.log('SOCKS server listening on port %d', socksPort);
- done();
- });
- socksServer.useAuth(socks.auth.None());
- //socksServer.useAuth(socks.auth.UserPassword(function(user, password, cb) {
- // cb(user === 'nodejs' && password === 'rules!');
- //}));
- });
-
- before(function (done) {
- // setup target HTTP server
- httpServer = http.createServer();
- httpServer.listen(function () {
- httpPort = httpServer.address().port;
- done();
- });
- });
-
- before(function (done) {
- // setup target SSL HTTPS server
- var options = {
- key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
- cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
- };
- httpsServer = https.createServer(options);
- httpsServer.listen(function () {
- httpsPort = httpsServer.address().port;
- done();
- });
- });
-
- after(function (done) {
- socksServer.once('close', function () { done(); });
- socksServer.close();
- });
-
- after(function (done) {
- httpServer.once('close', function () { done(); });
- httpServer.close();
- });
-
- after(function (done) {
- httpsServer.once('close', function () { done(); });
- httpsServer.close();
- });
-
- describe('constructor', function () {
- it('should throw an Error if no "proxy" argument is given', function () {
- assert.throws(function () {
- new SocksProxyAgent();
- });
- });
- it('should accept a "string" proxy argument', function () {
- var agent = new SocksProxyAgent('socks://127.0.0.1:' + socksPort);
- assert.equal('127.0.0.1', agent.proxy.host);
- assert.equal(socksPort, agent.proxy.port);
- });
- it('should accept a `url.parse()` result object argument', function () {
- var opts = url.parse('socks://127.0.0.1:' + socksPort);
- var agent = new SocksProxyAgent(opts);
- assert.equal('127.0.0.1', agent.proxy.host);
- assert.equal(socksPort, agent.proxy.port);
- });
- });
-
- describe('"http" module', function () {
- it('should work against an HTTP endpoint', function (done) {
- httpServer.once('request', function (req, res) {
- assert.equal('/foo', req.url);
- res.statusCode = 404;
- res.end(JSON.stringify(req.headers));
- });
-
- var agent = new SocksProxyAgent('socks://127.0.0.1:' + socksPort);
- var opts = url.parse('http://127.0.0.1:' + httpPort + '/foo');
- opts.agent = agent;
- opts.headers = { foo: 'bar' };
- var req = http.get(opts, function (res) {
- assert.equal(404, res.statusCode);
- getRawBody(res, 'utf8', function (err, buf) {
- if (err) return done(err);
- var data = JSON.parse(buf);
- assert.equal('bar', data.foo);
- done();
- });
- });
- req.once('error', done);
- });
- });
-
- describe('"https" module', function () {
- it('should work against an HTTPS endpoint', function (done) {
- httpsServer.once('request', function (req, res) {
- assert.equal('/foo', req.url);
- res.statusCode = 404;
- res.end(JSON.stringify(req.headers));
- });
-
- var agent = new SocksProxyAgent('socks://127.0.0.1:' + socksPort);
- var opts = url.parse('https://127.0.0.1:' + httpsPort + '/foo');
- opts.agent = agent;
- opts.rejectUnauthorized = false;
-
- opts.headers = { foo: 'bar' };
- var req = https.get(opts, function (res) {
- assert.equal(404, res.statusCode);
- getRawBody(res, 'utf8', function (err, buf) {
- if (err) return done(err);
- var data = JSON.parse(buf);
- assert.equal('bar', data.foo);
- done();
- });
- });
- req.once('error', done);
- });
- });
-
-});
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/.npmignore b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/.npmignore
deleted file mode 100644
index 7deddced8b..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/.npmignore
+++ /dev/null
@@ -1,4 +0,0 @@
-node_modules
-.git*
-.idea
-npm-debug.log \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/LICENSE b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/LICENSE
deleted file mode 100644
index b2442a9e71..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2013 Josh Glazebrook
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/README.md
deleted file mode 100644
index 890b7deb6a..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/README.md
+++ /dev/null
@@ -1,339 +0,0 @@
-socks
-=============
-
-socks is a full client implementation of the SOCKS 4, 4a, and 5 protocols in an easy-to-use Node.js module.
-
-### Notice
-As of February 26th, 2015, socks is the new home of the socks-client package.
-
-### Why socks?
-
-There is no other SOCKS proxy client library on npm that supports all three variants of the SOCKS protocol. Nor are there any that support the BIND and associate features that some versions of the SOCKS protocol support.
-
-Key Features:
-* Supports SOCKS 4, 4a, and 5 protocols
-* Supports the connect method (simple TCP connections through the SOCKS server) (Client -> SOCKS Server -> Target Server)
-* Supports the BIND method (4, 4a, 5)
-* Supports the associate (UDP forwarding) method (5)
-* Simple and easy to use (one function call to make any type of SOCKS connection)
-
-## Installing:
-
-`npm install socks`
-
-### Getting Started Example
-
-For this example, say you wanted to grab the HTML of Google's home page.
-
-```javascript
-var Socks = require('socks');
-
-var options = {
- proxy: {
- ipaddress: "202.101.228.108", // Random public proxy
- port: 1080,
- type: 5 // type is REQUIRED. Valid types: [4, 5] (note 4 also works for 4a)
- },
- target: {
- host: "google.com", // can be an ip address or domain (4a and 5 only)
- port: 80
- },
- command: 'connect' // This defaults to connect, so it's optional if you're not using BIND or Associate.
-};
-
-Socks.createConnection(options, function(err, socket, info) {
- if (err)
- console.log(err);
- else {
- // Connection has been established, we can start sending data now:
- socket.write("GET / HTTP/1.1\nHost: google.com\n\n");
- socket.on('data', function(data) {
- console.log(data.length);
- console.log(data);
- });
-
- // PLEASE NOTE: sockets need to be resumed before any data will come in or out as they are paused right before this callback is fired.
- socket.resume();
-
- // 569
- // <Buffer 48 54 54 50 2f 31 2e 31 20 33 30 31 20 4d 6f 76 65 64 20 50 65...
- }
-});
-```
-
-### BIND Example:
-
-Sending the BIND command to a SOCKS proxy server causes the proxy server to open a new TCP port. Once this port is open, you (or another client or application) can connect to the SOCKS proxy on that TCP port, and communications will be forwarded between the two connections through the proxy itself.
-
-```javascript
-var options = {
- proxy: {
- ipaddress: "202.101.228.108",
- port: 1080,
- type: 4,
- command: "bind" // Since we are using bind, we must specify it here.
- },
- target: {
- host: "1.2.3.4", // When using bind, it's best to give an estimation of the ip that will be connecting to the newly opened tcp port on the proxy server.
- port: 1080
- }
-};
-
-Socks.createConnection(options, function(err, socket, info) {
- if (err)
- console.log(err);
- else {
- // BIND request has completed.
- // info object contains the remote ip and newly opened tcp port to connect to.
- console.log(info);
-
- // { port: 1494, host: '202.101.228.108' }
-
- socket.on('data', function(data) {
- console.log(data.length);
- console.log(data);
- });
-
- // Remember to resume the socket stream.
- socket.resume();
- }
-});
-
-```
-At this point, your original connection to the proxy server remains open, and no data will be received until a tcp connection is made to the given endpoint in the info object.
-
-As an example, I am going to connect to the endpoint with telnet:
-
-```
-Joshs-MacBook-Pro:~ Josh$ telnet 202.101.228.108 1494
- Trying 202.101.228.108...
- Connected to 202.101.228.108.
- Escape character is '^]'.
- hello
- aaaaaaaaa
-```
-
-Note that this connection to the newly bound port does not need to go through the SOCKS handshake.
-
-Back at our original connection we see that we have received some new data:
-
-```
-8
-<Buffer 00 5a ca 61 43 a8 09 01> // This first piece of information can be ignored.
-
-7
-<Buffer 68 65 6c 6c 6f 0d 0a> // Hello <\r\n (enter key)>
-
-11
-<Buffer 61 61 61 61 61 61 61 61 61 0d 0a> // aaaaaaaaa <\r\n (enter key)>
-```
-
-As you can see the data entered in the telnet terminal is routed through the SOCKS proxy and back to the original connection that was made to the proxy.
-
-**Note** Please pay close attention to the first piece of data that was received.
-
-```
-<Buffer 00 5a ca 61 43 a8 09 01>
-
- [005a] [PORT:2] [IP:4]
-```
-
-This piece of data is technically part of the SOCKS BIND specification, but because of design decisions made to keep this library simple to use, you will need to ignore and/or handle this initial packet that is received when a connection is made to the newly opened port (see the sketch below).
-
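-A minimal sketch of that (not from the original docs; the variable names are hypothetical) is to treat the first `data` event on the original proxy connection as the BIND reply and only handle real payload data afterwards:
-
-```javascript
-var sawBindReply = false;
-
-socket.on('data', function (data) {
-  if (!sawBindReply) {
-    // The first chunk is the SOCKS BIND reply described above; ignore it.
-    sawBindReply = true;
-    return;
-  }
-  // From here on, `data` is traffic relayed from the peer that connected
-  // to the newly opened port on the proxy.
-  console.log('relayed %d bytes', data.length);
-});
-
-socket.resume();
-```
-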
-### Associate Example:
-The associate command sets up a UDP relay for the remote SOCKS proxy server to relay UDP packets to the remote host of your choice.
-
-```javascript
-var options = {
- proxy: {
- ipaddress: "202.101.228.108",
- port: 1080,
- type: 5,
- command: "associate" // Since we are using associate, we must specify it here.
- },
- target: {
- // When using associate, either set the ip and port to 0.0.0.0:0 or the expected source of incoming udp packets.
- // Note: Some SOCKS servers MAY block associate requests with 0.0.0.0:0 endpoints.
- // Note: ipv4, ipv6, and hostnames are supported here.
- host: "0.0.0.0",
- port: 0
- }
-};
-
-
-Socks.createConnection(options, function(err, socket, info) {
- if (err)
- console.log(err);
- else {
- // Associate request has completed.
- // info object contains the remote ip and udp port to send UDP packets to.
- console.log(info);
- // { port: 42803, host: '202.101.228.108' }
-
- var udp = dgram.createSocket('udp4');
-
- // In this example we are going to send "Hello" to 1.2.3.4:2323 through the SOCKS proxy.
-
- var pack = Socks.createUDPFrame({ host: "1.2.3.4", port: 2323}, new Buffer("hello"));
-
- // Send Packet to Proxy UDP endpoint given in the info object.
- udp.send(pack, 0, pack.length, info.port, info.host);
- }
-});
-
-```
-Now, assuming that the associate request went through correctly, anything typed on stdin will first be sent to the SOCKS proxy at the endpoint provided in the info object. Once the SOCKS proxy receives it, it will then forward the actual UDP packet on to the host you wanted (see the sketch below).
-
-
-1.2.3.4:2323 should now receive our relayed UDP packet from 202.101.228.108 (SOCKS proxy)
-```
-// <Buffer 68 65 6c 6c 6f>
-```
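-
-A minimal sketch of the stdin relay described above (not from the original docs; it assumes the `udp` socket and `info` object from the associate example):
-
-```javascript
-process.stdin.on('data', function (chunk) {
-  // Wrap each chunk of stdin in a SOCKS UDP frame addressed to 1.2.3.4:2323...
-  var frame = Socks.createUDPFrame({ host: "1.2.3.4", port: 2323 }, chunk);
-  // ...and send it to the proxy's UDP relay endpoint from the info object.
-  udp.send(frame, 0, frame.length, info.port, info.host);
-});
-```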
-
-## Using socks as an HTTP Agent
-
-You can use socks as an HTTP agent, which will relay all of your HTTP
-connections through the SOCKS server.
-
-The options object that `Socks.Agent` accepts is the same as for `Socks.createConnection`; you don't need to set a target, since you define it in the `http.request` or `http.get` call.
-
-The second argument is a boolean which indicates whether the remote endpoint requires TLS.
-
-```javascript
-var socksAgent = new Socks.Agent({
- proxy: {
- ipaddress: "202.101.228.108",
- port: 1080,
- type: 5,
- }},
- true, // we are connecting to an HTTPS server, false for an HTTP server
- false // rejectUnauthorized option passed to tls.connect(). Only when secure is set to true
-);
-
-http.get({ hostname: 'google.com', port: '443', agent: socksAgent}, function (res) {
- // Connection header by default is keep-alive, we have to manually end the socket
- socksAgent.encryptedSocket.end();
-});
-```
-
-# API Reference:
-
-There are only three exported functions that you will ever need to use.
-
-### Socks.createConnection( options, callback(err, socket, info) )
-> `Object` **Object containing options to use when creating this connection**
-
-> `function` **Callback that is called when connection completes or errors**
-
-Options:
-
-```javascript
-var options = {
-
- // Information about proxy server
- proxy: {
- // IP Address of Proxy (Required)
- ipaddress: "1.2.3.4",
-
- // TCP Port of Proxy (Required)
- port: 1080,
-
- // Proxy Type [4, 5] (Required)
- // Note: 4 works for both 4 and 4a.
- type: 4,
-
- // SOCKS Connection Type (Optional)
- // - defaults to 'connect'
-
- // 'connect' - establishes a regular SOCKS connection to the target host.
- // 'bind' - establishes an open tcp port on the SOCKS for another client to connect to.
- // 'associate' - establishes a udp association relay on the SOCKS server.
- command: "connect",
-
-
- // SOCKS 4 Specific:
-
- // UserId used when making a SOCKS 4/4a request. (Optional)
- userid: "someuserid",
-
- // SOCKS 5 Specific:
-
- // Authentication used for SOCKS 5 (when it's required) (Optional)
- authentication: {
- username: "Josh",
- password: "somepassword"
- }
- },
-
- // Information about target host and/or expected client of a bind association. (Required)
- target: {
- // When using 'connect': IP Address or hostname (4a and 5 only) of a target to connect to.
- // When using 'bind': IP Address of the expected client that will connect to the newly open tcp port.
- // When using 'associate': IP Address and Port of the expected client that will send UDP packets to this UDP association relay.
-
- // Note:
- // When using SOCKS 4, only an ipv4 address can be used.
- // When using SOCKS 4a, an ipv4 address OR a hostname can be used.
- // When using SOCKS 5, ipv4, ipv6, or a hostname can be used.
- host: "1.2.3.4",
-
- // TCP port of target to connect to.
- port: 1080
- },
-
- // Amount of time to wait for a connection to be established. (Optional)
- // - defaults to 10000ms (10 seconds)
- timeout: 10000
-};
-```
-Callback:
-
-```javascript
-
-// err: If an error occurs, err will be an Error object, otherwise null.
-// socket: Socket with established connection to your target host.
-// info: If using BIND or associate, this will be the remote endpoint to use.
-
-function(err, socket, info) {
- // Hopefully no errors :-)
-}
-```
-
-### Socks.createUDPFrame( target, data, [frame] )
-> `Object` **Target host object containing destination for UDP packet**
-
-> `Buffer` **Data Buffer to send in the UDP packet**
-
-> `Number` **Frame number in UDP packet. (defaults to 0)**
-
-Creates a UDP packet frame for using with UDP association relays.
-
-returns `Buffer` The completed UDP packet container to be sent to the proxy for forwarding.
-
-target:
-```javascript
-
-// Target host information for where the UDP packet should be sent.
-var target =
- {
- // ipv4, ipv6, or hostname for where to have the proxy send the UDP packet.
- host: "1.2.3.4",
-
- // UDP port to send the UDP packet to.
- port: 2323
- }
-
-```
-
-### Socks.Agent( options, tls )
-> `Object` **Object containing options to use when creating this connection (see above in createConnection)**
-
-> `boolean` **Boolean indicating if we upgrade the connection to TLS on the socks server**
-
-
-# Further Reading:
-Please read the SOCKS 5 specifications for more information on how to use BIND and Associate.
-http://www.ietf.org/rfc/rfc1928.txt
-
-# License
-This work is licensed under the [MIT license](http://en.wikipedia.org/wiki/MIT_License).
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/associate.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/associate.js
deleted file mode 100644
index 82d6afa97b..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/associate.js
+++ /dev/null
@@ -1,33 +0,0 @@
-var Socks = require('../index.js');
-var dgram = require('dgram');
-
-var options = {
- proxy: {
- ipaddress: "202.101.228.108",
- port: 1080,
- type: 5,
- command: 'associate'
- },
-
- target: {
- host: "0.0.0.0",
- port: 0
- }
-};
-
-Socks.createConnection(options, function(err, socket, info) {
- if (err)
- console.log(err);
- else {
- console.log("Connected");
-
- // Associate request completed.
- // Now we can send properly formed UDP packet frames to this endpoint for forwarding:
- console.log(info);
- // { port: 4381, host: '202.101.228.108' }
-
- var udp = dgram.createSocket('udp4');
- var packet = Socks.createUDPFrame({ host: "1.2.3.4", port: 5454}, new Buffer("Hello"));
- udp.send(packet, 0, packet.length, info.port, info.host);
- }
-}); \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/bind.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/bind.js
deleted file mode 100644
index 4410dd1def..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/bind.js
+++ /dev/null
@@ -1,30 +0,0 @@
-var Socks = require('../index.js');
-
-var options = {
- proxy: {
- ipaddress: "202.101.228.108",
- port: 1080,
- type: 5,
- command: 'bind'
- },
-
- target: {
- host: "0.0.0.0",
- port: 0
- }
-};
-
-Socks.createConnection(options, function(err, socket, info) {
- if (err)
- console.log(err);
- else {
- console.log("Connected");
-
- // BIND request completed, now a tcp client should connect to this endpoint:
- console.log(info);
- // { port: 3334, host: '202.101.228.108' }
-
- // Resume! You need to!
- socket.resume();
- }
-}); \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/connect.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/connect.js
deleted file mode 100644
index 528ad4d416..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/examples/connect.js
+++ /dev/null
@@ -1,31 +0,0 @@
-var Socks = require('../index.js');
-
-var options = {
- proxy: {
- ipaddress: "31.193.133.9",
- port: 1081,
- type: 5 // (4 or 5)
- },
-
- target: {
- host: "173.194.33.103", // (google.com)
- port: 80
- }
-};
-
-Socks.createConnection(options, function (err, socket, info) {
- if (err)
- console.log(err);
- else {
- console.log("Connected");
-
- socket.on('data', function (data) {
- // do something with incoming data
- });
-
- // Please remember that sockets need to be resumed before any data will come in.
- socket.resume();
-
- // We can do whatever we want with the socket now.
- }
-}); \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/index.js
deleted file mode 100644
index 29331d4a1c..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-var SocksClient = require('./lib/socks-client.js');
-var SocksAgent = require('./lib/socks-agent.js');
-
-exports.createConnection = SocksClient.createConnection;
-exports.createUDPFrame = SocksClient.createUDPFrame;
-exports.Agent = SocksAgent.Agent;
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-agent.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-agent.js
deleted file mode 100644
index db1c301ec9..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-agent.js
+++ /dev/null
@@ -1,108 +0,0 @@
-var tls = require('tls');
-var inherits = require('util').inherits;
-var EventEmitter = require('events').EventEmitter;
-var SocksClient = require('./socks-client.js');
-
-function SocksAgent(options, secure, rejectUnauthorized) {
- this.options = options;
- this.secure = secure || false;
- this.rejectUnauthorized = rejectUnauthorized;
-
- if (this.rejectUnauthorized === undefined) {
- this.rejectUnauthorized = true;
- }
-}
-
-inherits(SocksAgent, EventEmitter);
-
-SocksAgent.prototype.createConnection = function(req, opts, fn) {
- var handler = fn, host, self = this;
-
- this.options.target = this.options.target || {};
-
- if (!this.options.target.host) {
- this.options.target.host = opts.host;
- }
-
- if (!this.options.target.port) {
- this.options.target.port = opts.port;
- }
-
- host = this.options.target.host;
-
- if (this.secure) {
- handler = function(err, socket, info) {
- var options, cleartext;
-
- if (err) {
- return fn(err);
- }
-
- // save encrypted socket
- self.encryptedSocket = socket;
-
- options = {
- socket: socket,
- servername: host,
- rejectUnauthorized: self.rejectUnauthorized
- };
-
- cleartext = tls.connect(options, function (err) {
- return fn(err, this);
- });
- cleartext.on('error', fn);
-
- socket.resume();
- }
- }
-
- SocksClient.createConnection(this.options, handler);
-};
-
-/**
- * @see https://www.npmjs.com/package/agent-base
- */
-SocksAgent.prototype.addRequest = function(req, host, port, localAddress) {
- var opts;
- if ('object' === typeof host) {
- // >= v0.11.x API
- opts = host;
- if (opts.host && opts.path) {
- // if both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete opts.path;
- }
- } else {
- // <= v0.10.x API
- opts = { host: host, port: port };
- if (null !== localAddress) {
- opts.localAddress = localAddress;
- }
- }
-
- var sync = true;
-
- this.createConnection(req, opts, function (err, socket) {
- function emitErr () {
- req.emit('error', err);
- }
- if (err) {
- if (sync) {
- // need to defer the "error" event, when sync, because by now the `req`
- // instance hasn't event been passed back to the user yet...
- process.nextTick(emitErr);
- } else {
- emitErr();
- }
- } else {
- req.onSocket(socket);
- //have to resume this socket when node 12
- socket.resume();
- }
- });
-
- sync = false;
-};
-
-exports.Agent = SocksAgent;
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-client.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-client.js
deleted file mode 100644
index 4a31f62c32..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/lib/socks-client.js
+++ /dev/null
@@ -1,306 +0,0 @@
-var net = require('net');
-var ip = require('ip');
-var SmartBuffer = require('smart-buffer');
-
-(function () {
-
- var COMMAND = {
- Connect: 0x01,
- Bind: 0x02,
- Associate: 0x03
- };
-
- var SOCKS4_RESPONSE = {
- Granted: 0x5A,
- Failed: 0x5B,
- Rejected: 0x5C,
- RejectedIdent: 0x5D
- };
-
- var SOCKS5_AUTH = {
- NoAuth: 0x00,
- GSSApi: 0x01,
- UserPass: 0x02
- };
-
- var SOCKS5_RESPONSE = {
- Granted: 0x00,
- Failure: 0x01,
- NotAllowed: 0x02,
- NetworkUnreachable: 0x03,
- HostUnreachable: 0x04,
- ConnectionRefused: 0x05,
- TTLExpired: 0x06,
- CommandNotSupported: 0x07,
- AddressNotSupported: 0x08
- };
-
-
- exports.createConnection = function (options, callback) {
- var socket = new net.Socket(), finished = false, buff = new SmartBuffer();
-
- // Defaults
- options.timeout = options.timeout || 10000;
- options.proxy.command = commandFromString(options.proxy.command);
- options.proxy.userid = options.proxy.userid || "";
-
- var auth = options.proxy.authentication || {};
- auth.username = auth.username || "";
- auth.password = auth.password || "";
-
- options.proxy.authentication = auth;
-
- // Connect & negotiation timeout
- function onTimeout() {
- finish(new Error("Connection Timed Out"), socket, null, callback);
- }
- socket.setTimeout(options.timeout, onTimeout);
-
- // Socket events
- socket.once('close', function () {
- finish(new Error("Socket Closed"), socket, null, callback);
- });
-
- socket.once('error', function (err) {
- });
-
- socket.once('connect', function () {
- if (options.proxy.type === 4) {
- negotiateSocks4(options, socket, callback);
- } else if (options.proxy.type === 5) {
- negotiateSocks5(options, socket, callback);
- } else {
- throw new Error("Please specify a proxy type in options.proxy.type");
- }
- });
-
- socket.connect(options.proxy.port, options.proxy.ipaddress);
-
-
- // 4/4a (connect, bind) - Supports domains & ipaddress
- function negotiateSocks4(options, socket, callback) {
- buff.writeUInt8(0x04);
- buff.writeUInt8(options.proxy.command);
- buff.writeUInt16BE(options.target.port);
-
- // ipv4 or domain?
- if (net.isIPv4(options.target.host)) {
- buff.writeBuffer(ip.toBuffer(options.target.host));
- buff.writeStringNT(options.proxy.userid);
- } else {
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x01);
- buff.writeStringNT(options.proxy.userid);
- buff.writeStringNT(options.target.host);
- }
-
- socket.once('data', receivedResponse);
- socket.write(buff.toBuffer());
-
- function receivedResponse(data) {
- socket.pause();
- if (data.length === 8 && data[1] === SOCKS4_RESPONSE.Granted) {
-
- if (options.proxy.command === COMMAND.Bind) {
- buff.clear();
- buff.writeBuffer(data);
- buff.skip(2);
-
- var info = {
- port: buff.readUInt16BE(),
- host: buff.readUInt32BE()
- };
-
- if (info.host === 0) {
- info.host = options.proxy.ipaddress;
- } else {
- info.host = ip.fromLong(info.host);
- }
-
- finish(null, socket, info, callback);
- } else {
- finish(null, socket, null, callback);
- }
-
- } else {
- finish(new Error("Rejected (" + data[1] + ")"), socket, null, callback);
- }
- }
- }
-
- // Socks 5 (connect, bind, associate) - Supports domains and ipv4, ipv6.
- function negotiateSocks5(options, socket, callback) {
- buff.writeUInt8(0x05);
- buff.writeUInt8(2);
- buff.writeUInt8(SOCKS5_AUTH.NoAuth);
- buff.writeUInt8(SOCKS5_AUTH.UserPass);
-
- socket.once('data', handshake);
- socket.write(buff.toBuffer());
-
- function handshake(data) {
- if (data.length !== 2) {
- finish(new Error("Negotiation Error"), socket, null, callback);
- } else if (data[0] !== 0x05) {
- finish(new Error("Negotiation Error (invalid version)"), socket, null, callback);
- } else if (data[1] === 0xFF) {
- finish(new Error("Negotiation Error (unacceptable authentication)"), socket, null, callback);
- } else {
- if (data[1] === SOCKS5_AUTH.NoAuth) {
- sendRequest();
- } else if (data[1] === SOCKS5_AUTH.UserPass) {
- sendAuthentication(options.proxy.authentication);
- } else {
- finish(new Error("Negotiation Error (unknown authentication type)"), socket, null, callback);
- }
- }
- }
-
- function sendAuthentication(authinfo) {
- buff.clear();
- buff.writeUInt8(0x01);
- buff.writeUInt8(Buffer.byteLength(authinfo.username));
- buff.writeString(authinfo.username);
- buff.writeUInt8(Buffer.byteLength(authinfo.password));
- buff.writeString(authinfo.password);
-
- socket.once('data', authenticationResponse);
- socket.write(buff.toBuffer());
-
- function authenticationResponse(data) {
- if (data.length === 2 && data[1] === 0x00) {
- sendRequest();
- } else {
- finish(new Error("Negotiation Error (authentication failed)"), socket, null, callback);
- }
- }
- }
-
- function sendRequest() {
- buff.clear();
- buff.writeUInt8(0x05);
- buff.writeUInt8(options.proxy.command);
- buff.writeUInt8(0x00);
-
- // ipv4, ipv6, domain?
- if (net.isIPv4(options.target.host)) {
- buff.writeUInt8(0x01);
- buff.writeBuffer(ip.toBuffer(options.target.host));
- } else if (net.isIPv6(options.target.host)) {
- buff.writeUInt8(0x04);
- buff.writeBuffer(ip.toBuffer(options.target.host));
- } else {
- buff.writeUInt8(0x03);
- buff.writeUInt8(options.target.host.length);
- buff.writeString(options.target.host);
- }
- buff.writeUInt16BE(options.target.port);
-
- socket.once('data', receivedResponse);
- socket.write(buff.toBuffer());
- }
-
- function receivedResponse(data) {
- socket.pause();
- if (data.length < 4) {
- finish(new Error("Negotiation Error"), socket, null, callback);
- } else if (data[0] === 0x05 && data[1] === SOCKS5_RESPONSE.Granted) {
- if (options.proxy.command === COMMAND.Connect) {
- finish(null, socket, null, callback);
- } else if (options.proxy.command === COMMAND.Bind || options.proxy.command === COMMAND.Associate) {
- buff.clear();
- buff.writeBuffer(data);
- buff.skip(3);
-
- var info = {};
- var addrtype = buff.readUInt8();
-
- try {
-
- if (addrtype === 0x01) {
- info.host = buff.readUInt32BE();
- if (info.host === 0)
- info.host = options.proxy.ipaddress;
- else
- info.host = ip.fromLong(info.host);
- } else if (addrtype === 0x03) {
- var len = buff.readUInt8();
- info.host = buff.readString(len);
- } else if (addrtype === 0x04) {
- info.host = buff.readBuffer(16);
- } else {
- finish(new Error("Negotiation Error (invalid host address)"), socket, null, callback);
- }
- info.port = buff.readUInt16BE();
-
- finish(null, socket, info, callback);
- } catch (ex) {
- finish(new Error("Negotiation Error (missing data)"), socket, null, callback);
- }
- }
- } else {
- finish(new Error("Negotiation Error (" + data[1] + ")"), socket, null, callback);
- }
- }
- }
-
- function finish(err, socket, info, callback) {
- socket.setTimeout(0, onTimeout);
- if (!finished) {
- finished = true;
-
- if (buff instanceof SmartBuffer)
- buff.destroy();
-
- if (err && socket instanceof net.Socket) {
- socket.removeAllListeners('close');
- socket.removeAllListeners('timeout');
- socket.removeAllListeners('data');
- socket.destroy();
- socket = null;
- }
-
- callback(err, socket, info);
- }
- }
-
- function commandFromString(str) {
- var result = COMMAND.Connect;
-
- if (str === "connect") {
- result = COMMAND.Connect;
- } else if (str === 'associate') {
- result = COMMAND.Associate;
- } else if (str === 'bind') {
- result = COMMAND.Bind;
- }
-
- return result;
- }
- };
-
-
- exports.createUDPFrame = function (target, data, frame) {
- var buff = new SmartBuffer();
- buff.writeUInt16BE(0);
- buff.writeUInt8(frame || 0x00);
-
- if (net.isIPv4(target.host)) {
- buff.writeUInt8(0x01);
- buff.writeUInt32BE(ip.toLong(target.host));
- } else if (net.isIPv6(target.host)) {
- buff.writeUInt8(0x04);
- buff.writeBuffer(ip.toBuffer(target.host));
- } else {
- buff.writeUInt8(0x03);
- buff.writeUInt8(Buffer.byteLength(target.host));
- buff.writeString(target.host);
- }
-
- buff.writeUInt16BE(target.port);
- buff.writeBuffer(data);
- return buff.toBuffer();
- };
-})();
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/package.json
deleted file mode 100644
index e4884cd472..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/socks/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
- "_from": "socks@^1.1.10",
- "_id": "socks@1.1.10",
- "_inBundle": false,
- "_integrity": "sha1-W4t/x8jzQcU+0FbpKbe/Tei6e1o=",
- "_location": "/npm-registry-fetch/socks",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "socks@^1.1.10",
- "name": "socks",
- "escapedName": "socks",
- "rawSpec": "^1.1.10",
- "saveSpec": null,
- "fetchSpec": "^1.1.10"
- },
- "_requiredBy": [
- "/npm-registry-fetch/socks-proxy-agent"
- ],
- "_resolved": "https://registry.npmjs.org/socks/-/socks-1.1.10.tgz",
- "_shasum": "5b8b7fc7c8f341c53ed056e929b7bf4de8ba7b5a",
- "_spec": "socks@^1.1.10",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch/node_modules/socks-proxy-agent",
- "author": {
- "name": "Josh Glazebrook"
- },
- "bugs": {
- "url": "https://github.com/JoshGlazebrook/socks/issues"
- },
- "bundleDependencies": false,
- "contributors": [
- {
- "name": "Samuel Gordalina"
- }
- ],
- "dependencies": {
- "ip": "^1.1.4",
- "smart-buffer": "^1.0.13"
- },
- "deprecated": false,
- "description": "A SOCKS proxy client supporting SOCKS 4, 4a, and 5. (also supports BIND/Associate)",
- "engines": {
- "node": ">= 0.10.0",
- "npm": ">= 1.3.5"
- },
- "homepage": "https://github.com/JoshGlazebrook/socks",
- "keywords": [
- "socks",
- "proxy",
- "client",
- "tor",
- "bind",
- "associate",
- "socks 4",
- "socks 4a",
- "socks 5",
- "agent"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "socks",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/JoshGlazebrook/socks.git"
- },
- "version": "1.1.10"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/CHANGELOG.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/CHANGELOG.md
deleted file mode 100644
index 5c06894881..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/CHANGELOG.md
+++ /dev/null
@@ -1,256 +0,0 @@
-# Change Log
-
-All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-
-<a name="5.3.0"></a>
-# [5.3.0](https://github.com/zkat/ssri/compare/v5.2.4...v5.3.0) (2018-03-13)
-
-
-### Features
-
-* **checkData:** optionally throw when checkData fails ([bf26b84](https://github.com/zkat/ssri/commit/bf26b84))
-
-
-
-<a name="5.2.4"></a>
-## [5.2.4](https://github.com/zkat/ssri/compare/v5.2.3...v5.2.4) (2018-02-16)
-
-
-
-<a name="5.2.3"></a>
-## [5.2.3](https://github.com/zkat/ssri/compare/v5.2.2...v5.2.3) (2018-02-16)
-
-
-### Bug Fixes
-
-* **hashes:** filter hash priority list by available hashes ([2fa30b8](https://github.com/zkat/ssri/commit/2fa30b8))
-* **integrityStream:** dedupe algorithms to generate ([d56c654](https://github.com/zkat/ssri/commit/d56c654))
-
-
-
-<a name="5.2.2"></a>
-## [5.2.2](https://github.com/zkat/ssri/compare/v5.2.1...v5.2.2) (2018-02-14)
-
-
-### Bug Fixes
-
-* **security:** tweak strict SRI regex ([#10](https://github.com/zkat/ssri/issues/10)) ([d0ebcdc](https://github.com/zkat/ssri/commit/d0ebcdc))
-
-
-
-<a name="5.2.1"></a>
-## [5.2.1](https://github.com/zkat/ssri/compare/v5.2.0...v5.2.1) (2018-02-06)
-
-
-
-<a name="5.2.0"></a>
-# [5.2.0](https://github.com/zkat/ssri/compare/v5.1.0...v5.2.0) (2018-02-06)
-
-
-### Features
-
-* **match:** add integrity.match() ([3c49cc4](https://github.com/zkat/ssri/commit/3c49cc4))
-
-
-
-<a name="5.1.0"></a>
-# [5.1.0](https://github.com/zkat/ssri/compare/v5.0.0...v5.1.0) (2018-01-18)
-
-
-### Bug Fixes
-
-* **checkStream:** integrityStream now takes opts.integrity algos into account ([d262910](https://github.com/zkat/ssri/commit/d262910))
-
-
-### Features
-
-* **sha3:** do some guesswork about upcoming sha3 ([7fdd9df](https://github.com/zkat/ssri/commit/7fdd9df))
-
-
-
-<a name="5.0.0"></a>
-# [5.0.0](https://github.com/zkat/ssri/compare/v4.1.6...v5.0.0) (2017-10-23)
-
-
-### Features
-
-* **license:** relicense to ISC (#9) ([c82983a](https://github.com/zkat/ssri/commit/c82983a))
-
-
-### BREAKING CHANGES
-
-* **license:** the license has been changed from CC0-1.0 to ISC.
-
-
-
-<a name="4.1.6"></a>
-## [4.1.6](https://github.com/zkat/ssri/compare/v4.1.5...v4.1.6) (2017-06-07)
-
-
-### Bug Fixes
-
-* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/zkat/ssri/commit/0b1bcbe))
-
-
-
-<a name="4.1.5"></a>
-## [4.1.5](https://github.com/zkat/ssri/compare/v4.1.4...v4.1.5) (2017-06-05)
-
-
-### Bug Fixes
-
-* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/zkat/ssri/commit/fb1293e))
-
-
-
-<a name="4.1.4"></a>
-## [4.1.4](https://github.com/zkat/ssri/compare/v4.1.3...v4.1.4) (2017-05-31)
-
-
-### Bug Fixes
-
-* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/zkat/ssri/commit/513df4e))
-
-
-
-<a name="4.1.3"></a>
-## [4.1.3](https://github.com/zkat/ssri/compare/v4.1.2...v4.1.3) (2017-05-24)
-
-
-### Bug Fixes
-
-* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/zkat/ssri/commit/c2c262b))
-
-
-
-<a name="4.1.2"></a>
-## [4.1.2](https://github.com/zkat/ssri/compare/v4.1.1...v4.1.2) (2017-04-18)
-
-
-### Bug Fixes
-
-* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/zkat/ssri/commit/b1c4805))
-
-
-
-<a name="4.1.1"></a>
-## [4.1.1](https://github.com/zkat/ssri/compare/v4.1.0...v4.1.1) (2017-04-12)
-
-
-### Bug Fixes
-
-* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/zkat/ssri/commit/fab470e))
-
-
-
-<a name="4.1.0"></a>
-# [4.1.0](https://github.com/zkat/ssri/compare/v4.0.0...v4.1.0) (2017-04-07)
-
-
-### Features
-
-* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/zkat/ssri/commit/96f52ad))
-
-
-
-<a name="4.0.0"></a>
-# [4.0.0](https://github.com/zkat/ssri/compare/v3.0.2...v4.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **integrity:** should have changed the error code before. oops ([8381afa](https://github.com/zkat/ssri/commit/8381afa))
-
-
-### BREAKING CHANGES
-
-* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors
-
-
-
-<a name="3.0.2"></a>
-## [3.0.2](https://github.com/zkat/ssri/compare/v3.0.1...v3.0.2) (2017-04-03)
-
-
-
-<a name="3.0.1"></a>
-## [3.0.1](https://github.com/zkat/ssri/compare/v3.0.0...v3.0.1) (2017-04-03)
-
-
-### Bug Fixes
-
-* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/zkat/ssri/commit/a6ac6d0))
-
-
-
-<a name="3.0.0"></a>
-# [3.0.0](https://github.com/zkat/ssri/compare/v2.0.0...v3.0.0) (2017-04-03)
-
-
-### Bug Fixes
-
-* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/zkat/ssri/commit/d04aa1f))
-
-
-### Features
-
-* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/zkat/ssri/commit/2301e74))
-* **fromHex:** ssri.fromHex to make it easier to generate them from hex values ([049b89e](https://github.com/zkat/ssri/commit/049b89e))
-* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/zkat/ssri/commit/a9f021c))
-* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/zkat/ssri/commit/85208ba))
-* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/zkat/ssri/commit/1b29e6f))
-* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/zkat/ssri/commit/fd23e1b))
-* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/zkat/ssri/commit/c8ddf48))
-* **pickAlgorithm:** Integrity#pickAlgorithm() added ([b97a796](https://github.com/zkat/ssri/commit/b97a796))
-* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/zkat/ssri/commit/02ed1ad))
-
-
-### BREAKING CHANGES
-
-* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`.
-* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream.
-
-To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same.
-* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match.
-
-
-
-<a name="2.0.0"></a>
-# [2.0.0](https://github.com/zkat/ssri/compare/v1.0.0...v2.0.0) (2017-03-24)
-
-
-### Bug Fixes
-
-* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/zkat/ssri/commit/122a32c))
-
-
-### Features
-
-* **api:** added serialize alias for unparse ([999b421](https://github.com/zkat/ssri/commit/999b421))
-* **concat:** add Integrity#concat() ([cae12c7](https://github.com/zkat/ssri/commit/cae12c7))
-* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/zkat/ssri/commit/58c18f7))
-* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/zkat/ssri/commit/3f0b64c))
-* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/zkat/ssri/commit/4acad30))
-* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/zkat/ssri/commit/f72e658))
-
-
-### BREAKING CHANGES
-
-* **pickAlgo:** ssri will prioritize specific hashes now
-* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead.
-* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`.
-
-
-
-<a name="1.0.0"></a>
-# 1.0.0 (2017-03-23)
-
-
-### Features
-
-* **api:** implemented initial api ([4fbb16b](https://github.com/zkat/ssri/commit/4fbb16b))
-
-
-### BREAKING CHANGES
-
-* **api:** Initial API established.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/LICENSE.md
deleted file mode 100644
index 8d28acf866..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/README.md b/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/README.md
deleted file mode 100644
index a6c07e7409..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/README.md
+++ /dev/null
@@ -1,488 +0,0 @@
-# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/zkat/ssri.svg)](https://travis-ci.org/zkat/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/ssri?svg=true)](https://ci.appveyor.com/project/zkat/ssri) [![Coverage Status](https://coveralls.io/repos/github/zkat/ssri/badge.svg?branch=latest)](https://coveralls.io/github/zkat/ssri?branch=latest)
-
-[`ssri`](https://github.com/zkat/ssri), short for Standard Subresource
-Integrity, is a Node.js utility for parsing, manipulating, serializing,
-generating, and verifying [Subresource
-Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes.
-
-## Install
-
-`$ npm install --save ssri`
-
-## Table of Contents
-
-* [Example](#example)
-* [Features](#features)
-* [Contributing](#contributing)
-* [API](#api)
- * Parsing & Serializing
- * [`parse`](#parse)
- * [`stringify`](#stringify)
- * [`Integrity#concat`](#integrity-concat)
- * [`Integrity#toString`](#integrity-to-string)
- * [`Integrity#toJSON`](#integrity-to-json)
- * [`Integrity#match`](#integrity-match)
- * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm)
- * [`Integrity#hexDigest`](#integrity-hex-digest)
- * Integrity Generation
- * [`fromHex`](#from-hex)
- * [`fromData`](#from-data)
- * [`fromStream`](#from-stream)
- * [`create`](#create)
- * Integrity Verification
- * [`checkData`](#check-data)
- * [`checkStream`](#check-stream)
- * [`integrityStream`](#integrity-stream)
-
-### Example
-
-```javascript
-const ssri = require('ssri')
-
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Parsing and serializing
-const parsed = ssri.parse(integrity)
-ssri.stringify(parsed) // === integrity (works on non-Integrity objects)
-parsed.toString() // === integrity
-
-// Async stream functions
-ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...)
-ssri.fromStream(fs.createReadStream('./my-file')).then(sri => {
- sri.toString() === integrity
-})
-fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri))
-
-// Sync data functions
-ssri.fromData(fs.readFileSync('./my-file')) // === parsed
-ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512'
-```
-
-### Features
-
-* Parses and stringifies SRI strings.
-* Generates SRI strings from raw data or Streams.
-* Strict standard compliance.
-* `?foo` metadata option support.
-* Multiple entries for the same algorithm.
-* Object-based integrity hash manipulation.
-* Small footprint: minimal dependencies, concise implementation.
-* Full test coverage.
-* Customizable algorithm picker.
-
-### Contributing
-
-The ssri team enthusiastically welcomes contributions and project participation!
-There's a bunch of things you can do if you want to contribute! The [Contributor
-Guide](CONTRIBUTING.md) has all the information you need for everything from
-reporting bugs to contributing entire new features. Please don't hesitate to
-jump in if you'd like to, or even ask us questions if something isn't clear.
-
-### API
-
-#### <a name="parse"></a> `> ssri.parse(sri, [opts]) -> Integrity`
-
-Parses `sri` into an `Integrity` data structure. `sri` can be an integrity
-string, a `Hash`-like object with `digest` and `algorithm` fields and an
-optional `options` field, or an `Integrity`-like object. The resulting object
-will be an `Integrity` instance that has this shape:
-
-```javascript
-{
- 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}],
- 'sha512': [
- {algorithm: 'sha512', digest: 'c0ffee', options: []},
- {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']}
- ],
-}
-```
-
-If `opts.single` is truthy, a single `Hash` object will be returned. That is, a
-single object that looks like `{algorithm, digest, options}`, as opposed to a
-larger object with multiple of these.
-
-If `opts.strict` is truthy, the resulting object will be filtered such that
-it strictly follows the Subresource Integrity spec, throwing away any entries
-with any invalid components. This also means a restricted set of algorithms
-will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`.
-
-Strict mode is recommended if the integrity strings are intended for use in
-browsers, or in other situations where strict adherence to the spec is needed.
-
-##### Example
-
-```javascript
-ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object
-```
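-
-As a rough sketch of the options above (the outputs shown are illustrative,
-following the shape described earlier): `opts.single` yields a lone `Hash`,
-while `opts.strict` silently drops entries that don't conform to the spec.
-
-```javascript
-// `single: true` returns one Hash object instead of a full Integrity.
-ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo', {single: true})
-// -> Hash { algorithm: 'sha512', digest: '9KhgCRIx/...', options: ['foo'] }
-
-// `strict: true` filters out non-spec algorithms such as sha1.
-ssri.parse('sha1-deadbeef', {strict: true})
-// -> Integrity {} (no entries survive strict parsing)
-```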
-
-#### <a name="stringify"></a> `> ssri.stringify(sri, [opts]) -> String`
-
-This function is identical to [`Integrity#toString()`](#integrity-to-string),
-except it can be used on _any_ object that [`parse`](#parse) can handle -- that
-is, a string, a `Hash`-like, or an `Integrity`-like.
-
-The `opts.sep` option defines the string to use when joining multiple entries
-together. To be spec-compliant, this _must_ be whitespace. The default is a
-single space (`' '`).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// Useful for cleaning up input SRI strings:
-ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar')
-// -> 'sha512-foo sha384-bar'
-
-// Hash-like: only a single entry.
-ssri.stringify({
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse`
-ssri.stringify({
- 'sha512': [
- {
- algorithm: 'sha512',
- digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
- options: ['foo']
- }
- ]
-})
-// ->
-// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-```
-
-#### <a name="integrity-concat"></a> `> Integrity#concat(otherIntegrity, [opts]) -> Integrity`
-
-Concatenates an `Integrity` object with another `Integrity`-like object, or an
-integrity string.
-
-This is functionally equivalent to concatenating the string format of both
-integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string.
-
-If `opts.strict` is true, the new `Integrity` will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-// This will combine the integrity checks for two different versions of
-// your index.js file so you can use a single integrity string and serve
-// either of these to clients, from a single `<script>` tag.
-const desktopIntegrity = ssri.fromData(fs.readFileSync('./index.desktop.js'))
-const mobileIntegrity = ssri.fromData(fs.readFileSync('./index.mobile.js'))
-
-// Note that browsers (and ssri) will succeed as long as ONE of the entries
-// for the *prioritized* algorithm succeeds. That is, in order for this fallback
-// to work, both desktop and mobile *must* use the same `algorithm` values.
-desktopIntegrity.concat(mobileIntegrity)
-```
-
-#### <a name="integrity-to-string"></a> `> Integrity#toString([opts]) -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `opts.sep`, which defaults to `' '`.
-
-If you want to serialize an object that didn't come from an `ssri` function,
-use [`ssri.stringify()`](#stringify).
-
-If `opts.strict` is true, the integrity string will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
-
-ssri.parse(integrity).toString() === integrity
-```
-
-#### <a name="integrity-to-json"></a> `> Integrity#toJSON() -> String`
-
-Returns the string representation of an `Integrity` object. All hash entries
-will be concatenated in the string by `' '`.
-
-This is a convenience method so you can pass an `Integrity` object directly to `JSON.stringify`.
-For more info check out [toJSON() behavior on mdn](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior).
-
-##### Example
-
-```javascript
-const integrity = '"sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo"'
-
-JSON.stringify(ssri.parse(integrity)) === integrity
-```
-
-#### <a name="integrity-match"></a> `> Integrity#match(sri, [opts]) -> Hash | false`
-
-Returns the matching (truthy) hash if `Integrity` matches the argument passed as
-`sri`, which can be anything that [`parse`](#parse) will accept. `opts` will be
-passed through to `parse` and [`pickAlgorithm()`](#integrity-pick-algorithm).
-
-##### Example
-
-```javascript
-const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-
-ssri.parse(integrity).match(integrity)
-// Hash {
-// digest: '9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A=='
-// algorithm: 'sha512'
-// }
-
-ssri.parse(integrity).match('sha1-deadbeef')
-// false
-```
-
-#### <a name="integrity-pick-algorithm"></a> `> Integrity#pickAlgorithm([opts]) -> String`
-
-Returns the "best" algorithm from those available in the integrity object.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1').pickAlgorithm() // sha512
-```
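-
-A minimal sketch of a custom picker (the `preferSha384` name is just
-illustrative): the callback receives two of the available algorithm names and
-must return one of them.
-
-```javascript
-const preferSha384 = (a, b) => (a === 'sha384' ? a : b)
-
-ssri.parse(
-  'sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9 ' +
-  'sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg=='
-).pickAlgorithm({pickAlgorithm: preferSha384})
-// -> 'sha384'
-```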
-
-#### <a name="integrity-hex-digest"></a> `> Integrity#hexDigest() -> String`
-
-`Integrity` is assumed to be either a single-hash `Integrity` instance, or a
-`Hash` instance. Returns its `digest`, converted from base64 to its hex
-representation.
-
-##### Example
-
-```javascript
-ssri.parse('sha1-deadbeef').hexDigest() // '75e69d6de79f'
-```
-
-#### <a name="from-hex"></a> `> ssri.fromHex(hexDigest, algorithm, [opts]) -> Integrity`
-
-Creates an `Integrity` object with a single entry, based on a hex-formatted
-hash. This is a utility function to help convert existing shasums to the
-Integrity format, and is roughly equivalent to something like:
-
-```javascript
-algorithm + '-' + Buffer.from(hexDigest, 'hex').toString('base64')
-```
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to the generated integrity hash. This is a
-loosely-specified feature of SRIs, and currently has no specified semantics
-besides being `?`-separated. Use at your own risk, and probably avoid it if
-your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-If `opts.single` is true, a single `Hash` object will be returned.
-
-##### Example
-
-```javascript
-ssri.fromHex('75e69d6de79f', 'sha1').toString() // 'sha1-deadbeef'
-```
-
-#### <a name="from-data"></a> `> ssri.fromData(data, [opts]) -> Integrity`
-
-Creates an `Integrity` object from either string or `Buffer` data, calculating
-all the requested hashes and adding any specified options to the object.
-
-`opts.algorithms` determines which algorithms to generate hashes for. All
-results will be included in a single `Integrity` object. The default value for
-`opts.algorithms` is `['sha512']`. All algorithm strings must be hashes listed
-in `crypto.getHashes()` for the host Node.js platform.
-
-`opts.options` may optionally be passed in: it must be an array of option
-strings that will be added to all integrity hashes generated by `fromData`.
-This is a loosely-specified feature of SRIs, and currently has no specified
-semantics besides being `?`-separated. Use at your own risk, and probably
-avoid it if your integrity strings are meant to be used with browsers.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrityObj = ssri.fromData('foobarbaz', {
- algorithms: ['sha256', 'sha384', 'sha512']
-})
-integrityObj.toString({sep: '\n'})
-// ->
-// sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=
-// sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
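-
-As a small, hedged illustration of `opts.options` (the `foo` option string is
-an arbitrary example value), each option is appended to every generated hash:
-
-```javascript
-ssri.fromData('foobarbaz', {algorithms: ['sha256'], options: ['foo']}).toString()
-// -> 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=?foo'
-```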
-
-#### <a name="from-stream"></a> `> ssri.fromStream(stream, [opts]) -> Promise<Integrity>`
-
-Returns a Promise of an Integrity object calculated by reading data from
-a given `stream`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-Additionally, `opts.Promise` may be passed in to inject a Promise library of
-choice. By default, ssri will use Node's built-in Promises.
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-ssri.fromStream(fs.createReadStream('index.js'), {
- algorithms: ['sha1', 'sha512']
-}).then(integrity => {
- return ssri.checkStream(fs.createReadStream('index.js'), integrity)
-}) // succeeds
-```
-
-#### <a name="create"></a> `> ssri.create([opts]) -> <Hash>`
-
-Returns a Hash object with `update(<Buffer or string>[, enc])` and `digest()`
-methods, mirroring the interface of [crypto class Hash](https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash).
-`digest()` accepts no arguments and returns an `Integrity` object calculated
-from the data passed to `update()`.
-
-It accepts both `opts.algorithms` and `opts.options`, which are documented as
-part of [`ssri.fromData`](#from-data).
-
-If `opts.strict` is true, the integrity object will be created using strict
-parsing rules. See [`ssri.parse`](#parse).
-
-##### Example
-
-```javascript
-const integrity = ssri.create().update('foobarbaz').digest()
-integrity.toString()
-// ->
-// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
-```
-
-#### <a name="check-data"></a> `> ssri.checkData(data, sri, [opts]) -> Hash|false`
-
-Verifies `data` integrity against an `sri` argument. `data` may be either a
-`String` or a `Buffer`, and `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-If verification succeeds, `checkData` will return the `Hash` that matched (a
-truthy value). Otherwise, it will return `false`.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-If `opts.error` is true, and verification fails, `checkData` will throw either
-an `EBADSIZE` or an `EINTEGRITY` error, instead of just returning false.
-
-##### Example
-
-```javascript
-const data = fs.readFileSync('index.js')
-ssri.checkData(data, ssri.fromData(data)) // -> Hash { algorithm: 'sha512', ... }
-ssri.checkData(data, 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0')
-ssri.checkData(data, 'sha1-BaDDigEST') // -> false
-ssri.checkData(data, 'sha1-BaDDigEST', {error: true}) // -> Error! EINTEGRITY
-```
-
-#### <a name="check-stream"></a> `> ssri.checkStream(stream, sri, [opts]) -> Promise<Hash>`
-
-Verifies the contents of `stream` against an `sri` argument. `stream` will be
-consumed in its entirety by this process. `sri` can be any subresource integrity
-representation that [`ssri.parse`](#parse) can handle.
-
-`checkStream` will return a Promise that either resolves to the
-`Hash` that succeeded verification, or, if the verification fails
-or an error happens with `stream`, the Promise will be rejected.
-
-If the Promise is rejected because verification failed, the returned error will
-have `err.code` as `EINTEGRITY`.
-
-If `opts.size` is given, it will be matched against the stream size. If the
-expected and actual sizes fail to match, the Promise will be rejected with an
-error whose `err.code` is `EBADSIZE`.
-
-If `opts.pickAlgorithm` is provided, it will be used by
-[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
-the available digests to match against.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- integrity
-)
-// ->
-// Promise<{
-// algorithm: 'sha512',
-//   digest: 'yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1'
-// }>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0'
-) // -> Promise<Hash>
-
-ssri.checkStream(
- fs.createReadStream('index.js'),
- 'sha1-BaDDigEST'
-) // -> Promise<Error<{code: 'EINTEGRITY'}>>
-```
-
-#### <a name="integrity-stream"></a> `> integrityStream([opts]) -> IntegrityStream`
-
-Returns a `Transform` stream that data can be piped through in order to generate
-and optionally check data integrity for piped data. When the stream completes
-successfully, it emits `size` and `integrity` events, containing the total
-number of bytes processed and a calculated `Integrity` instance based on stream
-data, respectively.
-
-If `opts.algorithms` is passed in, the listed algorithms will be calculated when
-generating the final `Integrity` instance. The default is `['sha512']`.
-
-If `opts.single` is passed in, the `integrity` event will emit a single `Hash`
-instance rather than a full `Integrity` object.
-
-If `opts.integrity` is passed in, it should be an `integrity` value understood
-by [`parse`](#parse) that the stream will check the data against. If
-verification succeeds, the integrity stream will emit a `verified` event whose
-value is a single `Hash` object that is the one that succeeded verification. If
-verification fails, the stream will error with an `EINTEGRITY` error code.
-
-If `opts.size` is given, it will be matched against the stream size. An error
-with `err.code` `EBADSIZE` will be emitted by the stream if the expected size
-and actual size fail to match.
-
-If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
-arguments. ssri will prioritize whichever of the two algorithms is returned by
-this function. Note that the function may be called multiple times, and it
-**must** return one of the two algorithms provided. By default, ssri will make
-a best-effort to pick the strongest/most reliable of the given algorithms. It
-may intentionally deprioritize algorithms with known vulnerabilities.
-
-##### Example
-
-```javascript
-const integrity = ssri.fromData(fs.readFileSync('index.js'))
-fs.createReadStream('index.js')
-.pipe(ssri.integrityStream({integrity}))
-```
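-
-A sketch of consuming the stream's events (the file name is illustrative, and
-`.resume()` simply drains the readable side so the final events fire):
-
-```javascript
-const sri = ssri.fromData(fs.readFileSync('index.js'))
-const check = ssri.integrityStream({integrity: sri})
-check.on('size', size => console.log(`hashed ${size} bytes`))
-check.on('integrity', integrity => console.log(`calculated ${integrity}`))
-check.on('verified', hash => console.log(`matched via ${hash.algorithm}`))
-check.on('error', err => console.error(err.code)) // EINTEGRITY or EBADSIZE
-fs.createReadStream('index.js').pipe(check).resume()
-```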
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/index.js
deleted file mode 100644
index ff7881f7cb..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/index.js
+++ /dev/null
@@ -1,379 +0,0 @@
-'use strict'
-
-const Buffer = require('safe-buffer').Buffer
-
-const crypto = require('crypto')
-const Transform = require('stream').Transform
-
-const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']
-
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([^-]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([^-]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)*$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-class Hash {
- get isHash () { return true }
- constructor (hash, opts) {
- const strict = !!(opts && opts.strict)
- this.source = hash.trim()
- // 3.1. Integrity metadata (called "Hash" by ssri)
- // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
- const match = this.source.match(
- strict
- ? STRICT_SRI_REGEX
- : SRI_REGEX
- )
- if (!match) { return }
- if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { return }
- this.algorithm = match[1]
- this.digest = match[2]
-
- const rawOpts = match[3]
- this.options = rawOpts ? rawOpts.slice(1).split('?') : []
- }
- hexDigest () {
- return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
- }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- if (opts && opts.strict) {
- // Strict mode enforces the standard as close to the foot of the
- // letter as it can.
- if (!(
- // The spec has very restricted productions for algorithms.
- // https://www.w3.org/TR/CSP2/#source-list-syntax
- SPEC_ALGORITHMS.some(x => x === this.algorithm) &&
- // Usually, if someone insists on using a "different" base64, we
- // leave it as-is, since there's multiple standards, and the
- // specified is not a URL-safe variant.
- // https://www.w3.org/TR/CSP2/#base64_value
- this.digest.match(BASE64_REGEX) &&
- // Option syntax is strictly visual chars.
- // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
- // https://tools.ietf.org/html/rfc5234#appendix-B.1
- (this.options || []).every(opt => opt.match(VCHAR_REGEX))
- )) {
- return ''
- }
- }
- const options = this.options && this.options.length
- ? `?${this.options.join('?')}`
- : ''
- return `${this.algorithm}-${this.digest}${options}`
- }
-}
-
-class Integrity {
- get isIntegrity () { return true }
- toJSON () {
- return this.toString()
- }
- toString (opts) {
- opts = opts || {}
- let sep = opts.sep || ' '
- if (opts.strict) {
- // Entries must be separated by whitespace, according to spec.
- sep = sep.replace(/\S+/g, ' ')
- }
- return Object.keys(this).map(k => {
- return this[k].map(hash => {
- return Hash.prototype.toString.call(hash, opts)
- }).filter(x => x.length).join(sep)
- }).filter(x => x.length).join(sep)
- }
- concat (integrity, opts) {
- const other = typeof integrity === 'string'
- ? integrity
- : stringify(integrity, opts)
- return parse(`${this.toString(opts)} ${other}`, opts)
- }
- hexDigest () {
- return parse(this, {single: true}).hexDigest()
- }
- match (integrity, opts) {
- const other = parse(integrity, opts)
- const algo = other.pickAlgorithm(opts)
- return (
- this[algo] &&
- other[algo] &&
- this[algo].find(hash =>
- other[algo].find(otherhash =>
- hash.digest === otherhash.digest
- )
- )
- ) || false
- }
- pickAlgorithm (opts) {
- const pickAlgorithm = (opts && opts.pickAlgorithm) || getPrioritizedHash
- const keys = Object.keys(this)
- if (!keys.length) {
- throw new Error(`No algorithms available for ${
- JSON.stringify(this.toString())
- }`)
- }
- return keys.reduce((acc, algo) => {
- return pickAlgorithm(acc, algo) || acc
- })
- }
-}
-
-module.exports.parse = parse
-function parse (sri, opts) {
- opts = opts || {}
- if (typeof sri === 'string') {
- return _parse(sri, opts)
- } else if (sri.algorithm && sri.digest) {
- const fullSri = new Integrity()
- fullSri[sri.algorithm] = [sri]
- return _parse(stringify(fullSri, opts), opts)
- } else {
- return _parse(stringify(sri, opts), opts)
- }
-}
-
-function _parse (integrity, opts) {
- // 3.4.3. Parse metadata
- // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- if (opts.single) {
- return new Hash(integrity, opts)
- }
- return integrity.trim().split(/\s+/).reduce((acc, string) => {
- const hash = new Hash(string, opts)
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.stringify = stringify
-function stringify (obj, opts) {
- if (obj.algorithm && obj.digest) {
- return Hash.prototype.toString.call(obj, opts)
- } else if (typeof obj === 'string') {
- return stringify(parse(obj, opts), opts)
- } else {
- return Integrity.prototype.toString.call(obj, opts)
- }
-}
-
-module.exports.fromHex = fromHex
-function fromHex (hexDigest, algorithm, opts) {
- const optString = (opts && opts.options && opts.options.length)
- ? `?${opts.options.join('?')}`
- : ''
- return parse(
- `${algorithm}-${
- Buffer.from(hexDigest, 'hex').toString('base64')
- }${optString}`, opts
- )
-}
-
-module.exports.fromData = fromData
-function fromData (data, opts) {
- opts = opts || {}
- const algorithms = opts.algorithms || ['sha512']
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
- return algorithms.reduce((acc, algo) => {
- const digest = crypto.createHash(algo).update(data).digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-}
-
-module.exports.fromStream = fromStream
-function fromStream (stream, opts) {
- opts = opts || {}
- const P = opts.Promise || Promise
- const istream = integrityStream(opts)
- return new P((resolve, reject) => {
- stream.pipe(istream)
- stream.on('error', reject)
- istream.on('error', reject)
- let sri
- istream.on('integrity', s => { sri = s })
- istream.on('end', () => resolve(sri))
- istream.on('data', () => {})
- })
-}
-
-module.exports.checkData = checkData
-function checkData (data, sri, opts) {
- opts = opts || {}
- sri = parse(sri, opts)
- if (!Object.keys(sri).length) {
- if (opts.error) {
- throw Object.assign(
- new Error('No valid integrity hashes to check against'), {
- code: 'EINTEGRITY'
- }
- )
- } else {
- return false
- }
- }
- const algorithm = sri.pickAlgorithm(opts)
- const digest = crypto.createHash(algorithm).update(data).digest('base64')
- const newSri = parse({algorithm, digest})
- const match = newSri.match(sri, opts)
- if (match || !opts.error) {
- return match
- } else if (typeof opts.size === 'number' && (data.length !== opts.size)) {
- const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`)
- err.code = 'EBADSIZE'
- err.found = data.length
- err.expected = opts.size
- err.sri = sri
- throw err
- } else {
- const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = sri
- err.algorithm = algorithm
- err.sri = sri
- throw err
- }
-}
-
-module.exports.checkStream = checkStream
-function checkStream (stream, sri, opts) {
- opts = opts || {}
- const P = opts.Promise || Promise
- const checker = integrityStream(Object.assign({}, opts, {
- integrity: sri
- }))
- return new P((resolve, reject) => {
- stream.pipe(checker)
- stream.on('error', reject)
- checker.on('error', reject)
- let sri
- checker.on('verified', s => { sri = s })
- checker.on('end', () => resolve(sri))
- checker.on('data', () => {})
- })
-}
-
-module.exports.integrityStream = integrityStream
-function integrityStream (opts) {
- opts = opts || {}
- // For verification
- const sri = opts.integrity && parse(opts.integrity, opts)
- const goodSri = sri && Object.keys(sri).length
- const algorithm = goodSri && sri.pickAlgorithm(opts)
- const digests = goodSri && sri[algorithm]
- // Calculating stream
- const algorithms = Array.from(
- new Set(
- (opts.algorithms || ['sha512'])
- .concat(algorithm ? [algorithm] : [])
- )
- )
- const hashes = algorithms.map(crypto.createHash)
- let streamSize = 0
- const stream = new Transform({
- transform (chunk, enc, cb) {
- streamSize += chunk.length
- hashes.forEach(h => h.update(chunk, enc))
- cb(null, chunk, enc)
- }
- }).on('end', () => {
- const optString = (opts.options && opts.options.length)
- ? `?${opts.options.join('?')}`
- : ''
- const newSri = parse(hashes.map((h, i) => {
- return `${algorithms[i]}-${h.digest('base64')}${optString}`
- }).join(' '), opts)
- // Integrity verification mode
- const match = goodSri && newSri.match(sri, opts)
- if (typeof opts.size === 'number' && streamSize !== opts.size) {
- const err = new Error(`stream size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${streamSize}`)
- err.code = 'EBADSIZE'
- err.found = streamSize
- err.expected = opts.size
- err.sri = sri
- stream.emit('error', err)
- } else if (opts.integrity && !match) {
- const err = new Error(`${sri} integrity checksum failed when using ${algorithm}: wanted ${digests} but got ${newSri}. (${streamSize} bytes)`)
- err.code = 'EINTEGRITY'
- err.found = newSri
- err.expected = digests
- err.algorithm = algorithm
- err.sri = sri
- stream.emit('error', err)
- } else {
- stream.emit('size', streamSize)
- stream.emit('integrity', newSri)
- match && stream.emit('verified', match)
- }
- })
- return stream
-}
-
-module.exports.create = createIntegrity
-function createIntegrity (opts) {
- opts = opts || {}
- const algorithms = opts.algorithms || ['sha512']
- const optString = opts.options && opts.options.length
- ? `?${opts.options.join('?')}`
- : ''
-
- const hashes = algorithms.map(crypto.createHash)
-
- return {
- update: function (chunk, enc) {
- hashes.forEach(h => h.update(chunk, enc))
- return this
- },
- digest: function (enc) {
- const integrity = algorithms.reduce((acc, algo) => {
- const digest = hashes.shift().digest('base64')
- const hash = new Hash(
- `${algo}-${digest}${optString}`,
- opts
- )
- if (hash.algorithm && hash.digest) {
- const algo = hash.algorithm
- if (!acc[algo]) { acc[algo] = [] }
- acc[algo].push(hash)
- }
- return acc
- }, new Integrity())
-
- return integrity
- }
- }
-}
-
-const NODE_HASHES = new Set(crypto.getHashes())
-
-// This is a Best Effort™ at a reasonable priority for hash algos
-const DEFAULT_PRIORITY = [
- 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512',
- // TODO - it's unclear _which_ of these Node will actually use as its name
- // for the algorithm, so we guesswork it based on the OpenSSL names.
- 'sha3',
- 'sha3-256', 'sha3-384', 'sha3-512',
- 'sha3_256', 'sha3_384', 'sha3_512'
-].filter(algo => NODE_HASHES.has(algo))
-
-function getPrioritizedHash (algo1, algo2) {
- return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
- ? algo1
- : algo2
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/package.json
deleted file mode 100644
index ff219e9d49..0000000000
--- a/deps/npm/node_modules/npm-registry-fetch/node_modules/ssri/package.json
+++ /dev/null
@@ -1,90 +0,0 @@
-{
- "_from": "ssri@^5.2.4",
- "_id": "ssri@5.3.0",
- "_inBundle": false,
- "_integrity": "sha512-XRSIPqLij52MtgoQavH/x/dU1qVKtWUAAZeOHsR9c2Ddi4XerFy3mc1alf+dLJKl9EUIm/Ht+EowFkTUOA6GAQ==",
- "_location": "/npm-registry-fetch/ssri",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "ssri@^5.2.4",
- "name": "ssri",
- "escapedName": "ssri",
- "rawSpec": "^5.2.4",
- "saveSpec": null,
- "fetchSpec": "^5.2.4"
- },
- "_requiredBy": [
- "/npm-registry-fetch/cacache",
- "/npm-registry-fetch/make-fetch-happen"
- ],
- "_resolved": "https://registry.npmjs.org/ssri/-/ssri-5.3.0.tgz",
- "_shasum": "ba3872c9c6d33a0704a7d71ff045e5ec48999d06",
- "_spec": "ssri@^5.2.4",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-fetch/node_modules/make-fetch-happen",
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org"
- },
- "bugs": {
- "url": "https://github.com/zkat/ssri/issues"
- },
- "bundleDependencies": false,
- "config": {
- "nyc": {
- "exclude": [
- "node_modules/**",
- "test/**"
- ]
- }
- },
- "dependencies": {
- "safe-buffer": "^5.1.1"
- },
- "deprecated": false,
- "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
- "devDependencies": {
- "nyc": "^11.4.1",
- "standard": "^10.0.3",
- "standard-version": "^4.3.0",
- "tap": "^11.1.0",
- "weallbehave": "^1.2.0",
- "weallcontribute": "^1.0.8"
- },
- "files": [
- "*.js"
- ],
- "homepage": "https://github.com/zkat/ssri#readme",
- "keywords": [
- "w3c",
- "web",
- "security",
- "integrity",
- "checksum",
- "hashing",
- "subresource integrity",
- "sri",
- "sri hash",
- "sri string",
- "sri generator",
- "html"
- ],
- "license": "ISC",
- "main": "index.js",
- "name": "ssri",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/zkat/ssri.git"
- },
- "scripts": {
- "postrelease": "npm publish && git push --follow-tags",
- "prerelease": "npm t",
- "pretest": "standard",
- "release": "standard-version -s",
- "test": "tap -J --coverage test/*.js",
- "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
- "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
- },
- "version": "5.3.0"
-}
diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json
index 2844ea89a2..81a00978e2 100644
--- a/deps/npm/node_modules/npm-registry-fetch/package.json
+++ b/deps/npm/node_modules/npm-registry-fetch/package.json
@@ -1,64 +1,31 @@
{
- "_args": [
- [
- "npm-registry-fetch@1.1.0",
- "/Users/rebecca/code/npm"
- ]
- ],
- "_from": "npm-registry-fetch@1.1.0",
- "_id": "npm-registry-fetch@1.1.0",
+ "_from": "npm-registry-fetch@latest",
+ "_id": "npm-registry-fetch@3.8.0",
"_inBundle": false,
- "_integrity": "sha512-XJPIBfMtgaooRtZmuA42xCeLf3tkxdIX0xqRsGWwNrcVvJ9UYFccD7Ho7QWCzvkM3i/QrkUC37Hu0a+vDBmt5g==",
+ "_integrity": "sha512-hrw8UMD+Nob3Kl3h8Z/YjmKamb1gf7D1ZZch2otrIXM3uFLB5vjEY6DhMlq80z/zZet6eETLbOXcuQudCB3Zpw==",
"_location": "/npm-registry-fetch",
- "_phantomChildren": {
- "agent-base": "4.2.0",
- "agentkeepalive": "3.4.1",
- "bluebird": "3.5.1",
- "chownr": "1.0.1",
- "concat-stream": "1.6.2",
- "duplexify": "3.6.0",
- "end-of-stream": "1.4.1",
- "flush-write-stream": "1.0.3",
- "from2": "2.3.0",
- "glob": "7.1.2",
- "graceful-fs": "4.1.11",
- "http-cache-semantics": "3.8.1",
- "http-proxy-agent": "2.1.0",
- "https-proxy-agent": "2.2.1",
- "ip": "1.1.5",
- "lru-cache": "4.1.3",
- "mississippi": "3.0.0",
- "mkdirp": "0.5.1",
- "move-concurrently": "1.0.1",
- "node-fetch-npm": "2.0.2",
- "once": "1.4.0",
- "parallel-transform": "1.1.0",
- "promise-inflight": "1.0.1",
- "promise-retry": "1.1.1",
- "pumpify": "1.5.1",
- "rimraf": "2.6.2",
- "safe-buffer": "5.1.2",
- "stream-each": "1.2.2",
- "through2": "2.0.3",
- "unique-filename": "1.1.0",
- "y18n": "4.0.0"
- },
+ "_phantomChildren": {},
"_requested": {
- "type": "version",
+ "type": "tag",
"registry": true,
- "raw": "npm-registry-fetch@1.1.0",
+ "raw": "npm-registry-fetch@latest",
"name": "npm-registry-fetch",
"escapedName": "npm-registry-fetch",
- "rawSpec": "1.1.0",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "1.1.0"
+ "fetchSpec": "latest"
},
"_requiredBy": [
- "/"
+ "#USER",
+ "/",
+ "/libnpmaccess",
+ "/libnpmhook",
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-1.1.0.tgz",
- "_spec": "1.1.0",
- "_where": "/Users/rebecca/code/npm",
+ "_resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-3.8.0.tgz",
+ "_shasum": "aa7d9a7c92aff94f48dba0984bdef4bd131c88cc",
+ "_spec": "npm-registry-fetch@latest",
+ "_where": "/Users/zkat/Documents/code/work/npm",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
@@ -66,6 +33,7 @@
"bugs": {
"url": "https://github.com/npm/registry-fetch/issues"
},
+ "bundleDependencies": false,
"config": {
"nyc": {
"exclude": [
@@ -75,24 +43,26 @@
}
},
"dependencies": {
+ "JSONStream": "^1.3.4",
"bluebird": "^3.5.1",
- "figgy-pudding": "^2.0.1",
- "lru-cache": "^4.1.2",
- "make-fetch-happen": "^3.0.0",
- "npm-package-arg": "^6.0.0",
- "safe-buffer": "^5.1.1"
+ "figgy-pudding": "^3.4.1",
+ "lru-cache": "^4.1.3",
+ "make-fetch-happen": "^4.0.1",
+ "npm-package-arg": "^6.1.0"
},
+ "deprecated": false,
"description": "Fetch-based http client for use with npm registry APIs",
"devDependencies": {
- "cacache": "^10.0.4",
+ "cacache": "^11.0.2",
+ "get-stream": "^4.0.0",
"mkdirp": "^0.5.1",
- "nock": "^9.2.3",
+ "nock": "^9.4.3",
"npmlog": "^4.1.2",
"rimraf": "^2.6.2",
- "ssri": "^5.3.0",
+ "ssri": "^6.0.0",
"standard": "^11.0.1",
- "standard-version": "^4.2.0",
- "tap": "^11.1.2",
+ "standard-version": "^4.4.0",
+ "tap": "^12.0.1",
"weallbehave": "^1.2.0",
"weallcontribute": "^1.0.8"
},
@@ -122,5 +92,5 @@
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
- "version": "1.1.0"
+ "version": "3.8.0"
}