summaryrefslogtreecommitdiff
path: root/deps/npm/node_modules/npm-profile
diff options
context:
space:
mode:
Diffstat (limited to 'deps/npm/node_modules/npm-profile')
-rw-r--r--deps/npm/node_modules/npm-profile/README.md61
-rw-r--r--deps/npm/node_modules/npm-profile/index.js114
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md423
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md3
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.es.md628
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.md624
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/en.js3
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/es.js3
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/get.js190
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/index.js3
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js26
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js115
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js21
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js162
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js224
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js69
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js44
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js11
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js55
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js32
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js25
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js213
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.js44
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.json6
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.js46
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.json6
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/ls.js6
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/LICENSE13
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/README.md91
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/index.js172
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/package.json65
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/package.json126
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/put.js71
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/rm.js28
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/verify.js3
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/index.js16
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/node4/index.js22
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json18
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/responsetest.js6
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/revalidatetest.js15
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md175
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md3
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/README.md462
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/index.js334
-rw-r--r--deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/package.json90
-rw-r--r--deps/npm/node_modules/npm-profile/package.json24
46 files changed, 4819 insertions, 72 deletions
diff --git a/deps/npm/node_modules/npm-profile/README.md b/deps/npm/node_modules/npm-profile/README.md
index e1c13ef387..001c551cf4 100644
--- a/deps/npm/node_modules/npm-profile/README.md
+++ b/deps/npm/node_modules/npm-profile/README.md
@@ -50,12 +50,13 @@ An error object indicating what went wrong.
The `headers` property will contain the HTTP headers of the response.
If the action was denied because an OTP is required then `code` will be set
-to `otp`.
+to `EOTP`.
If the action was denied because it came from an IP address that this action
-on this account isn't allowed from then the `code` will be set to `ipaddress`.
+on this account isn't allowed from then the `code` will be set to `EAUTHIP`.
-Otherwise the code will be the HTTP response code.
+Otherwise the code will be `'E'` followed by the HTTP response code, for
+example a Forbidden response would be `E403`.
### profile.login(username, password, config) → Promise
@@ -93,11 +94,11 @@ An object with a `token` property that can be passed into future authentication
An error object indicating what went wrong.
-If the object has a `code` property set to `otp` then that indicates that
+If the object has a `code` property set to `EOTP` then that indicates that
this account must use two-factor authentication to login. Try again with a
one-time password.
-If the object has a `code` property set to `ip` then that indicates that
+If the object has a `code` property set to `EAUTHIP` then that indicates that
this account is only allowed to login from certain networks and this ip is
not on one of those networks.
@@ -157,10 +158,10 @@ An error object indicating what went wrong.
The `headers` property will contain the HTTP headers of the response.
If the action was denied because an OTP is required then `code` will be set
-to `otp`.
+to `EOTP`.
If the action was denied because it came from an IP address that this action
-on this account isn't allowed from then the `code` will be set to `ipaddress`.
+on this account isn't allowed from then the `code` will be set to `EAUTHIP`.
Otherwise the code will be the HTTP response code.
@@ -256,10 +257,10 @@ An error object indicating what went wrong.
The `headers` property will contain the HTTP headers of the response.
If the action was denied because an OTP is required then `code` will be set
-to `otp`.
+to `EOTP`.
If the action was denied because it came from an IP address that this action
-on this account isn't allowed from then the `code` will be set to `ipaddress`.
+on this account isn't allowed from then the `code` will be set to `EAUTHIP`.
Otherwise the code will be the HTTP response code.
@@ -300,10 +301,10 @@ An error object indicating what went wrong.
The `headers` property will contain the HTTP headers of the response.
If the action was denied because an OTP is required then `code` will be set
-to `otp`.
+to `EOTP`.
If the action was denied because it came from an IP address that this action
-on this account isn't allowed from then the `code` will be set to `ipaddress`.
+on this account isn't allowed from then the `code` will be set to `EAUTHIP`.
Otherwise the code will be the HTTP response code.
@@ -338,10 +339,10 @@ An error object indicating what went wrong.
The `headers` property will contain the HTTP headers of the response.
If the action was denied because an OTP is required then `code` will be set
-to `otp`.
+to `EOTP`.
If the action was denied because it came from an IP address that this action
-on this account isn't allowed from then the `code` will be set to `ipaddress`.
+on this account isn't allowed from then the `code` will be set to `EAUTHIP`.
Otherwise the code will be the HTTP response code.
@@ -389,9 +390,39 @@ An error object indicating what went wrong.
The `headers` property will contain the HTTP headers of the response.
If the action was denied because an OTP is required then `code` will be set
-to `otp`.
+to `EOTP`.
If the action was denied because it came from an IP address that this action
-on this account isn't allowed from then the `code` will be set to `ipaddress`.
+on this account isn't allowed from then the `code` will be set to `EAUTHIP`.
Otherwise the code will be the HTTP response code.
+
+## Logging
+
+This module logs by emitting `log` events on the global `process` object.
+These events look like this:
+
+```
+process.emit('log', 'loglevel', 'feature', 'message part 1', 'part 2', 'part 3', 'etc')
+```
+
+`loglevel` can be one of: `error`, `warn`, `notice`, `http`, `timing`, `info`, `verbose`, and `silly`.
+
+`feature` is any brief string that describes the component doing the logging.
+
+The remaining arguments are evaluated like `console.log` and joined together with spaces.
+
+A real world example of this is:
+
+```
+ process.emit('log', 'http', 'request', '→',conf.method || 'GET', conf.target)
+```
+
+To handle the log events, you would do something like this:
+
+```
+const log = require('npmlog')
+process.on('log', function (level) {
+ return log[level].apply(log, [].slice.call(arguments, 1))
+})
+```
diff --git a/deps/npm/node_modules/npm-profile/index.js b/deps/npm/node_modules/npm-profile/index.js
index 838bb12af3..611200c5f8 100644
--- a/deps/npm/node_modules/npm-profile/index.js
+++ b/deps/npm/node_modules/npm-profile/index.js
@@ -126,32 +126,58 @@ function createToken (password, readonly, cidrs, conf) {
return fetchJSON(Object.assign({target: target, method: 'POST', body: props}, conf))
}
-function AuthOTP (res) {
- Error.call(this)
- this.message = 'OTP required for authentication'
- Error.captureStackTrace(this, AuthOTP)
- this.headers = res.headers.raw()
- this.code = 'EOTP'
+function FetchError (err, method, target) {
+ err.method = method
+ err.href = target
+ return err
}
-AuthOTP.prototype = Error.prototype
-
-function AuthIPAddress (res) {
- Error.call(this)
- this.message = 'Login is not allowed from your IP address'
- Error.captureStackTrace(this, AuthIPAddress)
- this.headers = res.headers.raw()
- this.code = 'EAUTHIP'
+
+class HttpErrorBase extends Error {
+ constructor (method, target, res, body) {
+ super()
+ this.headers = res.headers.raw()
+ this.statusCode = res.status
+ this.code = 'E' + res.status
+ this.method = method
+ this.target = target
+ this.body = body
+ this.pkgid = packageName(target)
+ }
}
-AuthIPAddress.prototype = Error.prototype
-
-function AuthUnknown (res) {
- Error.call(this)
- this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
- Error.captureStackTrace(this, AuthUnknown)
- this.headers = res.headers.raw()
- this.code = 'E401'
+
+class General extends HttpErrorBase {
+  constructor (method, target, res, body) {
+    super(method, target, res, body)
+    this.message = `Registry returned ${this.statusCode} for ${this.method} on ${this.target}`
+  }
+}
+
+class AuthOTP extends HttpErrorBase {
+ constructor (method, target, res, body) {
+ super(method, target, res, body)
+ this.message = 'OTP required for authentication'
+ this.code = 'EOTP'
+ Error.captureStackTrace(this, AuthOTP)
+ }
+}
+
+class AuthIPAddress extends HttpErrorBase {
+  constructor (method, target, res, body) {
+    super(method, target, res, body)
+    this.message = 'Login is not allowed from your IP address'
+    this.code = 'EAUTHIP'
+    Error.captureStackTrace(this, AuthIPAddress)
+  }
+}
+
+class AuthUnknown extends HttpErrorBase {
+  constructor (method, target, res, body) {
+    super(method, target, res, body)
+    this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
+    this.code = 'E401'
+    Error.captureStackTrace(this, AuthUnknown)
+  }
+}
-AuthUnknown.prototype = Error.prototype
function authHeaders (auth) {
const headers = {}
@@ -175,7 +201,10 @@ function fetchJSON (conf) {
fetchOpts.headers['Content-Type'] = 'application/json'
fetchOpts.body = JSON.stringify(conf.body)
}
- return fetch.defaults(conf.opts || {})(conf.target, fetchOpts).then(res => {
+ process.emit('log', 'http', 'request', '→',conf.method || 'GET', conf.target)
+ return fetch.defaults(conf.opts || {})(conf.target, fetchOpts).catch(err => {
+ throw new FetchError(err, conf.method, conf.target)
+ }).then(res => {
if (res.headers.get('content-type') === 'application/json') {
return res.json().then(content => [res, content])
} else {
@@ -190,21 +219,42 @@ function fetchJSON (conf) {
}).then(result => {
const res = result[0]
const content = result[1]
+ process.emit('log', 'http', res.status, `← ${res.statusText} (${conf.target})`)
if (res.status === 401 && res.headers.get('www-authenticate')) {
const auth = res.headers.get('www-authenticate').split(/,\s*/).map(s => s.toLowerCase())
if (auth.indexOf('ipaddress') !== -1) {
- throw new AuthIPAddress(res)
+ throw new AuthIPAddress(conf.method, conf.target, res, content)
} else if (auth.indexOf('otp') !== -1) {
- throw new AuthOTP(res)
+ throw new AuthOTP(conf.method, conf.target, res, content)
} else {
- throw new AuthUnknown(res)
+ throw new AuthUnknown(conf.method, conf.target, res, content)
}
} else if (res.status < 200 || res.status >= 300) {
- const err = new Error(res.statusText)
- err.code = 'E' + res.status
- err.headers = res.headers.raw()
- throw err
+ if (typeof content === 'object' && content.error) {
+ return content
+ } else {
+ throw new General(conf.method, conf.target, res, content)
+ }
+ } else {
+ return content
}
- return content
})
}
+
+function packageName (href) {
+ try {
+ let basePath = url.parse(href).pathname.substr(1)
+ if (!basePath.match(/^-/)) {
+ basePath = basePath.split('/')
+ var index = basePath.indexOf('_rewrite')
+ if (index === -1) {
+ index = basePath.length - 1
+ } else {
+ index++
+ }
+ return decodeURIComponent(basePath[index])
+ }
+ } catch (_) {
+ // this is ok
+ }
+} \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md
new file mode 100644
index 0000000000..8edd5486b7
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/CHANGELOG.md
@@ -0,0 +1,423 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+<a name="9.3.0"></a>
+# [9.3.0](https://github.com/zkat/cacache/compare/v9.2.9...v9.3.0) (2017-10-07)
+
+
+### Features
+
+* **copy:** added cacache.get.copy api for fast copies (#107) ([067b5f6](https://github.com/zkat/cacache/commit/067b5f6))
+
+
+
+<a name="9.2.9"></a>
+## [9.2.9](https://github.com/zkat/cacache/compare/v9.2.8...v9.2.9) (2017-06-17)
+
+
+
+<a name="9.2.8"></a>
+## [9.2.8](https://github.com/zkat/cacache/compare/v9.2.7...v9.2.8) (2017-06-05)
+
+
+### Bug Fixes
+
+* **ssri:** bump ssri for bugfix ([c3232ea](https://github.com/zkat/cacache/commit/c3232ea))
+
+
+
+<a name="9.2.7"></a>
+## [9.2.7](https://github.com/zkat/cacache/compare/v9.2.6...v9.2.7) (2017-06-05)
+
+
+### Bug Fixes
+
+* **content:** make verified content completely read-only (#96) ([4131196](https://github.com/zkat/cacache/commit/4131196))
+
+
+
+<a name="9.2.6"></a>
+## [9.2.6](https://github.com/zkat/cacache/compare/v9.2.5...v9.2.6) (2017-05-31)
+
+
+### Bug Fixes
+
+* **node:** update ssri to prevent old node 4 crash ([5209ffe](https://github.com/zkat/cacache/commit/5209ffe))
+
+
+
+<a name="9.2.5"></a>
+## [9.2.5](https://github.com/zkat/cacache/compare/v9.2.4...v9.2.5) (2017-05-25)
+
+
+### Bug Fixes
+
+* **deps:** fix lockfile issues and bump ssri ([84e1d7e](https://github.com/zkat/cacache/commit/84e1d7e))
+
+
+
+<a name="9.2.4"></a>
+## [9.2.4](https://github.com/zkat/cacache/compare/v9.2.3...v9.2.4) (2017-05-24)
+
+
+### Bug Fixes
+
+* **deps:** bumping deps ([bbccb12](https://github.com/zkat/cacache/commit/bbccb12))
+
+
+
+<a name="9.2.3"></a>
+## [9.2.3](https://github.com/zkat/cacache/compare/v9.2.2...v9.2.3) (2017-05-24)
+
+
+### Bug Fixes
+
+* **rm:** stop crashing if content is missing on rm ([ac90bc0](https://github.com/zkat/cacache/commit/ac90bc0))
+
+
+
+<a name="9.2.2"></a>
+## [9.2.2](https://github.com/zkat/cacache/compare/v9.2.1...v9.2.2) (2017-05-14)
+
+
+### Bug Fixes
+
+* **i18n:** lets pretend this didn't happen ([519b4ee](https://github.com/zkat/cacache/commit/519b4ee))
+
+
+
+<a name="9.2.1"></a>
+## [9.2.1](https://github.com/zkat/cacache/compare/v9.2.0...v9.2.1) (2017-05-14)
+
+
+### Bug Fixes
+
+* **docs:** fixing translation messup ([bb9e4f9](https://github.com/zkat/cacache/commit/bb9e4f9))
+
+
+
+<a name="9.2.0"></a>
+# [9.2.0](https://github.com/zkat/cacache/compare/v9.1.0...v9.2.0) (2017-05-14)
+
+
+### Features
+
+* **i18n:** add Spanish translation for API ([531f9a4](https://github.com/zkat/cacache/commit/531f9a4))
+
+
+
+<a name="9.1.0"></a>
+# [9.1.0](https://github.com/zkat/cacache/compare/v9.0.0...v9.1.0) (2017-05-14)
+
+
+### Features
+
+* **i18n:** Add Spanish translation and i18n setup (#91) ([323b90c](https://github.com/zkat/cacache/commit/323b90c))
+
+
+
+<a name="9.0.0"></a>
+# [9.0.0](https://github.com/zkat/cacache/compare/v8.0.0...v9.0.0) (2017-04-28)
+
+
+### Bug Fixes
+
+* **memoization:** actually use the LRU ([0e55dc9](https://github.com/zkat/cacache/commit/0e55dc9))
+
+
+### Features
+
+* **memoization:** memoizers can be injected through opts.memoize (#90) ([e5614c7](https://github.com/zkat/cacache/commit/e5614c7))
+
+
+### BREAKING CHANGES
+
+* **memoization:** If you were passing an object to opts.memoize, it will now be used as an injected memoization object. If you were only passing booleans and other non-objects through that option, no changes are needed.
+
+
+
+<a name="8.0.0"></a>
+# [8.0.0](https://github.com/zkat/cacache/compare/v7.1.0...v8.0.0) (2017-04-22)
+
+
+### Features
+
+* **read:** change hasContent to return {sri, size} (#88) ([bad6c49](https://github.com/zkat/cacache/commit/bad6c49)), closes [#87](https://github.com/zkat/cacache/issues/87)
+
+
+### BREAKING CHANGES
+
+* **read:** hasContent now returns an object with `{sri, size}` instead of `sri`. Use `result.sri` anywhere that needed the old return value.
+
+
+
+<a name="7.1.0"></a>
+# [7.1.0](https://github.com/zkat/cacache/compare/v7.0.5...v7.1.0) (2017-04-20)
+
+
+### Features
+
+* **size:** handle content size info (#49) ([91230af](https://github.com/zkat/cacache/commit/91230af))
+
+
+
+<a name="7.0.5"></a>
+## [7.0.5](https://github.com/zkat/cacache/compare/v7.0.4...v7.0.5) (2017-04-18)
+
+
+### Bug Fixes
+
+* **integrity:** new ssri with fixed integrity stream ([6d13e8e](https://github.com/zkat/cacache/commit/6d13e8e))
+* **write:** wrap stuff in promises to improve errors ([3624fc5](https://github.com/zkat/cacache/commit/3624fc5))
+
+
+
+<a name="7.0.4"></a>
+## [7.0.4](https://github.com/zkat/cacache/compare/v7.0.3...v7.0.4) (2017-04-15)
+
+
+### Bug Fixes
+
+* **fix-owner:** throw away ENOENTs on chownr ([d49bbcd](https://github.com/zkat/cacache/commit/d49bbcd))
+
+
+
+<a name="7.0.3"></a>
+## [7.0.3](https://github.com/zkat/cacache/compare/v7.0.2...v7.0.3) (2017-04-05)
+
+
+### Bug Fixes
+
+* **read:** fixing error message for integrity verification failures ([9d4f0a5](https://github.com/zkat/cacache/commit/9d4f0a5))
+
+
+
+<a name="7.0.2"></a>
+## [7.0.2](https://github.com/zkat/cacache/compare/v7.0.1...v7.0.2) (2017-04-03)
+
+
+### Bug Fixes
+
+* **integrity:** use EINTEGRITY error code and update ssri ([8dc2e62](https://github.com/zkat/cacache/commit/8dc2e62))
+
+
+
+<a name="7.0.1"></a>
+## [7.0.1](https://github.com/zkat/cacache/compare/v7.0.0...v7.0.1) (2017-04-03)
+
+
+### Bug Fixes
+
+* **docs:** fix header name conflict in readme ([afcd456](https://github.com/zkat/cacache/commit/afcd456))
+
+
+
+<a name="7.0.0"></a>
+# [7.0.0](https://github.com/zkat/cacache/compare/v6.3.0...v7.0.0) (2017-04-03)
+
+
+### Bug Fixes
+
+* **test:** fix content.write tests when running in docker ([d2e9b6a](https://github.com/zkat/cacache/commit/d2e9b6a))
+
+
+### Features
+
+* **integrity:** subresource integrity support (#78) ([b1e731f](https://github.com/zkat/cacache/commit/b1e731f))
+
+
+### BREAKING CHANGES
+
+* **integrity:** The entire API has been overhauled to use SRI hashes instead of digest/hashAlgorithm pairs. SRI hashes follow the Subresource Integrity standard and support strings and objects compatible with [`ssri`](https://npm.im/ssri).
+
+* This change bumps the index version, which will invalidate all previous index entries. Content entries will remain intact, and existing caches will automatically reuse any content from before this breaking change.
+
+* `cacache.get.info()`, `cacache.ls()`, and `cacache.ls.stream()` will now return objects that looks like this:
+
+```
+{
+ key: String,
+ integrity: '<algorithm>-<base64hash>',
+ path: ContentPath,
+ time: Date<ms>,
+ metadata: Any
+}
+```
+
+* `opts.digest` and `opts.hashAlgorithm` are obsolete for any API calls that used them.
+
+* Anywhere `opts.digest` was accepted, `opts.integrity` is now an option. Any valid SRI hash is accepted here -- multiple hash entries will be resolved according to the standard: first, the "strongest" hash algorithm will be picked, and then each of the entries for that algorithm will be matched against the content. Content will be validated if *any* of the entries match (so, a single integrity string can be used for multiple "versions" of the same document/data).
+
+* `put.byDigest()`, `put.stream.byDigest`, `get.byDigest()` and `get.stream.byDigest()` now expect an SRI instead of a `digest` + `opts.hashAlgorithm` pairing.
+
+* `get.hasContent()` now expects an integrity hash instead of a digest. If content exists, it will return the specific single integrity hash that was found in the cache.
+
+* `verify()` has learned to handle integrity-based caches, and forgotten how to handle old-style cache indices due to the format change.
+
+* `cacache.rm.content()` now expects an integrity hash instead of a hex digest.
+
+
+
+<a name="6.3.0"></a>
+# [6.3.0](https://github.com/zkat/cacache/compare/v6.2.0...v6.3.0) (2017-04-01)
+
+
+### Bug Fixes
+
+* **fixOwner:** ignore EEXIST race condition from mkdirp ([4670e9b](https://github.com/zkat/cacache/commit/4670e9b))
+* **index:** ignore index removal races when inserting ([b9d2fa2](https://github.com/zkat/cacache/commit/b9d2fa2))
+* **memo:** use lru-cache for better mem management (#75) ([d8ac5aa](https://github.com/zkat/cacache/commit/d8ac5aa))
+
+
+### Features
+
+* **dependencies:** Switch to move-concurrently (#77) ([dc6482d](https://github.com/zkat/cacache/commit/dc6482d))
+
+
+
+<a name="6.2.0"></a>
+# [6.2.0](https://github.com/zkat/cacache/compare/v6.1.2...v6.2.0) (2017-03-15)
+
+
+### Bug Fixes
+
+* **index:** additional bucket entry verification with checksum (#72) ([f8e0f25](https://github.com/zkat/cacache/commit/f8e0f25))
+* **verify:** return fixOwner.chownr promise ([6818521](https://github.com/zkat/cacache/commit/6818521))
+
+
+### Features
+
+* **tmp:** safe tmp dir creation/management util (#73) ([c42da71](https://github.com/zkat/cacache/commit/c42da71))
+
+
+
+<a name="6.1.2"></a>
+## [6.1.2](https://github.com/zkat/cacache/compare/v6.1.1...v6.1.2) (2017-03-13)
+
+
+### Bug Fixes
+
+* **index:** set default hashAlgorithm ([d6eb2f0](https://github.com/zkat/cacache/commit/d6eb2f0))
+
+
+
+<a name="6.1.1"></a>
+## [6.1.1](https://github.com/zkat/cacache/compare/v6.1.0...v6.1.1) (2017-03-13)
+
+
+### Bug Fixes
+
+* **coverage:** bumping coverage for verify (#71) ([0b7faf6](https://github.com/zkat/cacache/commit/0b7faf6))
+* **deps:** glob should have been a regular dep :< ([0640bc4](https://github.com/zkat/cacache/commit/0640bc4))
+
+
+
+<a name="6.1.0"></a>
+# [6.1.0](https://github.com/zkat/cacache/compare/v6.0.2...v6.1.0) (2017-03-12)
+
+
+### Bug Fixes
+
+* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/zkat/cacache/commit/ef4f70a))
+* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/zkat/cacache/commit/6ab8132))
+
+
+### Features
+
+* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/zkat/cacache/commit/d5d25ba)), closes [#66](https://github.com/zkat/cacache/issues/66)
+* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/zkat/cacache/commit/45ad77a))
+
+
+
+<a name="6.0.2"></a>
+## [6.0.2](https://github.com/zkat/cacache/compare/v6.0.1...v6.0.2) (2017-03-11)
+
+
+### Bug Fixes
+
+* **index:** segment cache items with another subbucket (#64) ([c3644e5](https://github.com/zkat/cacache/commit/c3644e5))
+
+
+
+<a name="6.0.1"></a>
+## [6.0.1](https://github.com/zkat/cacache/compare/v6.0.0...v6.0.1) (2017-03-05)
+
+
+### Bug Fixes
+
+* **docs:** Missed spots in README ([8ffb7fa](https://github.com/zkat/cacache/commit/8ffb7fa))
+
+
+
+<a name="6.0.0"></a>
+# [6.0.0](https://github.com/zkat/cacache/compare/v5.0.3...v6.0.0) (2017-03-05)
+
+
+### Bug Fixes
+
+* **api:** keep memo cache mostly-internal ([2f72d0a](https://github.com/zkat/cacache/commit/2f72d0a))
+* **content:** use the rest of the string, not the whole string ([fa8f3c3](https://github.com/zkat/cacache/commit/fa8f3c3))
+* **deps:** removed `format-number[@2](https://github.com/2).0.2` ([1187791](https://github.com/zkat/cacache/commit/1187791))
+* **deps:** removed inflight[@1](https://github.com/1).0.6 ([0d1819c](https://github.com/zkat/cacache/commit/0d1819c))
+* **deps:** rimraf[@2](https://github.com/2).6.1 ([9efab6b](https://github.com/zkat/cacache/commit/9efab6b))
+* **deps:** standard[@9](https://github.com/9).0.0 ([4202cba](https://github.com/zkat/cacache/commit/4202cba))
+* **deps:** tap[@10](https://github.com/10).3.0 ([aa03088](https://github.com/zkat/cacache/commit/aa03088))
+* **deps:** weallcontribute[@1](https://github.com/1).0.8 ([ad4f4dc](https://github.com/zkat/cacache/commit/ad4f4dc))
+* **docs:** add security note to hashKey ([03f81ba](https://github.com/zkat/cacache/commit/03f81ba))
+* **hashes:** change default hashAlgorithm to sha512 ([ea00ba6](https://github.com/zkat/cacache/commit/ea00ba6))
+* **hashes:** missed a spot for hashAlgorithm defaults ([45997d8](https://github.com/zkat/cacache/commit/45997d8))
+* **index:** add length header before JSON for verification ([fb8cb4d](https://github.com/zkat/cacache/commit/fb8cb4d))
+* **index:** change index filenames to sha1s of keys ([bbc5fca](https://github.com/zkat/cacache/commit/bbc5fca))
+* **index:** who cares about race conditions anyway ([b1d3888](https://github.com/zkat/cacache/commit/b1d3888))
+* **perf:** bulk-read get+read for massive speed ([d26cdf9](https://github.com/zkat/cacache/commit/d26cdf9))
+* **perf:** use bulk file reads for index reads ([79a8891](https://github.com/zkat/cacache/commit/79a8891))
+* **put-stream:** remove tmp file on stream insert error ([65f6632](https://github.com/zkat/cacache/commit/65f6632))
+* **put-stream:** robustified and predictibilized ([daf9e08](https://github.com/zkat/cacache/commit/daf9e08))
+* **put-stream:** use new promise API for moves ([1d36013](https://github.com/zkat/cacache/commit/1d36013))
+* **readme:** updated to reflect new default hashAlgo ([c60a2fa](https://github.com/zkat/cacache/commit/c60a2fa))
+* **verify:** tiny typo fix ([db22d05](https://github.com/zkat/cacache/commit/db22d05))
+
+
+### Features
+
+* **api:** converted external api ([7bf032f](https://github.com/zkat/cacache/commit/7bf032f))
+* **cacache:** exported clearMemoized() utility ([8d2c5b6](https://github.com/zkat/cacache/commit/8d2c5b6))
+* **cache:** add versioning to content and index ([31bc549](https://github.com/zkat/cacache/commit/31bc549))
+* **content:** collate content files into subdirs ([c094d9f](https://github.com/zkat/cacache/commit/c094d9f))
+* **deps:** [@npmcorp](https://github.com/npmcorp)/move[@1](https://github.com/1).0.0 ([bdd00bf](https://github.com/zkat/cacache/commit/bdd00bf))
+* **deps:** bluebird[@3](https://github.com/3).4.7 ([3a17aff](https://github.com/zkat/cacache/commit/3a17aff))
+* **deps:** promise-inflight[@1](https://github.com/1).0.1 ([a004fe6](https://github.com/zkat/cacache/commit/a004fe6))
+* **get:** added memoization support for get ([c77d794](https://github.com/zkat/cacache/commit/c77d794))
+* **get:** export hasContent ([2956ec3](https://github.com/zkat/cacache/commit/2956ec3))
+* **index:** add hashAlgorithm and format insert ret val ([b639746](https://github.com/zkat/cacache/commit/b639746))
+* **index:** collate index files into subdirs ([e8402a5](https://github.com/zkat/cacache/commit/e8402a5))
+* **index:** promisify entry index ([cda3335](https://github.com/zkat/cacache/commit/cda3335))
+* **memo:** added memoization lib ([da07b92](https://github.com/zkat/cacache/commit/da07b92))
+* **memo:** export memoization api ([954b1b3](https://github.com/zkat/cacache/commit/954b1b3))
+* **move-file:** add move fallback for weird errors ([5cf4616](https://github.com/zkat/cacache/commit/5cf4616))
+* **perf:** bulk content write api ([51b536e](https://github.com/zkat/cacache/commit/51b536e))
+* **put:** added memoization support to put ([b613a70](https://github.com/zkat/cacache/commit/b613a70))
+* **read:** switched to promises ([a869362](https://github.com/zkat/cacache/commit/a869362))
+* **rm:** added memoization support to rm ([4205cf0](https://github.com/zkat/cacache/commit/4205cf0))
+* **rm:** switched to promises ([a000d24](https://github.com/zkat/cacache/commit/a000d24))
+* **util:** promise-inflight ownership fix requests ([9517cd7](https://github.com/zkat/cacache/commit/9517cd7))
+* **util:** use promises for api ([ae204bb](https://github.com/zkat/cacache/commit/ae204bb))
+* **verify:** converted to Promises ([f0b3974](https://github.com/zkat/cacache/commit/f0b3974))
+
+
+### BREAKING CHANGES
+
+* cache: index/content directories are now versioned. Previous caches are no longer compatible and cannot be migrated.
+* util: fix-owner now uses Promises instead of callbacks
+* index: Previously-generated index entries are no longer compatible and the index must be regenerated.
+* index: The index format has changed and previous caches are no longer compatible. Existing caches will need to be regenerated.
+* hashes: Default hashAlgorithm changed from sha1 to sha512. If you
+rely on the prior setting, pass `opts.hashAlgorithm` in explicitly.
+* content: Previously-generated content directories are no longer compatible
+and must be regenerated.
+* verify: API is now promise-based
+* read: Switches to a Promise-based API and removes callback stuff
+* rm: Switches to a Promise-based API and removes callback stuff
+* index: this changes the API to work off promises instead of callbacks
+* api: this means we are going all in on promises now
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000..c05cb09586
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
@@ -0,0 +1,3 @@
+To the extent possible under law, maintainers for this project have waived all copyright and related or neighboring rights to this project.
+
+For more information on this waiver, see: https://creativecommons.org/publicdomain/zero/1.0/
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.es.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.es.md
new file mode 100644
index 0000000000..783a0a19b0
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.es.md
@@ -0,0 +1,628 @@
+# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
+
+[`cacache`](https://github.com/zkat/cacache) es una librería de Node.js para
+manejar caches locales en disco, con acceso tanto con claves únicas como
+direcciones de contenido (hashes/hacheos). Es súper rápida, excelente con el
+acceso concurrente, y jamás te dará datos incorrectos, aún si se corrompen o
+manipulan directamente los ficheros del cache.
+
+El propósito original era reemplazar el caché local de
+[npm](https://npm.im/npm), pero se puede usar por su propia cuenta.
+
+_Traducciones: [English](README.md)_
+
+## Instalación
+
+`$ npm install --save cacache`
+
+## Índice
+
+* [Ejemplo](#ejemplo)
+* [Características](#características)
+* [Cómo Contribuir](#cómo-contribuir)
+* [API](#api)
+ * [Usando el API en español](#localized-api)
+ * Leer
+ * [`ls`](#ls)
+ * [`ls.flujo`](#ls-stream)
+ * [`saca`](#get-data)
+ * [`saca.flujo`](#get-stream)
+ * [`saca.info`](#get-info)
+ * [`saca.tieneDatos`](#get-hasContent)
+ * Escribir
+ * [`mete`](#put-data)
+ * [`mete.flujo`](#put-stream)
+ * [opciones para `mete*`](#put-options)
+ * [`rm.todo`](#rm-all)
+ * [`rm.entrada`](#rm-entry)
+ * [`rm.datos`](#rm-content)
+ * Utilidades
+ * [`ponLenguaje`](#set-locale)
+ * [`limpiaMemoizado`](#clear-memoized)
+ * [`tmp.hazdir`](#tmp-mkdir)
+ * [`tmp.conTmp`](#with-tmp)
+ * Integridad
+ * [Subresource Integrity](#integrity)
+ * [`verifica`](#verify)
+ * [`verifica.ultimaVez`](#verify-last-run)
+
+### Ejemplo
+
+```javascript
+const cacache = require('cacache/es')
+const fs = require('fs')
+
+const tarbol = '/ruta/a/mi-tar.tgz'
+const rutaCache = '/tmp/my-toy-cache'
+const clave = 'mi-clave-única-1234'
+
+// ¡Añádelo al caché! Usa `rutaCache` como raíz del caché.
+cacache.mete(rutaCache, clave, '10293801983029384').then(integrity => {
+ console.log(`Saved content to ${rutaCache}.`)
+})
+
+const destino = '/tmp/mytar.tgz'
+
+// Copia el contenido del caché a otro fichero, pero esta vez con flujos.
+cacache.saca.flujo(
+ rutaCache, clave
+).pipe(
+ fs.createWriteStream(destino)
+).on('finish', () => {
+ console.log('extracción completada')
+})
+
+// La misma cosa, pero accesando el contenido directamente, sin tocar el índice.
+cacache.saca.porHacheo(rutaCache, integridad).then(datos => {
+ fs.writeFile(destino, datos, err => {
+ console.log('datos del tarbol sacados basado en su sha512, y escrito a otro fichero')
+ })
+})
+```
+
+### Características
+
+* Extracción por clave o por dirección de contenido (shasum, etc)
+* Usa el estándar de web, [Subresource Integrity](#integrity)
+* Compatible con multiples algoritmos - usa sha1, sha512, etc, en el mismo caché sin problema
+* Entradas con contenido idéntico comparten ficheros
+* Tolerancia de fallas (inmune a corrupción, ficheros parciales, carreras de proceso, etc)
+* Verificación completa de datos (escribiendo y leyendo)
+* Concurrencia rápida, segura y "lockless"
+* Compatible con `stream`s (flujos)
+* Compatible con `Promise`s (promesas)
+* Bastante rápida -- acceso, incluyendo verificación, en microsegundos
+* Almacenaje de metadatos arbitrarios
+* Colección de basura y verificación adicional fuera de banda
+* Cobertura rigurosa de pruebas
+* Probablemente hay un "Bloom filter" por ahí en algún lado. Eso le mola a la gente, ¿Verdad? 🤔
+
+### Cómo Contribuir
+
+El equipo de cacache felizmente acepta contribuciones de código y otras maneras de participación. ¡Hay muchas formas diferentes de contribuir! La [Guía de Colaboradores](CONTRIBUTING.md) (en inglés) tiene toda la información que necesitas para cualquier tipo de contribución: todo desde cómo reportar errores hasta cómo someter parches con nuevas características. Con todo y eso, no se preocupe por si lo que haces está exáctamente correcto: no hay ningún problema en hacer preguntas si algo no está claro, o no lo encuentras.
+
+El equipo de cacache tiene miembros hispanohablantes: es completamente aceptable crear `issues` y `pull requests` en español/castellano.
+
+Todos los participantes en este proyecto deben obedecer el [Código de Conducta](CODE_OF_CONDUCT.md) (en inglés), y en general actuar de forma amable y respetuosa mientras participan en esta comunidad.
+
+Por favor refiérase al [Historial de Cambios](CHANGELOG.md) (en inglés) para detalles sobre cambios importantes incluídos en cada versión.
+
+Finalmente, cacache tiene un sistema de localización de lenguaje. Si te interesa añadir lenguajes o mejorar los que existen, mira en el directorio `./locales` para comenzar.
+
+Happy hacking!
+
+### API
+
+#### <a name="localized-api"></a> Usando el API en español
+
+cacache incluye una traducción completa de su API al castellano, con las mismas
+características. Para usar el API como está documentado en este documento, usa
+`require('cacache/es')`
+
+cacache también tiene otros lenguajes: encuéntralos bajo `./locales`, y podrás
+usar el API en ese lenguaje con `require('cacache/<lenguaje>')`
+
+#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
+
+Enumera todas las entradas en el caché, dentro de un solo objeto. Cada entrada
+en el objeto tendrá como clave la clave única usada para el índice, el valor
+siendo un objeto de [`saca.info`](#get-info).
+
+##### Ejemplo
+
+```javascript
+cacache.ls(rutaCache).then(console.log)
+// Salida
+{
+ 'my-thing': {
+ key: 'my-thing',
+ integrity: 'sha512-BaSe64/EnCoDED+HAsh=='
+ path: '.testcache/content/deadbeef', // unido con `rutaCache`
+ time: 12345698490,
+ size: 4023948,
+ metadata: {
+ name: 'blah',
+ version: '1.2.3',
+ description: 'this was once a package but now it is my-thing'
+ }
+ },
+ 'other-thing': {
+ key: 'other-thing',
+ integrity: 'sha1-ANothER+hasH=',
+ path: '.testcache/content/bada55',
+ time: 11992309289,
+ size: 111112
+ }
+}
+```
+
+#### <a name="ls-stream"></a> `> cacache.ls.flujo(cache) -> Readable`
+
+Enumera todas las entradas en el caché, emitiendo un objeto de
+[`saca.info`](#get-info) por cada evento de `data` en el flujo.
+
+##### Ejemplo
+
+```javascript
+cacache.ls.flujo(rutaCache).on('data', console.log)
+// Salida
+{
+ key: 'my-thing',
+ integrity: 'sha512-BaSe64HaSh',
+ path: '.testcache/content/deadbeef', // unido con `rutaCache`
+ time: 12345698490,
+ size: 13423,
+ metadata: {
+ name: 'blah',
+ version: '1.2.3',
+ description: 'this was once a package but now it is my-thing'
+ }
+}
+
+{
+ key: 'other-thing',
+ integrity: 'whirlpool-WoWSoMuchSupport',
+ path: '.testcache/content/bada55',
+ time: 11992309289,
+ size: 498023984029
+}
+
+{
+ ...
+}
+```
+
+#### <a name="get-data"></a> `> cacache.saca(cache, clave, [ops]) -> Promise({data, metadata, integrity})`
+
+Devuelve un objeto con los datos, hacheo de integridad y metadatos identificados
+por la `clave`. La propiedad `data` de este objeto será una instancia de
+`Buffer` con los datos almacenados en el caché. ¡Haz con ellos lo que
+quieras! A cacache no le importará.
+
+`integrity` es un `string` de [Subresource Integrity](#integrity). Dígase, un
+`string` que puede ser usado para verificar a la `data`, que tiene como formato
+`<algoritmo>-<hacheo-integridad-base64>`.
+
+Si no existe ninguna entrada identificada por `clave`, o si los datos
+almacenados localmente fallan verificación, el `Promise` fallará.
+
+Una sub-función, `saca.porHacheo`, tiene casi el mismo comportamiento, excepto
+que busca entradas usando el hacheo de integridad, sin tocar el índice general.
+Esta versión *sólo* devuelve `data`, sin ningún objeto conteniéndola.
+
+##### Nota
+
+Esta función lee la entrada completa a la memoria antes de devolverla. Si estás
+almacenando datos Muy Grandes, es posible que [`saca.flujo`](#get-stream) sea
+una mejor solución.
+
+##### Ejemplo
+
+```javascript
+// Busca por clave
+cache.saca(rutaCache, 'my-thing').then(console.log)
+// Salida:
+{
+ metadata: {
+ thingName: 'my'
+ },
+ integrity: 'sha512-BaSe64HaSh',
+ data: Buffer#<deadbeef>,
+ size: 9320
+}
+
+// Busca por hacheo
+cache.saca.porHacheo(rutaCache, 'sha512-BaSe64HaSh').then(console.log)
+// Salida:
+Buffer#<deadbeef>
+```
+
+#### <a name="get-stream"></a> `> cacache.saca.flujo(cache, clave, [ops]) -> Readable`
+
+Devuelve un [Readable
+Stream](https://nodejs.org/api/stream.html#stream_readable_streams) de los datos
+almacenados bajo `clave`.
+
+Si no existe ninguna entrada identificada por `clave`, o si los datos
+almacenados localmente fallan verificación, el `Promise` fallará.
+
+`metadata` y `integrity` serán emitidos como eventos antes de que el flujo
+cierre.
+
+Una sub-función, `saca.flujo.porHacheo`, tiene casi el mismo comportamiento,
+excepto que busca entradas usando el hacheo de integridad, sin tocar el índice
+general. Esta versión no emite eventos de `metadata` o `integrity`.
+
+##### Ejemplo
+
+```javascript
+// Busca por clave
+cache.saca.flujo(
+ rutaCache, 'my-thing'
+).on('metadata', metadata => {
+ console.log('metadata:', metadata)
+}).on('integrity', integrity => {
+ console.log('integrity:', integrity)
+}).pipe(
+ fs.createWriteStream('./x.tgz')
+)
+// Salidas:
+metadata: { ... }
+integrity: 'sha512-SoMeDIGest+64=='
+
+// Busca por hacheo
+cache.saca.flujo.porHacheo(
+ rutaCache, 'sha512-SoMeDIGest+64=='
+).pipe(
+ fs.createWriteStream('./x.tgz')
+)
+```
+
+#### <a name="get-info"></a> `> cacache.saca.info(cache, clave) -> Promise`
+
+Busca la `clave` en el índice del caché, devolviendo información sobre la
+entrada si existe.
+
+##### Campos
+
+* `key` - Clave de la entrada. Igual al argumento `clave`.
+* `integrity` - [hacheo de Subresource Integrity](#integrity) del contenido al que se refiere esta entrada.
+* `path` - Dirección del fichero de datos almacenados, relativa al argumento `cache`.
+* `time` - Hora de creación de la entrada
+* `metadata` - Metadatos asignados a esta entrada por el usuario
+
+##### Ejemplo
+
+```javascript
+cacache.saca.info(rutaCache, 'my-thing').then(console.log)
+
+// Salida
+{
+ key: 'my-thing',
+ integrity: 'sha256-MUSTVERIFY+ALL/THINGS=='
+ path: '.testcache/content/deadbeef',
+ time: 12345698490,
+ size: 849234,
+ metadata: {
+ name: 'blah',
+ version: '1.2.3',
+ description: 'this was once a package but now it is my-thing'
+ }
+}
+```
+
+#### <a name="get-hasContent"></a> `> cacache.saca.tieneDatos(cache, integrity) -> Promise`
+
+Busca un [hacheo Subresource Integrity](#integrity) en el caché. Si existe el
+contenido asociado con `integrity`, devuelve un objeto con dos campos: el hacheo
+_específico_ que se usó para la búsqueda, `sri`, y el tamaño total del
+contenido, `size`. Si no existe ningún contenido asociado con `integrity`,
+devuelve `false`.
+
+##### Ejemplo
+
+```javascript
+cacache.saca.tieneDatos(rutaCache, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
+
+// Salida
+{
+ sri: {
+ source: 'sha256-MUSTVERIFY+ALL/THINGS==',
+ algorithm: 'sha256',
+ digest: 'MUSTVERIFY+ALL/THINGS==',
+ options: []
+ },
+ size: 9001
+}
+
+cacache.saca.tieneDatos(rutaCache, 'sha521-NOT+IN/CACHE==').then(console.log)
+
+// Salida
+false
+```
+
+#### <a name="put-data"></a> `> cacache.mete(cache, clave, datos, [ops]) -> Promise`
+
+Inserta `datos` en el caché. El `Promise` devuelto se resuelve con un hacheo
+(generado conforme a [`ops.algorithms`](#optsalgorithms)) después que la entrada
+haya sido escrita en completo.
+
+##### Ejemplo
+
+```javascript
+fetch(
+ 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
+).then(datos => {
+ return cacache.mete(rutaCache, 'registry.npmjs.org|cacache@1.0.0', datos)
+}).then(integridad => {
+ console.log('el hacheo de integridad es', integridad)
+})
+```
+
+#### <a name="put-stream"></a> `> cacache.mete.flujo(cache, clave, [ops]) -> Writable`
+
+Devuelve un [Writable
+Stream](https://nodejs.org/api/stream.html#stream_writable_streams) que inserta
+al caché los datos escritos a él. Emite un evento `integrity` con el hacheo del
+contenido escrito, cuando completa.
+
+##### Ejemplo
+
+```javascript
+request.get(
+ 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
+).pipe(
+ cacache.mete.flujo(
+ rutaCache, 'registry.npmjs.org|cacache@1.0.0'
+ ).on('integrity', d => console.log(`integrity digest is ${d}`))
+)
+```
+
+#### <a name="put-options"></a> `> opciones para cacache.mete`
+
+La funciones `cacache.mete` tienen un número de opciones en común.
+
+##### `ops.metadata`
+
+Metadatos del usuario que se almacenarán con la entrada.
+
+##### `ops.size`
+
+El tamaño declarado de los datos que se van a insertar. Si es proveído, cacache
+verificará que los datos escritos sean de ese tamaño, o si no, fallará con un
+error con código `EBADSIZE`.
+
+##### `ops.integrity`
+
+El hacheo de integridad de los datos siendo escritos.
+
+Si es proveído, y los datos escritos no le corresponden, la operación fallará
+con un error con código `EINTEGRITY`.
+
+`ops.algorithms` no tiene ningún efecto si esta opción está presente.
+
+##### `ops.algorithms`
+
+Por Defecto: `['sha512']`
+
+Algoritmos que se deben usar cuando se calcule el hacheo de [subresource
+integrity](#integrity) para los datos insertados. Puede usar cualquier algoritmo
+enumerado en `crypto.getHashes()`.
+
+Por el momento, sólo se acepta un algoritmo (dígase, un array con exáctamente un
+valor). No tiene ningún efecto si `ops.integrity` también ha sido proveido.
+
+##### `ops.uid`/`ops.gid`
+
+Si están presentes, cacache hará todo lo posible para asegurarse que todos los
+ficheros creados en el proceso de sus operaciones en el caché usen esta
+combinación en particular.
+
+##### `ops.memoize`
+
+Por Defecto: `null`
+
+Si es verdad, cacache tratará de memoizar los datos de la entrada en memoria. La
+próxima vez que el proceso corriente trate de accesar los datos o entrada,
+cacache buscará en memoria antes de buscar en disco.
+
+Si `ops.memoize` es un objeto regular o un objeto como `Map` (es decir, un
+objeto con métodos `get()` y `set()`), este objeto en sí sera usado en vez del
+caché de memoria global. Esto permite tener lógica específica a tu aplicación
+encuanto al almacenaje en memoria de tus datos.
+
+Si quieres asegurarte que los datos se lean del disco en vez de memoria, usa
+`memoize: false` cuando uses funciones de `cacache.saca`.
+
+#### <a name="rm-all"></a> `> cacache.rm.todo(cache) -> Promise`
+
+Borra el caché completo, incluyendo ficheros temporeros, ficheros de datos, y el
+índice del caché.
+
+##### Ejemplo
+
+```javascript
+cacache.rm.todo(rutaCache).then(() => {
+ console.log('THE APOCALYPSE IS UPON US 😱')
+})
+```
+
+#### <a name="rm-entry"></a> `> cacache.rm.entrada(cache, clave) -> Promise`
+
+Alias: `cacache.rm`
+
+Borra la entrada `clave` del índice. El contenido asociado con esta entrada
+seguirá siendo accesible por hacheo usando
+[`saca.flujo.porHacheo`](#get-stream).
+
+Para borrar el contenido en sí, usa [`rm.datos`](#rm-content). Si quieres hacer
+esto de manera más segura (pues ficheros de contenido pueden ser usados por
+multiples entradas), usa [`verifica`](#verify) para borrar huérfanos.
+
+##### Ejemplo
+
+```javascript
+cacache.rm.entrada(rutaCache, 'my-thing').then(() => {
+ console.log('I did not like it anyway')
+})
+```
+
+#### <a name="rm-content"></a> `> cacache.rm.datos(cache, integrity) -> Promise`
+
+Borra el contenido identificado por `integrity`. Cualquier entrada que se
+refiera a este contenido quedará huérfana y se invalidará si se trata de
+accesar, a menos que contenido idéntico sea añadido bajo `integrity`.
+
+##### Ejemplo
+
+```javascript
+cacache.rm.datos(rutaCache, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
+ console.log('los datos para `mi-cosa` se borraron')
+})
+```
+
+#### <a name="set-locale"></a> `> cacache.ponLenguaje(locale)`
+
+Configura el lenguaje usado para mensajes y errores de cacache. La lista de
+lenguajes disponibles está en el directorio `./locales` del proyecto.
+
+_Te interesa añadir más lenguajes? [Somete un PR](CONTRIBUTING.md)!_
+
+#### <a name="clear-memoized"></a> `> cacache.limpiaMemoizado()`
+
+Completamente reinicializa el caché de memoria interno. Si estás usando tu
+propio objecto con `ops.memoize`, debes hacer esto de manera específica a él.
+
+#### <a name="tmp-mkdir"></a> `> tmp.hazdir(cache, ops) -> Promise<Path>`
+
+Alias: `tmp.mkdir`
+
+Devuelve un directorio único dentro del directorio `tmp` del caché.
+
+Una vez tengas el directorio, es responsabilidad tuya asegurarte que todos los
+ficheros escrito a él sean creados usando los permisos y `uid`/`gid` concordante
+con el caché. Si no, puedes pedirle a cacache que lo haga llamando a
+[`cacache.tmp.fix()`](#tmp-fix). Esta función arreglará todos los permisos en el
+directorio tmp.
+
+Si quieres que cacache limpie el directorio automáticamente cuando termines, usa
+[`cacache.tmp.conTmp()`](#with-tmp).
+
+##### Ejemplo
+
+```javascript
+cacache.tmp.mkdir(cache).then(dir => {
+ fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+})
+```
+
+#### <a name="with-tmp"></a> `> tmp.conTmp(cache, ops, cb) -> Promise`
+
+Crea un directorio temporero con [`tmp.mkdir()`](#tmp-mkdir) y ejecuta `cb` con
+él como primer argumento. El directorio creado será removido automáticamente
+cuando el valor devuelto por `cb()` se resuelva.
+
+Las mismas advertencias aplican en cuanto a manejando permisos para los ficheros
+dentro del directorio.
+
+##### Ejemplo
+
+```javascript
+cacache.tmp.conTmp(cache, dir => {
+ return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+}).then(() => {
+ // `dir` no longer exists
+})
+```
+
+#### <a name="integrity"></a> Hacheos de Subresource Integrity
+
+cacache usa strings que siguen la especificación de [Subresource Integrity
+spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
+
+Es decir, donde quiera cacache espera un argumento o opción `integrity`, ese
+string debería usar el formato `<algoritmo>-<hacheo-base64>`.
+
+Una variación importante sobre los hacheos que cacache acepta es que acepta el
+nombre de cualquier algoritmo aceptado por el proceso de Node.js donde se usa.
+Puedes usar `crypto.getHashes()` para ver cuales están disponibles.
+
+##### Generando tus propios hacheos
+
+Si tienes un `shasum`, en general va a estar en formato de string hexadecimal
+(es decir, un `sha1` se vería como algo así:
+`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`).
+
+Para ser compatible con cacache, necesitas convertir esto a su equivalente en
+subresource integrity. Por ejemplo, el hacheo correspondiente al ejemplo
+anterior sería: `sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
+
+Puedes usar código así para generarlo por tu cuenta:
+
+```javascript
+const crypto = require('crypto')
+const algoritmo = 'sha512'
+const datos = 'foobarbaz'
+
+const integrity = (
+  algoritmo +
+ '-' +
+ crypto.createHash(algoritmo).update(datos).digest('base64')
+)
+```
+
+También puedes usar [`ssri`](https://npm.im/ssri) para deferir el trabajo a otra
+librería que garantiza que todo esté correcto, pues maneja probablemente todas
+las operaciones que tendrías que hacer con SRIs, incluyendo convirtiendo entre
+hexadecimal y el formato SRI.
+
+#### <a name="verify"></a> `> cacache.verifica(cache, ops) -> Promise`
+
+Examina y arregla tu caché:
+
+* Limpia entradas inválidas, huérfanas y corrompidas
+* Te deja filtrar cuales entradas retener, con tu propio filtro
+* Reclama cualquier ficheros de contenido sin referencias en el índice
+* Verifica integridad de todos los ficheros de contenido y remueve los malos
+* Arregla permisos del caché
+* Remueve el directorio `tmp` en el caché, y todo su contenido.
+
+Cuando termine, devuelve un objeto con varias estadísticas sobre el proceso de
+verificación, por ejemplo la cantidad de espacio de disco reclamado, el número
+de entradas válidas, número de entradas removidas, etc.
+
+##### Opciones
+
+* `ops.uid` - uid para asignarle al caché y su contenido
+* `ops.gid` - gid para asignarle al caché y su contenido
+* `ops.filter` - recibe una entrada como argumento. Devuelve falso para removerla. Nota: es posible que esta función sea invocada con la misma entrada más de una vez.
+
+##### Example
+
+```sh
+echo somegarbage >> $RUTACACHE/content/deadbeef
+```
+
+```javascript
+cacache.verifica(rutaCache).then(stats => {
+ // deadbeef collected, because of invalid checksum.
+ console.log('cache is much nicer now! stats:', stats)
+})
+```
+
+#### <a name="verify-last-run"></a> `> cacache.verifica.ultimaVez(cache) -> Promise`
+
+Alias: `últimaVez`
+
+Devuelve un `Date` que representa la última vez que `cacache.verifica` fue
+ejecutada en `cache`.
+
+##### Example
+
+```javascript
+cacache.verifica(rutaCache).then(() => {
+ cacache.verifica.ultimaVez(rutaCache).then(última => {
+ console.log('La última vez que se usó cacache.verifica() fue ' + última)
+ })
+})
+```
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.md
new file mode 100644
index 0000000000..ea69b8f540
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/README.md
@@ -0,0 +1,624 @@
+# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
+
+[`cacache`](https://github.com/zkat/cacache) is a Node.js library for managing
+local key and content address caches. It's really fast, really good at
+concurrency, and it will never give you corrupted data, even if cache files
+get corrupted or manipulated.
+
+It was originally written to be used as [npm](https://npm.im)'s local cache, but
+can just as easily be used on its own
+
+_Translations: [español](README.es.md)_
+
+## Install
+
+`$ npm install --save cacache`
+
+## Table of Contents
+
+* [Example](#example)
+* [Features](#features)
+* [Contributing](#contributing)
+* [API](#api)
+ * [Using localized APIs](#localized-api)
+ * Reading
+ * [`ls`](#ls)
+ * [`ls.stream`](#ls-stream)
+ * [`get`](#get-data)
+ * [`get.stream`](#get-stream)
+ * [`get.info`](#get-info)
+ * [`get.hasContent`](#get-hasContent)
+ * Writing
+ * [`put`](#put-data)
+ * [`put.stream`](#put-stream)
+ * [`put*` opts](#put-options)
+ * [`rm.all`](#rm-all)
+ * [`rm.entry`](#rm-entry)
+ * [`rm.content`](#rm-content)
+ * Utilities
+ * [`setLocale`](#set-locale)
+ * [`clearMemoized`](#clear-memoized)
+ * [`tmp.mkdir`](#tmp-mkdir)
+ * [`tmp.withTmp`](#with-tmp)
+ * Integrity
+ * [Subresource Integrity](#integrity)
+ * [`verify`](#verify)
+ * [`verify.lastRun`](#verify-last-run)
+
+### Example
+
+```javascript
+const cacache = require('cacache/en')
+const fs = require('fs')
+
+const tarball = '/path/to/mytar.tgz'
+const cachePath = '/tmp/my-toy-cache'
+const key = 'my-unique-key-1234'
+
+// Cache it! Use `cachePath` as the root of the content cache
+cacache.put(cachePath, key, '10293801983029384').then(integrity => {
+ console.log(`Saved content to ${cachePath}.`)
+})
+
+const destination = '/tmp/mytar.tgz'
+
+// Copy the contents out of the cache and into their destination!
+// But this time, use stream instead!
+cacache.get.stream(
+ cachePath, key
+).pipe(
+ fs.createWriteStream(destination)
+).on('finish', () => {
+ console.log('done extracting!')
+})
+
+// The same thing, but skip the key index.
+cacache.get.byDigest(cachePath, integrityHash).then(data => {
+ fs.writeFile(destination, data, err => {
+ console.log('tarball data fetched based on its sha512sum and written out!')
+ })
+})
+```
+
+### Features
+
+* Extraction by key or by content address (shasum, etc)
+* [Subresource Integrity](#integrity) web standard support
+* Multi-hash support - safely host sha1, sha512, etc, in a single cache
+* Automatic content deduplication
+* Fault tolerance (immune to corruption, partial writes, process races, etc)
+* Consistency guarantees on read and write (full data verification)
+* Lockless, high-concurrency cache access
+* Streaming support
+* Promise support
+* Pretty darn fast -- sub-millisecond reads and writes including verification
+* Arbitrary metadata storage
+* Garbage collection and additional offline verification
+* Thorough test coverage
+* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔
+
+### Contributing
+
+The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
+
+All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.
+
+Please refer to the [Changelog](CHANGELOG.md) for project history details, too.
+
+Happy hacking!
+
+### API
+
+#### <a name="localized-api"></a> Using localized APIs
+
+cacache includes a complete API in English, with the same features as other
+translations. To use the English API as documented in this README, use
+`require('cacache/en')`. This is also currently the default if you do
+`require('cacache')`, but may change in the future.
+
+cacache also supports other languages! You can find the list of currently
+supported ones by looking in `./locales` in the source directory. You can use
+the API in that language with `require('cacache/<lang>')`.
+
+Want to add support for a new language? Please go ahead! You should be able to
+copy `./locales/en.js` and `./locales/en.json` and fill them in. Translating the
+`README.md` is a bit more work, but also appreciated if you get around to it. 👍🏼
+
+#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`
+
+Lists info for all entries currently in the cache as a single large object. Each
+entry in the object will be keyed by the unique index key, with corresponding
+[`get.info`](#get-info) objects as the values.
+
+##### Example
+
+```javascript
+cacache.ls(cachePath).then(console.log)
+// Output
+{
+ 'my-thing': {
+ key: 'my-thing',
+ integrity: 'sha512-BaSe64/EnCoDED+HAsh=='
+ path: '.testcache/content/deadbeef', // joined with `cachePath`
+ time: 12345698490,
+ size: 4023948,
+ metadata: {
+ name: 'blah',
+ version: '1.2.3',
+ description: 'this was once a package but now it is my-thing'
+ }
+ },
+ 'other-thing': {
+ key: 'other-thing',
+ integrity: 'sha1-ANothER+hasH=',
+ path: '.testcache/content/bada55',
+ time: 11992309289,
+ size: 111112
+ }
+}
+```
+
+#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable`
+
+Lists info for all entries currently in the cache as a single large object.
+
+This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are
+returned as `'data'` events on the returned stream.
+
+##### Example
+
+```javascript
+cacache.ls.stream(cachePath).on('data', console.log)
+// Output
+{
+ key: 'my-thing',
+ integrity: 'sha512-BaSe64HaSh',
+ path: '.testcache/content/deadbeef', // joined with `cachePath`
+ time: 12345698490,
+ size: 13423,
+ metadata: {
+ name: 'blah',
+ version: '1.2.3',
+ description: 'this was once a package but now it is my-thing'
+ }
+}
+
+{
+ key: 'other-thing',
+ integrity: 'whirlpool-WoWSoMuchSupport',
+ path: '.testcache/content/bada55',
+ time: 11992309289,
+ size: 498023984029
+}
+
+{
+ ...
+}
+```
+
+#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
+
+Returns an object with the cached data, digest, and metadata identified by
+`key`. The `data` property of this object will be a `Buffer` instance that
+presumably holds some data that means something to you. I'm sure you know what
+to do with it! cacache just won't care.
+
+`integrity` is a [Subresource
+Integrity](#integrity)
+string. That is, a string that can be used to verify `data`, which looks like
+`<hash-algorithm>-<base64-integrity-hash>`.
+
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, the promise will be rejected.
+
+A sub-function, `get.byDigest` may be used for identical behavior, except lookup
+will happen by integrity hash, bypassing the index entirely. This version of the
+function *only* returns `data` itself, without any wrapper.
+
+##### Note
+
+This function loads the entire cache entry into memory before returning it. If
+you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
+instead.
+
+##### Example
+
+```javascript
+// Look up by key
+cache.get(cachePath, 'my-thing').then(console.log)
+// Output:
+{
+ metadata: {
+ thingName: 'my'
+ },
+ integrity: 'sha512-BaSe64HaSh',
+ data: Buffer#<deadbeef>,
+ size: 9320
+}
+
+// Look up by digest
+cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
+// Output:
+Buffer#<deadbeef>
+```
+
+#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`
+
+Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
+
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, an error will be emitted.
+
+`metadata` and `integrity` events will be emitted before the stream closes, if
+you need to collect that extra data about the cached entry.
+
+A sub-function, `get.stream.byDigest` may be used for identical behavior,
+except lookup will happen by integrity hash, bypassing the index entirely. This
+version does not emit the `metadata` and `integrity` events at all.
+
+##### Example
+
+```javascript
+// Look up by key
+cache.get.stream(
+ cachePath, 'my-thing'
+).on('metadata', metadata => {
+ console.log('metadata:', metadata)
+}).on('integrity', integrity => {
+ console.log('integrity:', integrity)
+}).pipe(
+ fs.createWriteStream('./x.tgz')
+)
+// Outputs:
+metadata: { ... }
+integrity: 'sha512-SoMeDIGest+64=='
+
+// Look up by digest
+cache.get.stream.byDigest(
+ cachePath, 'sha512-SoMeDIGest+64=='
+).pipe(
+ fs.createWriteStream('./x.tgz')
+)
+```
+
+#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
+
+Looks up `key` in the cache index, returning information about the entry if
+one exists.
+
+##### Fields
+
+* `key` - Key the entry was looked up under. Matches the `key` argument.
+* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
+* `path` - Filesystem path relative to `cache` argument where content is stored.
+* `time` - Timestamp the entry was first added on.
+* `metadata` - User-assigned metadata associated with the entry/content.
+
+##### Example
+
+```javascript
+cacache.get.info(cachePath, 'my-thing').then(console.log)
+
+// Output
+{
+ key: 'my-thing',
+  integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
+ path: '.testcache/content/deadbeef',
+ time: 12345698490,
+ size: 849234,
+ metadata: {
+ name: 'blah',
+ version: '1.2.3',
+ description: 'this was once a package but now it is my-thing'
+ }
+}
+```
+
+#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise`
+
+Looks up a [Subresource Integrity hash](#integrity) in the cache. If content
+exists for this `integrity`, it will return an object containing the specific single integrity
+hash that was found in the `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`.
+
+##### Example
+
+```javascript
+cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
+
+// Output
+{
+ sri: {
+ source: 'sha256-MUSTVERIFY+ALL/THINGS==',
+ algorithm: 'sha256',
+ digest: 'MUSTVERIFY+ALL/THINGS==',
+ options: []
+ },
+ size: 9001
+}
+
+cacache.get.hasContent(cachePath, 'sha512-NOT+IN/CACHE==').then(console.log)
+
+// Output
+false
+```
+
+#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`
+
+Inserts data passed to it into the cache. The returned Promise resolves with a
+digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the
+cache entry has been successfully written.
+
+##### Example
+
+```javascript
+fetch(
+ 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
+).then(data => {
+ return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data)
+}).then(integrity => {
+ console.log('integrity hash is', integrity)
+})
+```
+
+#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`
+
+Returns a [Writable
+Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
+data written to it into the cache. Emits an `integrity` event with the digest of
+written contents when it succeeds.
+
+##### Example
+
+```javascript
+request.get(
+ 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
+).pipe(
+ cacache.put.stream(
+ cachePath, 'registry.npmjs.org|cacache@1.0.0'
+ ).on('integrity', d => console.log(`integrity digest is ${d}`))
+)
+```
+
+#### <a name="put-options"></a> `> cacache.put options`
+
+`cacache.put` functions have a number of options in common.
+
+##### `opts.metadata`
+
+Arbitrary metadata to be attached to the inserted key.
+
+##### `opts.size`
+
+If provided, the data stream will be verified to check that enough data was
+passed through. If there's more or less data than expected, insertion will fail
+with an `EBADSIZE` error.
+
+##### `opts.integrity`
+
+If present, the pre-calculated digest for the inserted content. If this option
+is provided and does not match the post-insertion digest, insertion will fail
+with an `EINTEGRITY` error.
+
+`algorithms` has no effect if this option is present.
+
+##### `opts.algorithms`
+
+Default: ['sha512']
+
+Hashing algorithms to use when calculating the [subresource integrity
+digest](#integrity)
+for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
+`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
+may also use any anagram of `'modnar'` to use this feature.
+
+Currently only supports one algorithm at a time (i.e., an array length of
+exactly `1`). Has no effect if `opts.integrity` is present.
+
+##### `opts.uid`/`opts.gid`
+
+If provided, cacache will do its best to make sure any new files added to the
+cache use this particular `uid`/`gid` combination. This can be used,
+for example, to drop permissions when someone uses `sudo`, but cacache makes
+no assumptions about your needs here.
+
+##### `opts.memoize`
+
+Default: null
+
+If provided, cacache will memoize the given cache insertion in memory, bypassing
+any filesystem checks for that key or digest in future cache fetches. Nothing
+will be written to the in-memory cache unless this option is explicitly truthy.
+
+If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
+and `set` methods), it will be written to instead of the global memoization
+cache.
+
+Reading from disk data can be forced by explicitly passing `memoize: false` to
+the reader functions, but their default will be to read from memory.
+
+#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`
+
+Clears the entire cache. Mainly by blowing away the cache directory itself.
+
+##### Example
+
+```javascript
+cacache.rm.all(cachePath).then(() => {
+ console.log('THE APOCALYPSE IS UPON US 😱')
+})
+```
+
+#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key) -> Promise`
+
+Alias: `cacache.rm`
+
+Removes the index entry for `key`. Content will still be accessible if
+requested directly by content address ([`get.stream.byDigest`](#get-stream)).
+
+To remove the content itself (which might still be used by other entries), use
+[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
+[`verify`](#verify).
+
+##### Example
+
+```javascript
+cacache.rm.entry(cachePath, 'my-thing').then(() => {
+ console.log('I did not like it anyway')
+})
+```
+
+#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise`
+
+Removes the content identified by `integrity`. Any index entries referring to it
+will not be usable again until the content is re-added to the cache with an
+identical digest.
+
+##### Example
+
+```javascript
+cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
+ console.log('data for my-thing is gone!')
+})
+```
+
+#### <a name="set-locale"></a> `> cacache.setLocale(locale)`
+
+Configure the language/locale used for messages and errors coming from cacache.
+The list of available locales is in the `./locales` directory in the project
+root.
+
+_Interested in contributing more languages? [Submit a PR](CONTRIBUTING.md)!_
+
+#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
+
+Completely resets the in-memory entry cache.
+
+#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
+
+Returns a unique temporary directory inside the cache's `tmp` dir. This
+directory will use the same safe user assignment that all the other stuff uses.
+
+Once the directory is made, it's the user's responsibility that all files within
+are made according to the same `opts.gid`/`opts.uid` settings that would be
+passed in. If not, you can ask cacache to do it for you by calling
+[`tmp.fix()`](#tmp-fix), which will fix all tmp directory permissions.
+
+If you want automatic cleanup of this directory, use
+[`tmp.withTmp()`](#with-tmp)
+
+##### Example
+
+```javascript
+cacache.tmp.mkdir(cache).then(dir => {
+ fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+})
+```
+
+#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
+
+Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
+with it. The created temporary directory will be removed when the return value
+of `cb()` resolves -- that is, if you return a Promise from `cb()`, the tmp
+directory will be automatically deleted once that promise completes.
+
+The same caveats apply when it comes to managing permissions for the tmp dir's
+contents.
+
+##### Example
+
+```javascript
+cacache.tmp.withTmp(cache, dir => {
+ return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+}).then(() => {
+ // `dir` no longer exists
+})
+```
+
+#### <a name="integrity"></a> Subresource Integrity Digests
+
+For content verification and addressing, cacache uses strings following the
+[Subresource
+Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
+That is, any time cacache expects an `integrity` argument or option, it
+should be in the format `<hashAlgorithm>-<base64-hash>`.
+
+One deviation from the current spec is that cacache will support any hash
+algorithms supported by the underlying Node.js process. You can use
+`crypto.getHashes()` to see which ones you can use.
+
+##### Generating Digests Yourself
+
+If you have an existing content shasum, they are generally formatted as a
+hexadecimal string (that is, a sha1 would look like:
+`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
+cacache, you'll need to convert this to an equivalent subresource integrity
+string. For this example, the corresponding hash would be:
+`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
+
+If you want to generate an integrity string yourself for existing data, you can
+use something like this:
+
+```javascript
+const crypto = require('crypto')
+const hashAlgorithm = 'sha512'
+const data = 'foobarbaz'
+
+const integrity = (
+ hashAlgorithm +
+ '-' +
+ crypto.createHash(hashAlgorithm).update(data).digest('base64')
+)
+```
+
+You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
+around SRI strings, including generation, parsing, and translating from existing
+hex-formatted strings.
+
+#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`
+
+Checks out and fixes up your cache:
+
+* Cleans up corrupted or invalid index entries.
+* Custom entry filtering options.
+* Garbage collects any content entries not referenced by the index.
+* Checks integrity for all content entries and removes invalid content.
+* Fixes cache ownership.
+* Removes the `tmp` directory in the cache and all its contents.
+
+When it's done, it'll return an object with various stats about the verification
+process, including amount of storage reclaimed, number of valid entries, number
+of entries removed, etc.
+
+##### Options
+
+* `opts.uid` - uid to assign to cache and its contents
+* `opts.gid` - gid to assign to cache and its contents
+* `opts.filter` - receives a formatted entry. Return false to remove it.
+ Note: might be called more than once on the same entry.
+
+##### Example
+
+```sh
+echo somegarbage >> $CACHEPATH/content/deadbeef
+```
+
+```javascript
+cacache.verify(cachePath).then(stats => {
+ // deadbeef collected, because of invalid checksum.
+ console.log('cache is much nicer now! stats:', stats)
+})
+```
+
+#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`
+
+Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
+
+##### Example
+
+```javascript
+cacache.verify(cachePath).then(() => {
+ cacache.verify.lastRun(cachePath).then(lastTime => {
+    console.log('cacache.verify was last called on ' + lastTime)
+ })
+})
+```
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/en.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/en.js
new file mode 100644
index 0000000000..a3db581c9f
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/en.js
@@ -0,0 +1,3 @@
+'use strict'
+
+module.exports = require('./locales/en.js')
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/es.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/es.js
new file mode 100644
index 0000000000..6282363c3b
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/es.js
@@ -0,0 +1,3 @@
+'use strict'
+
+module.exports = require('./locales/es.js')
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/get.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/get.js
new file mode 100644
index 0000000000..2bb3afa528
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/get.js
@@ -0,0 +1,190 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const fs = require('fs')
+const index = require('./lib/entry-index')
+const memo = require('./lib/memoization')
+const pipe = require('mississippi').pipe
+const pipeline = require('mississippi').pipeline
+const read = require('./lib/content/read')
+const through = require('mississippi').through
+
+module.exports = function get (cache, key, opts) {
+ return getData(false, cache, key, opts)
+}
+module.exports.byDigest = function getByDigest (cache, digest, opts) {
+ return getData(true, cache, digest, opts)
+}
+function getData (byDigest, cache, key, opts) {
+ opts = opts || {}
+ const memoized = (
+ byDigest
+ ? memo.get.byDigest(cache, key, opts)
+ : memo.get(cache, key, opts)
+ )
+ if (memoized && opts.memoize !== false) {
+ return BB.resolve(byDigest ? memoized : {
+ metadata: memoized.entry.metadata,
+ data: memoized.data,
+ integrity: memoized.entry.integrity,
+ size: memoized.entry.size
+ })
+ }
+ return (
+ byDigest ? BB.resolve(null) : index.find(cache, key, opts)
+ ).then(entry => {
+ if (!entry && !byDigest) {
+ throw new index.NotFoundError(cache, key)
+ }
+ return read(cache, byDigest ? key : entry.integrity, {
+ integrity: opts.integrity,
+ size: opts.size
+ }).then(data => byDigest ? data : {
+ metadata: entry.metadata,
+ data: data,
+ size: entry.size,
+ integrity: entry.integrity
+ }).then(res => {
+ if (opts.memoize && byDigest) {
+ memo.put.byDigest(cache, key, res, opts)
+ } else if (opts.memoize) {
+ memo.put(cache, entry, res.data, opts)
+ }
+ return res
+ })
+ })
+}
+
+module.exports.stream = getStream
+function getStream (cache, key, opts) {
+ opts = opts || {}
+ let stream = through()
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && opts.memoize !== false) {
+ stream.on('newListener', function (ev, cb) {
+ ev === 'metadata' && cb(memoized.entry.metadata)
+ ev === 'integrity' && cb(memoized.entry.integrity)
+ ev === 'size' && cb(memoized.entry.size)
+ })
+ stream.write(memoized.data, () => stream.end())
+ return stream
+ }
+ index.find(cache, key).then(entry => {
+ if (!entry) {
+ return stream.emit(
+ 'error', new index.NotFoundError(cache, key)
+ )
+ }
+ let memoStream
+ if (opts.memoize) {
+ let memoData = []
+ let memoLength = 0
+ memoStream = through((c, en, cb) => {
+ memoData && memoData.push(c)
+ memoLength += c.length
+ cb(null, c, en)
+ }, cb => {
+ memoData && memo.put(cache, entry, Buffer.concat(memoData, memoLength), opts)
+ cb()
+ })
+ } else {
+ memoStream = through()
+ }
+ opts.size = opts.size == null ? entry.size : opts.size
+ stream.emit('metadata', entry.metadata)
+ stream.emit('integrity', entry.integrity)
+ stream.emit('size', entry.size)
+ stream.on('newListener', function (ev, cb) {
+ ev === 'metadata' && cb(entry.metadata)
+ ev === 'integrity' && cb(entry.integrity)
+ ev === 'size' && cb(entry.size)
+ })
+ pipe(
+ read.readStream(cache, entry.integrity, opts),
+ memoStream,
+ stream
+ )
+ }, err => stream.emit('error', err))
+ return stream
+}
+
+module.exports.stream.byDigest = getStreamDigest
+function getStreamDigest (cache, integrity, opts) {
+ opts = opts || {}
+ const memoized = memo.get.byDigest(cache, integrity, opts)
+ if (memoized && opts.memoize !== false) {
+ const stream = through()
+ stream.write(memoized, () => stream.end())
+ return stream
+ } else {
+ let stream = read.readStream(cache, integrity, opts)
+ if (opts.memoize) {
+ let memoData = []
+ let memoLength = 0
+ const memoStream = through((c, en, cb) => {
+ memoData && memoData.push(c)
+ memoLength += c.length
+ cb(null, c, en)
+ }, cb => {
+ memoData && memo.put.byDigest(
+ cache,
+ integrity,
+ Buffer.concat(memoData, memoLength),
+ opts
+ )
+ cb()
+ })
+ stream = pipeline(stream, memoStream)
+ }
+ return stream
+ }
+}
+
+module.exports.info = info
+function info (cache, key, opts) {
+ opts = opts || {}
+ const memoized = memo.get(cache, key, opts)
+ if (memoized && opts.memoize !== false) {
+ return BB.resolve(memoized.entry)
+ } else {
+ return index.find(cache, key)
+ }
+}
+
+module.exports.hasContent = read.hasContent
+
+module.exports.copy = function cp (cache, key, dest, opts) {
+ return copy(false, cache, key, dest, opts)
+}
+module.exports.copy.byDigest = function cpDigest (cache, digest, dest, opts) {
+ return copy(true, cache, digest, dest, opts)
+}
+function copy (byDigest, cache, key, dest, opts) {
+ opts = opts || {}
+ if (read.copy) {
+ return (
+ byDigest ? BB.resolve(null) : index.find(cache, key, opts)
+ ).then(entry => {
+ if (!entry && !byDigest) {
+ throw new index.NotFoundError(cache, key)
+ }
+ return read.copy(
+ cache, byDigest ? key : entry.integrity, dest, opts
+ ).then(() => byDigest ? key : {
+ metadata: entry.metadata,
+ size: entry.size,
+ integrity: entry.integrity
+ })
+ })
+ } else {
+ return getData(byDigest, cache, key, opts).then(res => {
+ return fs.writeFileAsync(dest, byDigest ? res : res.data)
+ .then(() => byDigest ? key : {
+ metadata: res.metadata,
+ size: res.size,
+ integrity: res.integrity
+ })
+ })
+ }
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/index.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/index.js
new file mode 100644
index 0000000000..a3db581c9f
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/index.js
@@ -0,0 +1,3 @@
+'use strict'
+
+module.exports = require('./locales/en.js')
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000..fa6491ba6f
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+function contentPath (cache, integrity) {
+ const sri = ssri.parse(integrity, {single: true})
+ // contentPath is the *strongest* algo given
+ return path.join.apply(path, [
+ contentDir(cache),
+ sri.algorithm
+ ].concat(hashToSegments(sri.hexDigest())))
+}
+
+module.exports._contentDir = contentDir
+function contentDir (cache) {
+ return path.join(cache, `content-v${contentVer}`)
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000..b09ad5cb40
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,115 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const contentPath = require('./path')
+const fs = require('graceful-fs')
+const PassThrough = require('stream').PassThrough
+const pipe = BB.promisify(require('mississippi').pipe)
+const ssri = require('ssri')
+const Y = require('../util/y.js')
+
+BB.promisifyAll(fs)
+
+module.exports = read
+function read (cache, integrity, opts) {
+ opts = opts || {}
+ return pickContentSri(cache, integrity).then(content => {
+ const sri = content.sri
+ const cpath = contentPath(cache, sri)
+ return fs.readFileAsync(cpath, null).then(data => {
+ if (typeof opts.size === 'number' && opts.size !== data.length) {
+ throw sizeError(opts.size, data.length)
+ } else if (ssri.checkData(data, sri)) {
+ return data
+ } else {
+ throw integrityError(sri, cpath)
+ }
+ })
+ })
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+function readStream (cache, integrity, opts) {
+ opts = opts || {}
+ const stream = new PassThrough()
+ pickContentSri(
+ cache, integrity
+ ).then(content => {
+ const sri = content.sri
+ return pipe(
+ fs.createReadStream(contentPath(cache, sri)),
+ ssri.integrityStream({
+ integrity: sri,
+ size: opts.size
+ }),
+ stream
+ )
+ }).catch(err => {
+ stream.emit('error', err)
+ })
+ return stream
+}
+
+if (fs.copyFile) {
+ module.exports.copy = copy
+}
+function copy (cache, integrity, dest, opts) {
+ opts = opts || {}
+ return pickContentSri(cache, integrity).then(content => {
+ const sri = content.sri
+ const cpath = contentPath(cache, sri)
+ return fs.copyFileAsync(cpath, dest).then(() => content.size)
+ })
+}
+
+module.exports.hasContent = hasContent
+function hasContent (cache, integrity) {
+ if (!integrity) { return BB.resolve(false) }
+ return pickContentSri(cache, integrity)
+ .catch({code: 'ENOENT'}, () => false)
+ .catch({code: 'EPERM'}, err => {
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ return false
+ }
+ }).then(content => {
+ if (!content.sri) return false
+ return ({ sri: content.sri, size: content.stat.size })
+ })
+}
+
+module.exports._pickContentSri = pickContentSri
+function pickContentSri (cache, integrity) {
+ const sri = ssri.parse(integrity)
+ // If `integrity` has multiple entries, pick the first digest
+ // with available local data.
+ const algo = sri.pickAlgorithm()
+ const digests = sri[algo]
+ if (digests.length <= 1) {
+ const cpath = contentPath(cache, digests[0])
+ return fs.lstatAsync(cpath).then(stat => ({ sri: digests[0], stat }))
+ } else {
+ return BB.any(sri[sri.pickAlgorithm()].map(meta => {
+ return pickContentSri(cache, meta)
+ }))
+ }
+}
+
+function sizeError (expected, found) {
+ var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
+
+function integrityError (sri, path) {
+ var err = new Error(Y`Integrity verification failed for ${sri} (${path})`)
+ err.code = 'EINTEGRITY'
+ err.sri = sri
+ err.path = path
+ return err
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000..12cf158235
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,21 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const contentPath = require('./path')
+const hasContent = require('./read').hasContent
+const rimraf = BB.promisify(require('rimraf'))
+
+module.exports = rm
+function rm (cache, integrity) {
+ return hasContent(cache, integrity).then(content => {
+ if (content) {
+ const sri = content.sri
+ if (sri) {
+ return rimraf(contentPath(cache, sri)).then(() => true)
+ }
+ } else {
+ return false
+ }
+ })
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000..a79ae92902
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,162 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const contentPath = require('./path')
+const fixOwner = require('../util/fix-owner')
+const fs = require('graceful-fs')
+const moveFile = require('../util/move-file')
+const PassThrough = require('stream').PassThrough
+const path = require('path')
+const pipe = BB.promisify(require('mississippi').pipe)
+const rimraf = BB.promisify(require('rimraf'))
+const ssri = require('ssri')
+const to = require('mississippi').to
+const uniqueFilename = require('unique-filename')
+const Y = require('../util/y.js')
+
+const writeFileAsync = BB.promisify(fs.writeFile)
+
+module.exports = write
+function write (cache, data, opts) {
+ opts = opts || {}
+ if (opts.algorithms && opts.algorithms.length > 1) {
+ throw new Error(
+ Y`opts.algorithms only supports a single algorithm for now`
+ )
+ }
+ if (typeof opts.size === 'number' && data.length !== opts.size) {
+ return BB.reject(sizeError(opts.size, data.length))
+ }
+ const sri = ssri.fromData(data, opts)
+ if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
+ return BB.reject(checksumError(opts.integrity, sri))
+ }
+ return BB.using(makeTmp(cache, opts), tmp => (
+ writeFileAsync(
+ tmp.target, data, {flag: 'wx'}
+ ).then(() => (
+ moveToDestination(tmp, cache, sri, opts)
+ ))
+ )).then(() => ({integrity: sri, size: data.length}))
+}
+
+module.exports.stream = writeStream
+function writeStream (cache, opts) {
+ opts = opts || {}
+ const inputStream = new PassThrough()
+ let inputErr = false
+ function errCheck () {
+ if (inputErr) { throw inputErr }
+ }
+
+ let allDone
+ const ret = to((c, n, cb) => {
+ if (!allDone) {
+ allDone = handleContent(inputStream, cache, opts, errCheck)
+ }
+ inputStream.write(c, n, cb)
+ }, cb => {
+ inputStream.end(() => {
+ if (!allDone) {
+ const e = new Error(Y`Cache input stream was empty`)
+ e.code = 'ENODATA'
+ return ret.emit('error', e)
+ }
+ allDone.then(res => {
+ res.integrity && ret.emit('integrity', res.integrity)
+ res.size !== null && ret.emit('size', res.size)
+ cb()
+ }, e => {
+ ret.emit('error', e)
+ })
+ })
+ })
+ ret.once('error', e => {
+ inputErr = e
+ })
+ return ret
+}
+
+function handleContent (inputStream, cache, opts, errCheck) {
+ return BB.using(makeTmp(cache, opts), tmp => {
+ errCheck()
+ return pipeToTmp(
+ inputStream, cache, tmp.target, opts, errCheck
+ ).then(res => {
+ return moveToDestination(
+ tmp, cache, res.integrity, opts, errCheck
+ ).then(() => res)
+ })
+ })
+}
+
+function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
+ return BB.resolve().then(() => {
+ let integrity
+ let size
+ const hashStream = ssri.integrityStream({
+ integrity: opts.integrity,
+ algorithms: opts.algorithms,
+ size: opts.size
+ }).on('integrity', s => {
+ integrity = s
+ }).on('size', s => {
+ size = s
+ })
+ const outStream = fs.createWriteStream(tmpTarget, {
+ flags: 'wx'
+ })
+ errCheck()
+ return pipe(inputStream, hashStream, outStream).then(() => {
+ return {integrity, size}
+ }, err => {
+ return rimraf(tmpTarget).then(() => { throw err })
+ })
+ })
+}
+
+function makeTmp (cache, opts) {
+ const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+ return fixOwner.mkdirfix(
+ path.dirname(tmpTarget), opts.uid, opts.gid
+ ).then(() => ({
+ target: tmpTarget,
+ moved: false
+ })).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
+}
+
+function moveToDestination (tmp, cache, sri, opts, errCheck) {
+ errCheck && errCheck()
+ const destination = contentPath(cache, sri)
+ const destDir = path.dirname(destination)
+
+ return fixOwner.mkdirfix(
+ destDir, opts.uid, opts.gid
+ ).then(() => {
+ errCheck && errCheck()
+ return moveFile(tmp.target, destination)
+ }).then(() => {
+ errCheck && errCheck()
+ tmp.moved = true
+ return fixOwner.chownr(destination, opts.uid, opts.gid)
+ })
+}
+
+function sizeError (expected, found) {
+ var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
+
+function checksumError (expected, found) {
+ var err = new Error(Y`Integrity check failed:
+ Wanted: ${expected}
+ Found: ${found}`)
+ err.code = 'EINTEGRITY'
+ err.expected = expected
+ err.found = found
+ return err
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000..face0fe79c
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,224 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const contentPath = require('./content/path')
+const crypto = require('crypto')
+const fixOwner = require('./util/fix-owner')
+const fs = require('graceful-fs')
+const hashToSegments = require('./util/hash-to-segments')
+const ms = require('mississippi')
+const path = require('path')
+const ssri = require('ssri')
+const Y = require('./util/y.js')
+
+const indexV = require('../package.json')['cache-version'].index
+
+const appendFileAsync = BB.promisify(fs.appendFile)
+const readFileAsync = BB.promisify(fs.readFile)
+const readdirAsync = BB.promisify(fs.readdir)
+const concat = ms.concat
+const from = ms.from
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+ constructor (cache, key) {
+ super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
+ this.code = 'ENOENT'
+ this.cache = cache
+ this.key = key
+ }
+}
+
+module.exports.insert = insert
+function insert (cache, key, integrity, opts) {
+ opts = opts || {}
+ const bucket = bucketPath(cache, key)
+ const entry = {
+ key,
+ integrity: integrity && ssri.stringify(integrity),
+ time: Date.now(),
+ size: opts.size,
+ metadata: opts.metadata
+ }
+ return fixOwner.mkdirfix(
+ path.dirname(bucket), opts.uid, opts.gid
+ ).then(() => {
+ const stringified = JSON.stringify(entry)
+ // NOTE - Cleverness ahoy!
+ //
+ // This works because it's tremendously unlikely for an entry to corrupt
+ // another while still preserving the string length of the JSON in
+ // question. So, we just slap the length in there and verify it on read.
+ //
+ // Thanks to @isaacs for the whiteboarding session that ended up with this.
+ return appendFileAsync(
+ bucket, `\n${hashEntry(stringified)}\t${stringified}`
+ )
+ }).then(
+ () => fixOwner.chownr(bucket, opts.uid, opts.gid)
+ ).catch({code: 'ENOENT'}, () => {
+ // There's a class of race conditions that happen when things get deleted
+ // during fixOwner, or between the two mkdirfix/chownr calls.
+ //
+ // It's perfectly fine to just not bother in those cases and lie
+ // that the index entry was written. Because it's a cache.
+ }).then(() => {
+ return formatEntry(cache, entry)
+ })
+}
+
+module.exports.find = find
+function find (cache, key) {
+ const bucket = bucketPath(cache, key)
+ return bucketEntries(cache, bucket).then(entries => {
+ return entries.reduce((latest, next) => {
+ if (next && next.key === key) {
+ return formatEntry(cache, next)
+ } else {
+ return latest
+ }
+ }, null)
+ }).catch(err => {
+ if (err.code === 'ENOENT') {
+ return null
+ } else {
+ throw err
+ }
+ })
+}
+
+module.exports.delete = del
+function del (cache, key, opts) {
+ return insert(cache, key, null, opts)
+}
+
+module.exports.lsStream = lsStream
+function lsStream (cache) {
+ const indexDir = bucketDir(cache)
+ const stream = from.obj()
+
+ // "/cachename/*"
+ readdirOrEmpty(indexDir).map(bucket => {
+ const bucketPath = path.join(indexDir, bucket)
+
+ // "/cachename/<bucket 0xFF>/*"
+ return readdirOrEmpty(bucketPath).map(subbucket => {
+ const subbucketPath = path.join(bucketPath, subbucket)
+
+ // "/cachename/<bucket 0xFF>/<bucket 0xFF>/*"
+ return readdirOrEmpty(subbucketPath).map(entry => {
+ const getKeyToEntry = bucketEntries(
+ cache,
+ path.join(subbucketPath, entry)
+ ).reduce((acc, entry) => {
+ acc.set(entry.key, entry)
+ return acc
+ }, new Map())
+
+ return getKeyToEntry.then(reduced => {
+ return Array.from(reduced.values()).map(
+ entry => stream.push(formatEntry(cache, entry))
+ )
+ }).catch({code: 'ENOENT'}, nop)
+ })
+ })
+ }).then(() => {
+ stream.push(null)
+ }, err => {
+ stream.emit('error', err)
+ })
+
+ return stream
+}
+
+module.exports.ls = ls
+function ls (cache) {
+ return BB.fromNode(cb => {
+ lsStream(cache).on('error', cb).pipe(concat(entries => {
+ cb(null, entries.reduce((acc, xs) => {
+ acc[xs.key] = xs
+ return acc
+ }, {}))
+ }))
+ })
+}
+
+function bucketEntries (cache, bucket, filter) {
+ return readFileAsync(
+ bucket, 'utf8'
+ ).then(data => {
+ let entries = []
+ data.split('\n').forEach(entry => {
+ if (!entry) { return }
+ const pieces = entry.split('\t')
+ if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+ // Hash is no good! Corruption or malice? Doesn't matter!
+ // EJECT EJECT
+ return
+ }
+ let obj
+ try {
+ obj = JSON.parse(pieces[1])
+ } catch (e) {
+ // Entry is corrupted!
+ return
+ }
+ if (obj) {
+ entries.push(obj)
+ }
+ })
+ return entries
+ })
+}
+
+module.exports._bucketDir = bucketDir
+function bucketDir (cache) {
+ return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports._bucketPath = bucketPath
+function bucketPath (cache, key) {
+ const hashed = hashKey(key)
+ return path.join.apply(path, [bucketDir(cache)].concat(
+ hashToSegments(hashed)
+ ))
+}
+
+module.exports._hashKey = hashKey
+function hashKey (key) {
+ return hash(key, 'sha256')
+}
+
+module.exports._hashEntry = hashEntry
+function hashEntry (str) {
+ return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+ return crypto
+ .createHash(digest)
+ .update(str)
+ .digest('hex')
+}
+
+function formatEntry (cache, entry) {
+ // Treat null digests as deletions. They'll shadow any previous entries.
+ if (!entry.integrity) { return null }
+ return {
+ key: entry.key,
+ integrity: entry.integrity,
+ path: contentPath(cache, entry.integrity),
+ size: entry.size,
+ time: entry.time,
+ metadata: entry.metadata
+ }
+}
+
+function readdirOrEmpty (dir) {
+ return readdirAsync(dir)
+ .catch({code: 'ENOENT'}, () => [])
+ .catch({code: 'ENOTDIR'}, () => [])
+}
+
+function nop () {
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000..92179c7ac6
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,69 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MAX_SIZE = 50 * 1024 * 1024 // 50MB
+const MAX_AGE = 3 * 60 * 1000
+
+let MEMOIZED = new LRU({
+ max: MAX_SIZE,
+ maxAge: MAX_AGE,
+ length: (entry, key) => {
+ if (key.startsWith('key:')) {
+ return entry.data.length
+ } else if (key.startsWith('digest:')) {
+ return entry.length
+ }
+ }
+})
+
+module.exports.clearMemoized = clearMemoized
+function clearMemoized () {
+ const old = {}
+ MEMOIZED.forEach((v, k) => {
+ old[k] = v
+ })
+ MEMOIZED.reset()
+ return old
+}
+
+module.exports.put = put
+function put (cache, entry, data, opts) {
+ pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+ putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+function putDigest (cache, integrity, data, opts) {
+ pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+function get (cache, key, opts) {
+ return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+function getDigest (cache, integrity, opts) {
+ return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+ constructor (obj) {
+ this.obj = obj
+ }
+ get (key) { return this.obj[key] }
+ set (key, val) { this.obj[key] = val }
+}
+
+function pickMem (opts) {
+ if (!opts || !opts.memoize) {
+ return MEMOIZED
+ } else if (opts.memoize.get && opts.memoize.set) {
+ return opts.memoize
+ } else if (typeof opts.memoize === 'object') {
+ return new ObjProxy(opts.memoize)
+ } else {
+ return MEMOIZED
+ }
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js
new file mode 100644
index 0000000000..7000bff048
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/fix-owner.js
@@ -0,0 +1,44 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const chownr = BB.promisify(require('chownr'))
+const mkdirp = BB.promisify(require('mkdirp'))
+const inflight = require('promise-inflight')
+
+module.exports.chownr = fixOwner
+function fixOwner (filepath, uid, gid) {
+ if (!process.getuid) {
+ // This platform doesn't need ownership fixing
+ return BB.resolve()
+ }
+ if (typeof uid !== 'number' && typeof gid !== 'number') {
+ // There's no permissions override. Nothing to do here.
+ return BB.resolve()
+ }
+ if ((typeof uid === 'number' && process.getuid() === uid) &&
+ (typeof gid === 'number' && process.getgid() === gid)) {
+ // No need to override if it's already what we used.
+ return BB.resolve()
+ }
+ return inflight(
+ 'fixOwner: fixing ownership on ' + filepath,
+ () => chownr(
+ filepath,
+ typeof uid === 'number' ? uid : process.getuid(),
+ typeof gid === 'number' ? gid : process.getgid()
+ ).catch({code: 'ENOENT'}, () => null)
+ )
+}
+
+module.exports.mkdirfix = mkdirfix
+function mkdirfix (p, uid, gid, cb) {
+ return mkdirp(p).then(made => {
+ if (made) {
+ return fixOwner(made, uid, gid).then(() => made)
+ }
+ }).catch({code: 'EEXIST'}, () => {
+ // There's a race in mkdirp!
+ return fixOwner(p, uid, gid).then(() => null)
+ })
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000..192be2a6d6
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,11 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+ return [
+ hash.slice(0, 2),
+ hash.slice(2, 4),
+ hash.slice(4)
+ ]
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js
new file mode 100644
index 0000000000..e12e98188c
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/move-file.js
@@ -0,0 +1,55 @@
+'use strict'
+
+const fs = require('graceful-fs')
+const BB = require('bluebird')
+const chmod = BB.promisify(fs.chmod)
+const unlink = BB.promisify(fs.unlink)
+let move
+let pinflight
+
+module.exports = moveFile
+function moveFile (src, dest) {
+ // This isn't quite an fs.rename -- the assumption is that
+ // if `dest` already exists, and we get certain errors while
+ // trying to move it, we should just not bother.
+ //
+ // In the case of cache corruption, users will receive an
+ // EINTEGRITY error elsewhere, and can remove the offending
+ // content their own way.
+ //
+ // Note that, as the name suggests, this strictly only supports file moves.
+ return BB.fromNode(cb => {
+ fs.link(src, dest, err => {
+ if (err) {
+ if (err.code === 'EEXIST' || err.code === 'EBUSY') {
+ // file already exists, so whatever
+ } else if (err.code === 'EPERM' && process.platform === 'win32') {
+ // file handle stayed open even past graceful-fs limits
+ } else {
+ return cb(err)
+ }
+ }
+ return cb()
+ })
+ }).then(() => {
+ // content should never change for any reason, so make it read-only
+ return BB.join(unlink(src), process.platform !== 'win32' && chmod(dest, '0444'))
+ }).catch(err => {
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ if (!pinflight) { pinflight = require('promise-inflight') }
+ return pinflight('cacache-move-file:' + dest, () => {
+ return BB.promisify(fs.stat)(dest).catch(err => {
+ if (err.code !== 'ENOENT') {
+ // Something else is wrong here. Bail bail bail
+ throw err
+ }
+ // file doesn't already exist! let's try a rename -> copy fallback
+ if (!move) { move = require('move-concurrently') }
+ return move(src, dest, { BB, fs })
+ })
+ })
+ }
+ })
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000..4fc4512cc8
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const fixOwner = require('./fix-owner')
+const path = require('path')
+const rimraf = BB.promisify(require('rimraf'))
+const uniqueFilename = require('unique-filename')
+
+module.exports.mkdir = mktmpdir
+function mktmpdir (cache, opts) {
+ opts = opts || {}
+ const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+ return fixOwner.mkdirfix(tmpTarget, opts.uid, opts.gid).then(() => {
+ return tmpTarget
+ })
+}
+
+module.exports.withTmp = withTmp
+function withTmp (cache, opts, cb) {
+ if (!cb) {
+ cb = opts
+ opts = null
+ }
+ opts = opts || {}
+ return BB.using(mktmpdir(cache, opts).disposer(rimraf), cb)
+}
+
+module.exports.fix = fixtmpdir
+function fixtmpdir (cache, opts) {
+ return fixOwner(path.join(cache, 'tmp'), opts.uid, opts.gid)
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js
new file mode 100644
index 0000000000..d62bedacb3
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/util/y.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const path = require('path')
+const y18n = require('y18n')({
+ directory: path.join(__dirname, '../../locales'),
+ locale: 'en',
+ updateFiles: process.env.CACACHE_UPDATE_LOCALE_FILES === 'true'
+})
+
+module.exports = yTag
+function yTag (parts) {
+ let str = ''
+ parts.forEach((part, i) => {
+ const arg = arguments[i + 1]
+ str += part
+ if (arg) {
+ str += '%s'
+ }
+ })
+ return y18n.__.apply(null, [str].concat([].slice.call(arguments, 1)))
+}
+
+module.exports.setLocale = locale => {
+ y18n.setLocale(locale)
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000..6a01004c97
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
@@ -0,0 +1,213 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const contentPath = require('./content/path')
+const finished = BB.promisify(require('mississippi').finished)
+const fixOwner = require('./util/fix-owner')
+const fs = require('graceful-fs')
+const glob = BB.promisify(require('glob'))
+const index = require('./entry-index')
+const path = require('path')
+const rimraf = BB.promisify(require('rimraf'))
+const ssri = require('ssri')
+
+BB.promisifyAll(fs)
+
+module.exports = verify
+function verify (cache, opts) {
+ opts = opts || {}
+ opts.log && opts.log.silly('verify', 'verifying cache at', cache)
+ return BB.reduce([
+ markStartTime,
+ fixPerms,
+ garbageCollect,
+ rebuildIndex,
+ cleanTmp,
+ writeVerifile,
+ markEndTime
+ ], (stats, step, i) => {
+ const label = step.name || `step #${i}`
+ const start = new Date()
+ return BB.resolve(step(cache, opts)).then(s => {
+ s && Object.keys(s).forEach(k => {
+ stats[k] = s[k]
+ })
+ const end = new Date()
+ if (!stats.runTime) { stats.runTime = {} }
+ stats.runTime[label] = end - start
+ return stats
+ })
+ }, {}).tap(stats => {
+ stats.runTime.total = stats.endTime - stats.startTime
+ opts.log && opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
+ })
+}
+
+function markStartTime (cache, opts) {
+ return { startTime: new Date() }
+}
+
+function markEndTime (cache, opts) {
+ return { endTime: new Date() }
+}
+
+function fixPerms (cache, opts) {
+ opts.log && opts.log.silly('verify', 'fixing cache permissions')
+ return fixOwner.mkdirfix(cache, opts.uid, opts.gid).then(() => {
+ // TODO - fix file permissions too
+ return fixOwner.chownr(cache, opts.uid, opts.gid)
+ }).then(() => null)
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rimraf it.
+//
+function garbageCollect (cache, opts) {
+ opts.log && opts.log.silly('verify', 'garbage collecting content')
+ const indexStream = index.lsStream(cache)
+ const liveContent = new Set()
+ indexStream.on('data', entry => {
+ if (opts && opts.filter && !opts.filter(entry)) { return }
+ liveContent.add(entry.integrity.toString())
+ })
+ return finished(indexStream).then(() => {
+ const contentDir = contentPath._contentDir(cache)
+ return glob(path.join(contentDir, '**'), {
+ follow: false,
+ nodir: true,
+ nosort: true
+ }).then(files => {
+ return BB.resolve({
+ verifiedContent: 0,
+ reclaimedCount: 0,
+ reclaimedSize: 0,
+ badContentCount: 0,
+ keptSize: 0
+ }).tap((stats) => BB.map(files, (f) => {
+ const split = f.split(/[/\\]/)
+ const digest = split.slice(split.length - 3).join('')
+ const algo = split[split.length - 4]
+ const integrity = ssri.fromHex(digest, algo)
+ if (liveContent.has(integrity.toString())) {
+ return verifyContent(f, integrity).then(info => {
+ if (!info.valid) {
+ stats.reclaimedCount++
+ stats.badContentCount++
+ stats.reclaimedSize += info.size
+ } else {
+ stats.verifiedContent++
+ stats.keptSize += info.size
+ }
+ return stats
+ })
+ } else {
+ // No entries refer to this content. We can delete.
+ stats.reclaimedCount++
+ return fs.statAsync(f).then(s => {
+ return rimraf(f).then(() => {
+ stats.reclaimedSize += s.size
+ return stats
+ })
+ })
+ }
+ }, {concurrency: opts.concurrency || 20}))
+ })
+ })
+}
+
+function verifyContent (filepath, sri) {
+ return fs.statAsync(filepath).then(stat => {
+ const contentInfo = {
+ size: stat.size,
+ valid: true
+ }
+ return ssri.checkStream(
+ fs.createReadStream(filepath),
+ sri
+ ).catch(err => {
+ if (err.code !== 'EINTEGRITY') { throw err }
+ return rimraf(filepath).then(() => {
+ contentInfo.valid = false
+ })
+ }).then(() => contentInfo)
+ }).catch({code: 'ENOENT'}, () => ({size: 0, valid: false}))
+}
+
+function rebuildIndex (cache, opts) {
+ opts.log && opts.log.silly('verify', 'rebuilding index')
+ return index.ls(cache).then(entries => {
+ const stats = {
+ missingContent: 0,
+ rejectedEntries: 0,
+ totalEntries: 0
+ }
+ const buckets = {}
+ for (let k in entries) {
+ if (entries.hasOwnProperty(k)) {
+ const hashed = index._hashKey(k)
+ const entry = entries[k]
+ const excluded = opts && opts.filter && !opts.filter(entry)
+ excluded && stats.rejectedEntries++
+ if (buckets[hashed] && !excluded) {
+ buckets[hashed].push(entry)
+ } else if (buckets[hashed] && excluded) {
+ // skip
+ } else if (excluded) {
+ buckets[hashed] = []
+ buckets[hashed]._path = index._bucketPath(cache, k)
+ } else {
+ buckets[hashed] = [entry]
+ buckets[hashed]._path = index._bucketPath(cache, k)
+ }
+ }
+ }
+ return BB.map(Object.keys(buckets), key => {
+ return rebuildBucket(cache, buckets[key], stats, opts)
+ }, {concurrency: opts.concurrency || 20}).then(() => stats)
+ })
+}
+
+function rebuildBucket (cache, bucket, stats, opts) {
+ return fs.truncateAsync(bucket._path).then(() => {
+ // This needs to be serialized because cacache explicitly
+ // lets very racy bucket conflicts clobber each other.
+ return BB.mapSeries(bucket, entry => {
+ const content = contentPath(cache, entry.integrity)
+ return fs.statAsync(content).then(() => {
+ return index.insert(cache, entry.key, entry.integrity, {
+ uid: opts.uid,
+ gid: opts.gid,
+ metadata: entry.metadata
+ }).then(() => { stats.totalEntries++ })
+ }).catch({code: 'ENOENT'}, () => {
+ stats.rejectedEntries++
+ stats.missingContent++
+ })
+ })
+ })
+}
+
+function cleanTmp (cache, opts) {
+ opts.log && opts.log.silly('verify', 'cleaning tmp directory')
+ return rimraf(path.join(cache, 'tmp'))
+}
+
+function writeVerifile (cache, opts) {
+ const verifile = path.join(cache, '_lastverified')
+ opts.log && opts.log.silly('verify', 'writing verifile to ' + verifile)
+ return fs.writeFileAsync(verifile, '' + (+(new Date())))
+}
+
+module.exports.lastRun = lastRun
+function lastRun (cache) {
+ return fs.readFileAsync(
+ path.join(cache, '_lastverified'), 'utf8'
+ ).then(data => new Date(+data))
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.js
new file mode 100644
index 0000000000..22025cf0e8
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.js
@@ -0,0 +1,44 @@
+'use strict'
+
+const ls = require('../ls.js')
+const get = require('../get.js')
+const put = require('../put.js')
+const rm = require('../rm.js')
+const verify = require('../verify.js')
+const setLocale = require('../lib/util/y.js').setLocale
+const clearMemoized = require('../lib/memoization.js').clearMemoized
+const tmp = require('../lib/util/tmp.js')
+
+setLocale('en')
+
+const x = module.exports
+
+x.ls = cache => ls(cache)
+x.ls.stream = cache => ls.stream(cache)
+
+x.get = (cache, key, opts) => get(cache, key, opts)
+x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
+x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
+x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
+x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts)
+x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts)
+x.get.info = (cache, key) => get.info(cache, key)
+x.get.hasContent = (cache, hash) => get.hasContent(cache, hash)
+
+x.put = (cache, key, data, opts) => put(cache, key, data, opts)
+x.put.stream = (cache, key, opts) => put.stream(cache, key, opts)
+
+x.rm = (cache, key) => rm.entry(cache, key)
+x.rm.all = cache => rm.all(cache)
+x.rm.entry = x.rm
+x.rm.content = (cache, hash) => rm.content(cache, hash)
+
+x.setLocale = lang => setLocale(lang)
+x.clearMemoized = () => clearMemoized()
+
+x.tmp = {}
+x.tmp.mkdir = (cache, opts) => tmp.mkdir(cache, opts)
+x.tmp.withTmp = (cache, opts, cb) => tmp.withTmp(cache, opts, cb)
+
+x.verify = (cache, opts) => verify(cache, opts)
+x.verify.lastRun = cache => verify.lastRun(cache)
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.json b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.json
new file mode 100644
index 0000000000..82ecb08324
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/en.json
@@ -0,0 +1,6 @@
+{
+ "No cache entry for `%s` found in `%s`": "No cache entry for %s found in %s",
+ "Integrity verification failed for %s (%s)": "Integrity verification failed for %s (%s)",
+ "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Bad data size: expected inserted data to be %s bytes, but got %s instead",
+ "Cache input stream was empty": "Cache input stream was empty"
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.js
new file mode 100644
index 0000000000..9a27de6585
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.js
@@ -0,0 +1,46 @@
+'use strict'
+
+const ls = require('../ls.js')
+const get = require('../get.js')
+const put = require('../put.js')
+const rm = require('../rm.js')
+const verify = require('../verify.js')
+const setLocale = require('../lib/util/y.js').setLocale
+const clearMemoized = require('../lib/memoization.js').clearMemoized
+const tmp = require('../lib/util/tmp.js')
+
+setLocale('es')
+
+const x = module.exports
+
+x.ls = cache => ls(cache)
+x.ls.flujo = cache => ls.stream(cache)
+
+x.saca = (cache, clave, ops) => get(cache, clave, ops)
+x.saca.porHacheo = (cache, hacheo, ops) => get.byDigest(cache, hacheo, ops)
+x.saca.flujo = (cache, clave, ops) => get.stream(cache, clave, ops)
+x.saca.flujo.porHacheo = (cache, hacheo, ops) => get.stream.byDigest(cache, hacheo, ops)
+x.saca.copia = (cache, clave, destino, opts) => get.copy(cache, clave, destino, opts)
+x.saca.copia.porHacheo = (cache, hacheo, destino, opts) => get.copy.byDigest(cache, hacheo, destino, opts)
+x.saca.info = (cache, clave) => get.info(cache, clave)
+x.saca.tieneDatos = (cache, hacheo) => get.hasContent(cache, hacheo)
+
+x.mete = (cache, clave, datos, ops) => put(cache, clave, datos, ops)
+x.mete.flujo = (cache, clave, ops) => put.stream(cache, clave, ops)
+
+x.rm = (cache, clave) => rm.entry(cache, clave)
+x.rm.todo = cache => rm.all(cache)
+x.rm.entrada = x.rm
+x.rm.datos = (cache, hacheo) => rm.content(cache, hacheo)
+
+x.ponLenguaje = lang => setLocale(lang)
+x.limpiaMemoizado = () => clearMemoized()
+
+x.tmp = {}
+x.tmp.mkdir = (cache, ops) => tmp.mkdir(cache, ops)
+x.tmp.hazdir = x.tmp.mkdir
+x.tmp.conTmp = (cache, ops, cb) => tmp.withTmp(cache, ops, cb)
+
+x.verifica = (cache, ops) => verify(cache, ops)
+x.verifica.ultimaVez = cache => verify.lastRun(cache)
+x.verifica.últimaVez = x.verifica.ultimaVez
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.json b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.json
new file mode 100644
index 0000000000..a91d76225b
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/locales/es.json
@@ -0,0 +1,6 @@
+{
+ "No cache entry for `%s` found in `%s`": "No existe ninguna entrada para «%s» en «%s»",
+ "Integrity verification failed for %s (%s)": "Verificación de integridad falló para «%s» (%s)",
+ "Bad data size: expected inserted data to be %s bytes, but got %s instead": "Tamaño incorrecto de datos: los datos insertados debieron haber sido %s octetos, pero fueron %s",
+ "Cache input stream was empty": "El stream de entrada al caché estaba vacío"
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/ls.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/ls.js
new file mode 100644
index 0000000000..9f49b388ac
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/ls.js
@@ -0,0 +1,6 @@
+'use strict'
+
+var index = require('./lib/entry-index')
+
+module.exports = index.ls
+module.exports.stream = index.lsStream
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/LICENSE b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/LICENSE
new file mode 100644
index 0000000000..3c157f0b9d
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright notice
+and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
+THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/README.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/README.md
new file mode 100644
index 0000000000..9859458f20
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/README.md
@@ -0,0 +1,91 @@
+# y18n
+
+[![Build Status][travis-image]][travis-url]
+[![Coverage Status][coveralls-image]][coveralls-url]
+[![NPM version][npm-image]][npm-url]
+[![js-standard-style][standard-image]][standard-url]
+
+The bare-bones internationalization library used by yargs.
+
+Inspired by [i18n](https://www.npmjs.com/package/i18n).
+
+## Examples
+
+_simple string translation:_
+
+```js
+var __ = require('y18n').__
+
+console.log(__('my awesome string %s', 'foo'))
+```
+
+output:
+
+`my awesome string foo`
+
+_pluralization support:_
+
+```js
+var __n = require('y18n').__n
+
+console.log(__n('one fish %s', '%d fishes %s', 2, 'foo'))
+```
+
+output:
+
+`2 fishes foo`
+
+## JSON Language Files
+
+The JSON language files should be stored in a `./locales` folder.
+File names correspond to locales, e.g., `en.json`, `pirate.json`.
+
+When strings are observed for the first time they will be
+added to the JSON file corresponding to the current locale.
+
+## Methods
+
+### require('y18n')(config)
+
+Create an instance of y18n with the config provided, options include:
+
+* `directory`: the locale directory, default `./locales`.
+* `updateFiles`: should newly observed strings be updated in file, default `true`.
+* `locale`: what locale should be used.
+* `fallbackToLanguage`: should fallback to a language-only file (e.g. `en.json`)
+ be allowed if a file matching the locale does not exist (e.g. `en_US.json`),
+ default `true`.
+
+### y18n.\_\_(str, arg, arg, arg)
+
+Print a localized string, `%s` will be replaced with `arg`s.
+
+### y18n.\_\_n(singularString, pluralString, count, arg, arg, arg)
+
+Print a localized string with appropriate pluralization. If `%d` is provided
+in the string, the `count` will replace this placeholder.
+
+### y18n.setLocale(str)
+
+Set the current locale being used.
+
+### y18n.getLocale()
+
+What locale is currently being used?
+
+### y18n.updateLocale(obj)
+
+Update the current locale with the key value pairs in `obj`.
+
+## License
+
+ISC
+
+[travis-url]: https://travis-ci.org/yargs/y18n
+[travis-image]: https://img.shields.io/travis/yargs/y18n.svg
+[coveralls-url]: https://coveralls.io/github/yargs/y18n
+[coveralls-image]: https://img.shields.io/coveralls/yargs/y18n.svg
+[npm-url]: https://npmjs.org/package/y18n
+[npm-image]: https://img.shields.io/npm/v/y18n.svg
+[standard-image]: https://img.shields.io/badge/code%20style-standard-brightgreen.svg
+[standard-url]: https://github.com/feross/standard
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/index.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/index.js
new file mode 100644
index 0000000000..91b159e342
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/index.js
@@ -0,0 +1,172 @@
+var fs = require('fs')
+var path = require('path')
+var util = require('util')
+
+function Y18N (opts) {
+ // configurable options.
+ opts = opts || {}
+ this.directory = opts.directory || './locales'
+ this.updateFiles = typeof opts.updateFiles === 'boolean' ? opts.updateFiles : true
+ this.locale = opts.locale || 'en'
+ this.fallbackToLanguage = typeof opts.fallbackToLanguage === 'boolean' ? opts.fallbackToLanguage : true
+
+ // internal stuff.
+ this.cache = {}
+ this.writeQueue = []
+}
+
+Y18N.prototype.__ = function () {
+ var args = Array.prototype.slice.call(arguments)
+ var str = args.shift()
+ var cb = function () {} // start with noop.
+
+ if (typeof args[args.length - 1] === 'function') cb = args.pop()
+ cb = cb || function () {} // noop.
+
+ if (!this.cache[this.locale]) this._readLocaleFile()
+
+ // we've observed a new string, update the language file.
+ if (!this.cache[this.locale][str] && this.updateFiles) {
+ this.cache[this.locale][str] = str
+
+ // include the current directory and locale,
+ // since these values could change before the
+ // write is performed.
+ this._enqueueWrite([this.directory, this.locale, cb])
+ } else {
+ cb()
+ }
+
+ return util.format.apply(util, [this.cache[this.locale][str] || str].concat(args))
+}
+
+Y18N.prototype._enqueueWrite = function (work) {
+ this.writeQueue.push(work)
+ if (this.writeQueue.length === 1) this._processWriteQueue()
+}
+
+Y18N.prototype._processWriteQueue = function () {
+ var _this = this
+ var work = this.writeQueue[0]
+
+ // destructure the enqueued work.
+ var directory = work[0]
+ var locale = work[1]
+ var cb = work[2]
+
+ var languageFile = this._resolveLocaleFile(directory, locale)
+ var serializedLocale = JSON.stringify(this.cache[locale], null, 2)
+
+ fs.writeFile(languageFile, serializedLocale, 'utf-8', function (err) {
+ _this.writeQueue.shift()
+ if (_this.writeQueue.length > 0) _this._processWriteQueue()
+ cb(err)
+ })
+}
+
+Y18N.prototype._readLocaleFile = function () {
+ var localeLookup = {}
+ var languageFile = this._resolveLocaleFile(this.directory, this.locale)
+
+ try {
+ localeLookup = JSON.parse(fs.readFileSync(languageFile, 'utf-8'))
+ } catch (err) {
+ if (err instanceof SyntaxError) {
+ err.message = 'syntax error in ' + languageFile
+ }
+
+ if (err.code === 'ENOENT') localeLookup = {}
+ else throw err
+ }
+
+ this.cache[this.locale] = localeLookup
+}
+
+Y18N.prototype._resolveLocaleFile = function (directory, locale) {
+ var file = path.resolve(directory, './', locale + '.json')
+ if (this.fallbackToLanguage && !this._fileExistsSync(file) && ~locale.lastIndexOf('_')) {
+ // attempt fallback to language only
+ var languageFile = path.resolve(directory, './', locale.split('_')[0] + '.json')
+ if (this._fileExistsSync(languageFile)) file = languageFile
+ }
+ return file
+}
+
+// this only exists because fs.existsSync() "will be deprecated"
+// see https://nodejs.org/api/fs.html#fs_fs_existssync_path
+Y18N.prototype._fileExistsSync = function (file) {
+ try {
+ return fs.statSync(file).isFile()
+ } catch (err) {
+ return false
+ }
+}
+
+Y18N.prototype.__n = function () {
+ var args = Array.prototype.slice.call(arguments)
+ var singular = args.shift()
+ var plural = args.shift()
+ var quantity = args.shift()
+
+ var cb = function () {} // start with noop.
+ if (typeof args[args.length - 1] === 'function') cb = args.pop()
+
+ if (!this.cache[this.locale]) this._readLocaleFile()
+
+ var str = quantity === 1 ? singular : plural
+ if (this.cache[this.locale][singular]) {
+ str = this.cache[this.locale][singular][quantity === 1 ? 'one' : 'other']
+ }
+
+ // we've observed a new string, update the language file.
+ if (!this.cache[this.locale][singular] && this.updateFiles) {
+ this.cache[this.locale][singular] = {
+ one: singular,
+ other: plural
+ }
+
+ // include the current directory and locale,
+ // since these values could change before the
+ // write is performed.
+ this._enqueueWrite([this.directory, this.locale, cb])
+ } else {
+ cb()
+ }
+
+ // if a %d placeholder is provided, add quantity
+ // to the arguments expanded by util.format.
+ var values = [str]
+ if (~str.indexOf('%d')) values.push(quantity)
+
+ return util.format.apply(util, values.concat(args))
+}
+
+Y18N.prototype.setLocale = function (locale) {
+ this.locale = locale
+}
+
+Y18N.prototype.getLocale = function () {
+ return this.locale
+}
+
+Y18N.prototype.updateLocale = function (obj) {
+ if (!this.cache[this.locale]) this._readLocaleFile()
+
+ for (var key in obj) {
+ this.cache[this.locale][key] = obj[key]
+ }
+}
+
+module.exports = function (opts) {
+ var y18n = new Y18N(opts)
+
+ // bind all functions to y18n, so that
+ // they can be used in isolation.
+ for (var key in y18n) {
+ if (typeof y18n[key] === 'function') {
+ y18n[key] = y18n[key].bind(y18n)
+ }
+ }
+
+ return y18n
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/package.json b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/package.json
new file mode 100644
index 0000000000..8e45957e98
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/node_modules/y18n/package.json
@@ -0,0 +1,65 @@
+{
+ "_from": "y18n@^3.2.1",
+ "_id": "y18n@3.2.1",
+ "_inBundle": false,
+ "_integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE=",
+ "_location": "/npm-profile/make-fetch-happen/cacache/y18n",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "y18n@^3.2.1",
+ "name": "y18n",
+ "escapedName": "y18n",
+ "rawSpec": "^3.2.1",
+ "saveSpec": null,
+ "fetchSpec": "^3.2.1"
+ },
+ "_requiredBy": [
+ "/npm-profile/make-fetch-happen/cacache"
+ ],
+ "_resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz",
+ "_shasum": "6d15fba884c08679c0d77e88e7759e811e07fa41",
+ "_spec": "y18n@^3.2.1",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache",
+ "author": {
+ "name": "Ben Coe",
+ "email": "ben@npmjs.com"
+ },
+ "bugs": {
+ "url": "https://github.com/yargs/y18n/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "the bare-bones internationalization library used by yargs",
+ "devDependencies": {
+ "chai": "^3.4.1",
+ "coveralls": "^2.11.6",
+ "mocha": "^2.3.4",
+ "nyc": "^6.1.1",
+ "rimraf": "^2.5.0",
+ "standard": "^5.4.1"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/yargs/y18n",
+ "keywords": [
+ "i18n",
+ "internationalization",
+ "yargs"
+ ],
+ "license": "ISC",
+ "main": "index.js",
+ "name": "y18n",
+ "repository": {
+ "type": "git",
+ "url": "git+ssh://git@github.com/yargs/y18n.git"
+ },
+ "scripts": {
+ "coverage": "nyc report --reporter=text-lcov | coveralls",
+ "pretest": "standard",
+ "test": "nyc mocha"
+ },
+ "version": "3.2.1"
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/package.json b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/package.json
new file mode 100644
index 0000000000..7f7ad4057e
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/package.json
@@ -0,0 +1,126 @@
+{
+ "_from": "cacache@^9.2.9",
+ "_id": "cacache@9.3.0",
+ "_inBundle": false,
+ "_integrity": "sha512-Vbi8J1XfC8v+FbQ6QkOtKXsHpPnB0i9uMeYFJoj40EbdOsEqWB3DPpNjfsnYBkqOPYA8UvrqH6FZPpBP0zdN7g==",
+ "_location": "/npm-profile/make-fetch-happen/cacache",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "cacache@^9.2.9",
+ "name": "cacache",
+ "escapedName": "cacache",
+ "rawSpec": "^9.2.9",
+ "saveSpec": null,
+ "fetchSpec": "^9.2.9"
+ },
+ "_requiredBy": [
+ "/npm-profile/make-fetch-happen"
+ ],
+ "_resolved": "https://registry.npmjs.org/cacache/-/cacache-9.3.0.tgz",
+ "_shasum": "9cd58f2dd0b8c8cacf685b7067b416d6d3cf9db1",
+ "_spec": "cacache@^9.2.9",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-profile/node_modules/make-fetch-happen",
+ "author": {
+ "name": "Kat Marchán",
+ "email": "kzm@sykosomatic.org"
+ },
+ "bugs": {
+ "url": "https://github.com/zkat/cacache/issues"
+ },
+ "bundleDependencies": false,
+ "cache-version": {
+ "content": "2",
+ "index": "5"
+ },
+ "config": {
+ "nyc": {
+ "exclude": [
+ "node_modules/**",
+ "test/**"
+ ]
+ }
+ },
+ "contributors": [
+ {
+ "name": "Charlotte Spencer",
+ "email": "charlottelaspencer@gmail.com"
+ },
+ {
+ "name": "Rebecca Turner",
+ "email": "me@re-becca.org"
+ }
+ ],
+ "dependencies": {
+ "bluebird": "^3.5.0",
+ "chownr": "^1.0.1",
+ "glob": "^7.1.2",
+ "graceful-fs": "^4.1.11",
+ "lru-cache": "^4.1.1",
+ "mississippi": "^1.3.0",
+ "mkdirp": "^0.5.1",
+ "move-concurrently": "^1.0.1",
+ "promise-inflight": "^1.0.1",
+ "rimraf": "^2.6.1",
+ "ssri": "^4.1.6",
+ "unique-filename": "^1.1.0",
+ "y18n": "^3.2.1"
+ },
+ "deprecated": false,
+ "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+ "devDependencies": {
+ "benchmark": "^2.1.4",
+ "chalk": "^2.0.1",
+ "cross-env": "^5.0.1",
+ "nyc": "^11.1.0",
+ "require-inject": "^1.4.2",
+ "safe-buffer": "^5.1.1",
+ "standard": "^10.0.2",
+ "standard-version": "^4.2.0",
+ "tacks": "^1.2.2",
+ "tap": "^10.7.0",
+ "weallbehave": "^1.2.0",
+ "weallcontribute": "^1.0.8"
+ },
+ "files": [
+ "*.js",
+ "lib",
+ "locales"
+ ],
+ "homepage": "https://github.com/zkat/cacache#readme",
+ "keywords": [
+ "cache",
+ "caching",
+ "content-addressable",
+ "sri",
+ "sri hash",
+ "subresource integrity",
+ "cache",
+ "storage",
+ "store",
+ "file store",
+ "filesystem",
+ "disk cache",
+ "disk storage"
+ ],
+ "license": "CC0-1.0",
+ "main": "index.js",
+ "name": "cacache",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/zkat/cacache.git"
+ },
+ "scripts": {
+ "benchmarks": "node test/benchmarks",
+ "postrelease": "npm publish && git push --follow-tags",
+ "prerelease": "npm t",
+ "pretest": "standard lib test *.js",
+ "release": "standard-version -s",
+ "test": "cross-env CACACHE_UPDATE_LOCALE_FILES=true nyc --all -- tap -J test/*.js",
+ "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
+ "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
+ },
+ "version": "9.3.0"
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/put.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/put.js
new file mode 100644
index 0000000000..fe1293e5e7
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/put.js
@@ -0,0 +1,71 @@
+'use strict'
+
+const index = require('./lib/entry-index')
+const memo = require('./lib/memoization')
+const write = require('./lib/content/write')
+const to = require('mississippi').to
+
+module.exports = putData
+function putData (cache, key, data, opts) {
+ opts = opts || {}
+ return write(cache, data, opts).then(res => {
+ // TODO - stop modifying opts
+ opts.size = res.size
+ return index.insert(cache, key, res.integrity, opts).then(entry => {
+ if (opts.memoize) {
+ memo.put(cache, entry, data, opts)
+ }
+ return res.integrity
+ })
+ })
+}
+
+module.exports.stream = putStream
+function putStream (cache, key, opts) {
+ opts = opts || {}
+ let integrity
+ let size
+ const contentStream = write.stream(
+ cache, opts
+ ).on('integrity', int => {
+ integrity = int
+ }).on('size', s => {
+ size = s
+ })
+ let memoData
+ let memoTotal = 0
+ const stream = to((chunk, enc, cb) => {
+ contentStream.write(chunk, enc, () => {
+ if (opts.memoize) {
+ if (!memoData) { memoData = [] }
+ memoData.push(chunk)
+ memoTotal += chunk.length
+ }
+ cb()
+ })
+ }, cb => {
+ contentStream.end(() => {
+ // TODO - stop modifying `opts`
+ opts.size = size
+ index.insert(cache, key, integrity, opts).then(entry => {
+ if (opts.memoize) {
+ memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
+ }
+ stream.emit('integrity', integrity)
+ cb()
+ })
+ })
+ })
+ let erred = false
+ stream.once('error', err => {
+ if (erred) { return }
+ erred = true
+ contentStream.emit('error', err)
+ })
+ contentStream.once('error', err => {
+ if (erred) { return }
+ erred = true
+ stream.emit('error', err)
+ })
+ return stream
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/rm.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/rm.js
new file mode 100644
index 0000000000..e71a1d27b4
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/rm.js
@@ -0,0 +1,28 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const index = require('./lib/entry-index')
+const memo = require('./lib/memoization')
+const path = require('path')
+const rimraf = BB.promisify(require('rimraf'))
+const rmContent = require('./lib/content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+function entry (cache, key) {
+ memo.clearMemoized()
+ return index.delete(cache, key)
+}
+
+module.exports.content = content
+function content (cache, integrity) {
+ memo.clearMemoized()
+ return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+function all (cache) {
+ memo.clearMemoized()
+ return rimraf(path.join(cache, '*(content-*|index-*)'))
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/verify.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/verify.js
new file mode 100644
index 0000000000..db7763d7af
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/cacache/verify.js
@@ -0,0 +1,3 @@
+'use strict'
+
+module.exports = require('./lib/verify')
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/index.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/index.js
index 2e9c58c99f..8a5bcab597 100644
--- a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/index.js
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/index.js
@@ -221,7 +221,14 @@ module.exports = class CachePolicy {
responseHeaders() {
const headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
- headers.age = `${Math.round(this.age())}`;
+ const age = this.age();
+
+ // A cache SHOULD generate 113 warning if it heuristically chose a freshness
+ // lifetime greater than 24 hours and the response's age is greater than 24 hours.
+ if (age > 3600*24 && !this._hasExplicitExpiration() && this.maxAge() > 3600*24) {
+ headers.warning = (headers.warning ? `${headers.warning}, ` : '') + '113 - "rfc7234 5.5.4"';
+ }
+ headers.age = `${Math.round(age)}`;
return headers;
}
@@ -260,6 +267,13 @@ module.exports = class CachePolicy {
return isFinite(ageValue) ? ageValue : 0;
}
+ /**
+ * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
+ *
+ * For an up-to-date value, see `timeToLive()`.
+ *
+ * @return Number
+ */
maxAge() {
if (!this.storable() || this._rescc['no-cache']) {
return 0;
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/node4/index.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/node4/index.js
index 7011106818..bcdaebe80f 100644
--- a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/node4/index.js
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/node4/index.js
@@ -271,7 +271,14 @@ module.exports = function () {
CachePolicy.prototype.responseHeaders = function responseHeaders() {
var headers = this._copyWithoutHopByHopHeaders(this._resHeaders);
- headers.age = '' + Math.round(this.age());
+ var age = this.age();
+
+ // A cache SHOULD generate 113 warning if it heuristically chose a freshness
+ // lifetime greater than 24 hours and the response's age is greater than 24 hours.
+ if (age > 3600 * 24 && !this._hasExplicitExpiration() && this.maxAge() > 3600 * 24) {
+ headers.warning = (headers.warning ? `${headers.warning}, ` : '') + '113 - "rfc7234 5.5.4"';
+ }
+ headers.age = `${Math.round(age)}`;
return headers;
};
@@ -314,6 +321,15 @@ module.exports = function () {
return isFinite(ageValue) ? ageValue : 0;
};
+ /**
+ * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`.
+ *
+ * For an up-to-date value, see `timeToLive()`.
+ *
+ * @return Number
+ */
+
+
CachePolicy.prototype.maxAge = function maxAge() {
if (!this.storable() || this._rescc['no-cache']) {
return 0;
@@ -442,7 +458,7 @@ module.exports = function () {
/* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */
if (this._resHeaders.etag) {
- headers['if-none-match'] = headers['if-none-match'] ? headers['if-none-match'] + ', ' + this._resHeaders.etag : this._resHeaders.etag;
+ headers['if-none-match'] = headers['if-none-match'] ? `${headers['if-none-match']}, ${this._resHeaders.etag}` : this._resHeaders.etag;
}
// Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.
@@ -531,7 +547,7 @@ module.exports = function () {
var newResponse = Object.assign({}, response, {
status: this._status,
method: this._method,
- headers: headers
+ headers
});
return {
policy: new this.constructor(request, newResponse),
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json
index 0fde2994a3..0b18a6f9f6 100644
--- a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/package.json
@@ -1,8 +1,8 @@
{
"_from": "http-cache-semantics@^3.7.3",
- "_id": "http-cache-semantics@3.7.3",
+ "_id": "http-cache-semantics@3.8.0",
"_inBundle": false,
- "_integrity": "sha1-LzXFMuzSnx5UE7mvgztySjxvf3I=",
+ "_integrity": "sha512-HGQFfBdru2fj/dwPn1oLx1fy6QMPeTAD1yzKcxD4l5biw+5QVaui/ehCqxaitoKJC/vHMLKv3Yd+nTlxboOJig==",
"_location": "/npm-profile/make-fetch-happen/http-cache-semantics",
"_phantomChildren": {},
"_requested": {
@@ -18,10 +18,10 @@
"_requiredBy": [
"/npm-profile/make-fetch-happen"
],
- "_resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.7.3.tgz",
- "_shasum": "2f35c532ecd29f1e5413b9af833b724a3c6f7f72",
+ "_resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.0.tgz",
+ "_shasum": "1e3ce248730e189ac692a6697b9e3fdea2ff8da3",
"_spec": "http-cache-semantics@^3.7.3",
- "_where": "/Users/rebecca/code/npm/node_modules/npm-profile/node_modules/make-fetch-happen",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-profile/node_modules/make-fetch-happen",
"author": {
"name": "Kornel Lesiński",
"email": "kornel@geekhood.net",
@@ -34,9 +34,9 @@
"deprecated": false,
"description": "Parses Cache-Control and other headers. Helps building correct HTTP caches and proxies",
"devDependencies": {
- "babel-cli": "^6.24.0",
- "babel-preset-env": "^1.3.2",
- "mocha": "^3.2.0"
+ "babel-cli": "^6.24.1",
+ "babel-preset-env": "^1.5.2",
+ "mocha": "^3.4.2"
},
"files": [
"node4/index.js",
@@ -56,5 +56,5 @@
"prepublish": "npm run compile",
"test": "npm run compile; mocha node4/test"
},
- "version": "3.7.3"
+ "version": "3.8.0"
}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/responsetest.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/responsetest.js
index 27bdb28e04..763910b82c 100644
--- a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/responsetest.js
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/responsetest.js
@@ -34,6 +34,12 @@ describe('Response headers', function() {
assert.equal(cache.maxAge(), 678);
});
+ it('IIS', function() {
+ const cache = new CachePolicy(req, {headers:{'cache-control': 'private, public, max-age=259200'}}, {shared:false});
+ assert(!cache.stale());
+ assert.equal(cache.maxAge(), 259200);
+ });
+
it('pre-check tolerated', function() {
const cc = 'pre-check=0, post-check=0, no-store, no-cache, max-age=100';
const cache = new CachePolicy(req, {headers:{'cache-control': cc}});
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/revalidatetest.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/revalidatetest.js
index 1f905ec27c..9dc737718d 100644
--- a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/revalidatetest.js
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/http-cache-semantics/test/revalidatetest.js
@@ -106,6 +106,7 @@ describe('Can be revalidated?', function() {
const headers = cache.revalidationHeaders(simpleRequest);
assertHeadersPassed(headers);
assert.equal(headers['if-modified-since'], 'Tue, 15 Nov 1994 12:45:26 GMT');
+ assert(!/113/.test(headers.warning));
});
it('not without validators', function() {
@@ -113,6 +114,20 @@ describe('Can be revalidated?', function() {
const headers = cache.revalidationHeaders(simpleRequest);
assertHeadersPassed(headers);
assertNoValidators(headers);
+ assert(!/113/.test(headers.warning));
+ })
+
+ it('113 added', function() {
+ const veryOldResponse = {
+ headers: {
+ age: 3600*72,
+ 'last-modified': 'Tue, 15 Nov 1994 12:45:26 GMT',
+ },
+ };
+
+ const cache = new CachePolicy(simpleRequest, veryOldResponse);
+ const headers = cache.responseHeaders(simpleRequest);
+ assert(/113/.test(headers.warning));
})
});
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md
new file mode 100644
index 0000000000..c1136092e3
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/CHANGELOG.md
@@ -0,0 +1,175 @@
+# Change Log
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+<a name="4.1.6"></a>
+## [4.1.6](https://github.com/zkat/ssri/compare/v4.1.5...v4.1.6) (2017-06-07)
+
+
+### Bug Fixes
+
+* **checkStream:** make sure to pass all opts through ([0b1bcbe](https://github.com/zkat/ssri/commit/0b1bcbe))
+
+
+
+<a name="4.1.5"></a>
+## [4.1.5](https://github.com/zkat/ssri/compare/v4.1.4...v4.1.5) (2017-06-05)
+
+
+### Bug Fixes
+
+* **integrityStream:** stop crashing if opts.algorithms and opts.integrity have an algo mismatch ([fb1293e](https://github.com/zkat/ssri/commit/fb1293e))
+
+
+
+<a name="4.1.4"></a>
+## [4.1.4](https://github.com/zkat/ssri/compare/v4.1.3...v4.1.4) (2017-05-31)
+
+
+### Bug Fixes
+
+* **node:** older versions of node[@4](https://github.com/4) do not support base64buffer string parsing ([513df4e](https://github.com/zkat/ssri/commit/513df4e))
+
+
+
+<a name="4.1.3"></a>
+## [4.1.3](https://github.com/zkat/ssri/compare/v4.1.2...v4.1.3) (2017-05-24)
+
+
+### Bug Fixes
+
+* **check:** handle various bad hash corner cases better ([c2c262b](https://github.com/zkat/ssri/commit/c2c262b))
+
+
+
+<a name="4.1.2"></a>
+## [4.1.2](https://github.com/zkat/ssri/compare/v4.1.1...v4.1.2) (2017-04-18)
+
+
+### Bug Fixes
+
+* **stream:** _flush can be called multiple times. use on("end") ([b1c4805](https://github.com/zkat/ssri/commit/b1c4805))
+
+
+
+<a name="4.1.1"></a>
+## [4.1.1](https://github.com/zkat/ssri/compare/v4.1.0...v4.1.1) (2017-04-12)
+
+
+### Bug Fixes
+
+* **pickAlgorithm:** error if pickAlgorithm() is used in an empty Integrity ([fab470e](https://github.com/zkat/ssri/commit/fab470e))
+
+
+
+<a name="4.1.0"></a>
+# [4.1.0](https://github.com/zkat/ssri/compare/v4.0.0...v4.1.0) (2017-04-07)
+
+
+### Features
+
+* adding ssri.create for a crypto style interface (#2) ([96f52ad](https://github.com/zkat/ssri/commit/96f52ad))
+
+
+
+<a name="4.0.0"></a>
+# [4.0.0](https://github.com/zkat/ssri/compare/v3.0.2...v4.0.0) (2017-04-03)
+
+
+### Bug Fixes
+
+* **integrity:** should have changed the error code before. oops ([8381afa](https://github.com/zkat/ssri/commit/8381afa))
+
+
+### BREAKING CHANGES
+
+* **integrity:** EBADCHECKSUM -> EINTEGRITY for verification errors
+
+
+
+<a name="3.0.2"></a>
+## [3.0.2](https://github.com/zkat/ssri/compare/v3.0.1...v3.0.2) (2017-04-03)
+
+
+
+<a name="3.0.1"></a>
+## [3.0.1](https://github.com/zkat/ssri/compare/v3.0.0...v3.0.1) (2017-04-03)
+
+
+### Bug Fixes
+
+* **package.json:** really should have these in the keywords because search ([a6ac6d0](https://github.com/zkat/ssri/commit/a6ac6d0))
+
+
+
+<a name="3.0.0"></a>
+# [3.0.0](https://github.com/zkat/ssri/compare/v2.0.0...v3.0.0) (2017-04-03)
+
+
+### Bug Fixes
+
+* **hashes:** IntegrityMetadata -> Hash ([d04aa1f](https://github.com/zkat/ssri/commit/d04aa1f))
+
+
+### Features
+
+* **check:** return IntegrityMetadata on check success ([2301e74](https://github.com/zkat/ssri/commit/2301e74))
+* **fromHex:** ssri.fromHex to make it easier to generate them from hex valus ([049b89e](https://github.com/zkat/ssri/commit/049b89e))
+* **hex:** utility function for getting hex version of digest ([a9f021c](https://github.com/zkat/ssri/commit/a9f021c))
+* **hexDigest:** added hexDigest method to Integrity objects too ([85208ba](https://github.com/zkat/ssri/commit/85208ba))
+* **integrity:** add .isIntegrity and .isIntegrityMetadata ([1b29e6f](https://github.com/zkat/ssri/commit/1b29e6f))
+* **integrityStream:** new stream that can both generate and check streamed data ([fd23e1b](https://github.com/zkat/ssri/commit/fd23e1b))
+* **parse:** allow parsing straight into a single IntegrityMetadata object ([c8ddf48](https://github.com/zkat/ssri/commit/c8ddf48))
+* **pickAlgorithm:** Intergrity#pickAlgorithm() added ([b97a796](https://github.com/zkat/ssri/commit/b97a796))
+* **size:** calculate and update stream sizes ([02ed1ad](https://github.com/zkat/ssri/commit/02ed1ad))
+
+
+### BREAKING CHANGES
+
+* **hashes:** `.isIntegrityMetadata` is now `.isHash`. Also, any references to `IntegrityMetadata` now refer to `Hash`.
+* **integrityStream:** createCheckerStream has been removed and replaced with a general-purpose integrityStream.
+
+To convert existing createCheckerStream code, move the `sri` argument into `opts.integrity` in integrityStream. All other options should be the same.
+* **check:** `checkData`, `checkStream`, and `createCheckerStream` now yield a whole IntegrityMetadata instance representing the first successful hash match.
+
+
+
+<a name="2.0.0"></a>
+# [2.0.0](https://github.com/zkat/ssri/compare/v1.0.0...v2.0.0) (2017-03-24)
+
+
+### Bug Fixes
+
+* **strict-mode:** make regexes more rigid ([122a32c](https://github.com/zkat/ssri/commit/122a32c))
+
+
+### Features
+
+* **api:** added serialize alias for unparse ([999b421](https://github.com/zkat/ssri/commit/999b421))
+* **concat:** add Integrity#concat() ([cae12c7](https://github.com/zkat/ssri/commit/cae12c7))
+* **pickAlgo:** pick the strongest algorithm provided, by default ([58c18f7](https://github.com/zkat/ssri/commit/58c18f7))
+* **strict-mode:** strict SRI support ([3f0b64c](https://github.com/zkat/ssri/commit/3f0b64c))
+* **stringify:** replaced unparse/serialize with stringify ([4acad30](https://github.com/zkat/ssri/commit/4acad30))
+* **verification:** add opts.pickAlgorithm ([f72e658](https://github.com/zkat/ssri/commit/f72e658))
+
+
+### BREAKING CHANGES
+
+* **pickAlgo:** ssri will prioritize specific hashes now
+* **stringify:** serialize and unparse have been removed. Use ssri.stringify instead.
+* **strict-mode:** functions that accepted an optional `sep` argument now expect `opts.sep`.
+
+
+
+<a name="1.0.0"></a>
+# 1.0.0 (2017-03-23)
+
+
+### Features
+
+* **api:** implemented initial api ([4fbb16b](https://github.com/zkat/ssri/commit/4fbb16b))
+
+
+### BREAKING CHANGES
+
+* **api:** Initial API established.
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md
new file mode 100644
index 0000000000..c05cb09586
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/LICENSE.md
@@ -0,0 +1,3 @@
+To the extent possible under law, maintainers for this project have waived all copyright and related or neighboring rights to this project.
+
+For more information on this waiver, see: https://creativecommons.org/publicdomain/zero/1.0/
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/README.md b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/README.md
new file mode 100644
index 0000000000..f2fc035da5
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/README.md
@@ -0,0 +1,462 @@
+# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/zkat/ssri.svg)](https://travis-ci.org/zkat/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/ssri?svg=true)](https://ci.appveyor.com/project/zkat/ssri) [![Coverage Status](https://coveralls.io/repos/github/zkat/ssri/badge.svg?branch=latest)](https://coveralls.io/github/zkat/ssri?branch=latest)
+
+[`ssri`](https://github.com/zkat/ssri), short for Standard Subresource
+Integrity, is a Node.js utility for parsing, manipulating, serializing,
+generating, and verifying [Subresource
+Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes.
+
+## Install
+
+`$ npm install --save ssri`
+
+## Table of Contents
+
+* [Example](#example)
+* [Features](#features)
+* [Contributing](#contributing)
+* [API](#api)
+ * Parsing & Serializing
+ * [`parse`](#parse)
+ * [`stringify`](#stringify)
+ * [`Integrity#concat`](#integrity-concat)
+ * [`Integrity#toString`](#integrity-to-string)
+ * [`Integrity#toJSON`](#integrity-to-json)
+ * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm)
+ * [`Integrity#hexDigest`](#integrity-hex-digest)
+ * Integrity Generation
+ * [`fromHex`](#from-hex)
+ * [`fromData`](#from-data)
+ * [`fromStream`](#from-stream)
+ * [`create`](#create)
+ * Integrity Verification
+ * [`checkData`](#check-data)
+ * [`checkStream`](#check-stream)
+ * [`integrityStream`](#integrity-stream)
+
+### Example
+
+```javascript
+const ssri = require('ssri')
+
+const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
+
+// Parsing and serializing
+const parsed = ssri.parse(integrity)
+ssri.stringify(parsed) // === integrity (works on non-Integrity objects)
+parsed.toString() // === integrity
+
+// Async stream functions
+ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...)
+ssri.fromStream(fs.createReadStream('./my-file')).then(sri => {
+ sri.toString() === integrity
+})
+fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri))
+
+// Sync data functions
+ssri.fromData(fs.readFileSync('./my-file')) // === parsed
+ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512'
+```
+
+### Features
+
+* Parses and stringifies SRI strings.
+* Generates SRI strings from raw data or Streams.
+* Strict standard compliance.
+* `?foo` metadata option support.
+* Multiple entries for the same algorithm.
+* Object-based integrity hash manipulation.
+* Small footprint: no dependencies, concise implementation.
+* Full test coverage.
+* Customizable algorithm picker.
+
+### Contributing
+
+The ssri team enthusiastically welcomes contributions and project participation!
+There's a bunch of things you can do if you want to contribute! The [Contributor
+Guide](CONTRIBUTING.md) has all the information you need for everything from
+reporting bugs to contributing entire new features. Please don't hesitate to
+jump in if you'd like to, or even ask us questions if something isn't clear.
+
+### API
+
+#### <a name="parse"></a> `> ssri.parse(sri, [opts]) -> Integrity`
+
+Parses `sri` into an `Integrity` data structure. `sri` can be an integrity
+string, a `Hash`-like with `digest` and `algorithm` fields and an optional
+`options` field, or an `Integrity`-like object. The resulting object will be an
+`Integrity` instance that has this shape:
+
+```javascript
+{
+ 'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}],
+ 'sha512': [
+ {algorithm: 'sha512', digest: 'c0ffee', options: []},
+ {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']}
+ ],
+}
+```
+
+If `opts.single` is truthy, a single `Hash` object will be returned. That is, a
+single object that looks like `{algorithm, digest, options}`, as opposed to a
+larger object with multiple of these.
+
+If `opts.strict` is truthy, the resulting object will be filtered such that
+it strictly follows the Subresource Integrity spec, throwing away any entries
+with any invalid components. This also means a restricted set of algorithms
+will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`.
+
+Strict mode is recommended if the integrity strings are intended for use in
+browsers, or in other situations where strict adherence to the spec is needed.
+
+##### Example
+
+```javascript
+ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object
+```
+
+#### <a name="stringify"></a> `> ssri.stringify(sri, [opts]) -> String`
+
+This function is identical to [`Integrity#toString()`](#integrity-to-string),
+except it can be used on _any_ object that [`parse`](#parse) can handle -- that
+is, a string, a `Hash`-like, or an `Integrity`-like.
+
+The `opts.sep` option defines the string to use when joining multiple entries
+together. To be spec-compliant, this _must_ be whitespace. The default is a
+single space (`' '`).
+
+If `opts.strict` is true, the integrity string will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+// Useful for cleaning up input SRI strings:
+ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar')
+// -> 'sha512-foo sha384-bar'
+
+// Hash-like: only a single entry.
+ssri.stringify({
+ algorithm: 'sha512',
+ digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
+ options: ['foo']
+})
+// ->
+// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
+
+// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse`
+ssri.stringify({
+ 'sha512': [
+ {
+ algorithm: 'sha512',
+ digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
+ options: ['foo']
+ }
+ ]
+})
+// ->
+// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
+```
+
+#### <a name="integrity-concat"></a> `> Integrity#concat(otherIntegrity, [opts]) -> Integrity`
+
+Concatenates an `Integrity` object with another IntegrityLike, or an integrity
+string.
+
+This is functionally equivalent to concatenating the string format of both
+integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string.
+
+If `opts.strict` is true, the new `Integrity` will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+// This will combine the integrity checks for two different versions of
+// your index.js file so you can use a single integrity string and serve
+// either of these to clients, from a single `<script>` tag.
+const desktopIntegrity = ssri.fromData(fs.readFileSync('./index.desktop.js'))
+const mobileIntegrity = ssri.fromData(fs.readFileSync('./index.mobile.js'))
+
+// Note that browsers (and ssri) will succeed as long as ONE of the entries
+// for the *prioritized* algorithm succeeds. That is, in order for this fallback
+// to work, both desktop and mobile *must* use the same `algorithm` values.
+desktopIntegrity.concat(mobileIntegrity)
+```
+
+#### <a name="integrity-to-string"></a> `> Integrity#toString([opts]) -> String`
+
+Returns the string representation of an `Integrity` object. All hash entries
+will be concatenated in the string by `opts.sep`, which defaults to `' '`.
+
+If you want to serialize an object that didn't come from an `ssri` function,
+use [`ssri.stringify()`](#stringify).
+
+If `opts.strict` is true, the integrity string will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
+
+ssri.parse(integrity).toString() === integrity
+```
+
+#### <a name="integrity-to-json"></a> `> Integrity#toJSON() -> String`
+
+Returns the string representation of an `Integrity` object. All hash entries
+will be concatenated in the string by `' '`.
+
+This is a convenience method so you can pass an `Integrity` object directly to `JSON.stringify`.
+For more info check out [toJSON() behavior on mdn](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#toJSON%28%29_behavior).
+
+##### Example
+
+```javascript
+const integrity = '"sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo"'
+
+JSON.stringify(ssri.parse(integrity)) === integrity
+```
+
+#### <a name="integrity-pick-algorithm"></a> `> Integrity#pickAlgorithm([opts]) -> String`
+
+Returns the "best" algorithm from those available in the integrity object.
+
+If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
+arguments. ssri will prioritize whichever of the two algorithms is returned by
+this function. Note that the function may be called multiple times, and it
+**must** return one of the two algorithms provided. By default, ssri will make
+a best-effort to pick the strongest/most reliable of the given algorithms. It
+may intentionally deprioritize algorithms with known vulnerabilities.
+
+##### Example
+
+```javascript
+ssri.parse('sha1-WEakDigEST sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1').pickAlgorithm() // sha512
+```
+
+#### <a name="integrity-hex-digest"></a> `> Integrity#hexDigest() -> String`
+
+`Integrity` is assumed to be either a single-hash `Integrity` instance, or a
+`Hash` instance. Returns its `digest`, converted to a hex representation of the
+base64 data.
+
+##### Example
+
+```javascript
+ssri.parse('sha1-deadbeef').hexDigest() // '75e69d6de79f'
+```
+
+#### <a name="from-hex"></a> `> ssri.fromHex(hexDigest, algorithm, [opts]) -> Integrity`
+
+Creates an `Integrity` object with a single entry, based on a hex-formatted
+hash. This is a utility function to help convert existing shasums to the
+Integrity format, and is roughly equivalent to something like:
+
+```javascript
+algorithm + '-' + Buffer.from(hexDigest, 'hex').toString('base64')
+```
+
+`opts.options` may optionally be passed in: it must be an array of option
+strings that will be added to all integrity hashes generated by
+`fromHex`. This is a loosely-specified feature of SRIs, and currently has no
+specified semantics besides being `?`-separated. Use at your own risk, and
+probably avoid if your integrity strings are meant to be used with browsers.
+
+If `opts.strict` is true, the integrity object will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+If `opts.single` is true, a single `Hash` object will be returned.
+
+##### Example
+
+```javascript
+ssri.fromHex('75e69d6de79f', 'sha1').toString() // 'sha1-deadbeef'
+```
+
+#### <a name="from-data"></a> `> ssri.fromData(data, [opts]) -> Integrity`
+
+Creates an `Integrity` object from either string or `Buffer` data, calculating
+all the requested hashes and adding any specified options to the object.
+
+`opts.algorithms` determines which algorithms to generate hashes for. All
+results will be included in a single `Integrity` object. The default value for
+`opts.algorithms` is `['sha512']`. All algorithm strings must be hashes listed
+in `crypto.getHashes()` for the host Node.js platform.
+
+`opts.options` may optionally be passed in: it must be an array of option
+strings that will be added to all generated integrity hashes generated by
+`fromData`. This is a loosely-specified feature of SRIs, and currently has no
+specified semantics besides being `?`-separated. Use at your own risk, and
+probably avoid if your integrity strings are meant to be used with browsers.
+
+If `opts.strict` is true, the integrity object will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+const integrityObj = ssri.fromData('foobarbaz', {
+ algorithms: ['sha256', 'sha384', 'sha512']
+})
+integrity.toString('\n')
+// ->
+// sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0=
+// sha384-irnCxQ0CfQhYGlVAUdwTPC9bF3+YWLxlaDGM4xbYminxpbXEq+D+2GCEBTxcjES9
+// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
+```
+
+#### <a name="from-stream"></a> `> ssri.fromStream(stream, [opts]) -> Promise<Integrity>`
+
+Returns a Promise of an Integrity object calculated by reading data from
+a given `stream`.
+
+It accepts both `opts.algorithms` and `opts.options`, which are documented as
+part of [`ssri.fromData`](#from-data).
+
+Additionally, `opts.Promise` may be passed in to inject a Promise library of
+choice. By default, ssri will use Node's built-in Promises.
+
+If `opts.strict` is true, the integrity object will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+ssri.fromStream(fs.createReadStream('index.js'), {
+ algorithms: ['sha1', 'sha512']
+}).then(integrity => {
+ return ssri.checkStream(fs.createReadStream('index.js'), integrity)
+}) // succeeds
+```
+
+#### <a name="create"></a> `> ssri.create([opts]) -> <Hash>`
+
+Returns a Hash object with `update(<Buffer or string>[,enc])` and `digest()` methods.
+
+
+The Hash object provides the same methods as [crypto class Hash](https://nodejs.org/dist/latest-v6.x/docs/api/crypto.html#crypto_class_hash).
+`digest()` accepts no arguments and returns an Integrity object calculated by reading data from
+calls to update.
+
+It accepts both `opts.algorithms` and `opts.options`, which are documented as
+part of [`ssri.fromData`](#from-data).
+
+If `opts.strict` is true, the integrity object will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+const integrity = ssri.create().update('foobarbaz').digest()
+integrity.toString()
+// ->
+// sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1+9vBnypkYWg==
+```
+
+#### <a name="check-data"></a> `> ssri.checkData(data, sri, [opts]) -> Hash|false`
+
+Verifies `data` integrity against an `sri` argument. `data` may be either a
+`String` or a `Buffer`, and `sri` can be any subresource integrity
+representation that [`ssri.parse`](#parse) can handle.
+
+If verification succeeds, `checkData` will return the name of the algorithm that
+was used for verification (a truthy value). Otherwise, it will return `false`.
+
+If `opts.pickAlgorithm` is provided, it will be used by
+[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
+the available digests to match against.
+
+##### Example
+
+```javascript
+const data = fs.readFileSync('index.js')
+ssri.checkData(data, ssri.fromData(data)) // -> 'sha512'
+ssri.checkData(data, 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0')
+ssri.checkData(data, 'sha1-BaDDigEST') // -> false
+```
+
+#### <a name="check-stream"></a> `> ssri.checkStream(stream, sri, [opts]) -> Promise<Hash>`
+
+Verifies the contents of `stream` against an `sri` argument. `stream` will be
+consumed in its entirety by this process. `sri` can be any subresource integrity
+representation that [`ssri.parse`](#parse) can handle.
+
+`checkStream` will return a Promise that either resolves to the
+`Hash` that succeeded verification, or, if the verification fails
+or an error happens with `stream`, the Promise will be rejected.
+
+If the Promise is rejected because verification failed, the returned error will
+have `err.code` as `EINTEGRITY`.
+
+If `opts.size` is given, it will be matched against the stream size. An error
+with `err.code` `EBADSIZE` will be returned by a rejection if the expected size
+and actual size fail to match.
+
+If `opts.pickAlgorithm` is provided, it will be used by
+[`Integrity#pickAlgorithm`](#integrity-pick-algorithm) when deciding which of
+the available digests to match against.
+
+##### Example
+
+```javascript
+const integrity = ssri.fromData(fs.readFileSync('index.js'))
+
+ssri.checkStream(
+ fs.createReadStream('index.js'),
+ integrity
+)
+// ->
+// Promise<{
+// algorithm: 'sha512',
+// digest: 'sha512-yzd8ELD1piyANiWnmdnpCL5F52f10UfUdEkHywVZeqTt0ymgrxR63Qz0GB7TKPoeeZQmWCaz7T1'
+// }>
+
+ssri.checkStream(
+ fs.createReadStream('index.js'),
+ 'sha256-l981iLWj8kurw4UbNy8Lpxqdzd7UOxS50Glhv8FwfZ0'
+) // -> Promise<Hash>
+
+ssri.checkStream(
+ fs.createReadStream('index.js'),
+ 'sha1-BaDDigEST'
+) // -> Promise<Error<{code: 'EINTEGRITY'}>>
+```
+
+#### <a name="integrity-stream"></a> `> integrityStream(sri, [opts]) -> IntegrityStream`
+
+Returns a `Transform` stream that data can be piped through in order to generate
+and optionally check data integrity for piped data. When the stream completes
+successfully, it emits `size` and `integrity` events, containing the total
+number of bytes processed and a calculated `Integrity` instance based on stream
+data, respectively.
+
+If `opts.algorithms` is passed in, the listed algorithms will be calculated when
+generating the final `Integrity` instance. The default is `['sha512']`.
+
+If `opts.single` is passed in, a single `Hash` instance will be returned.
+
+If `opts.integrity` is passed in, it should be an `integrity` value understood
+by [`parse`](#parse) that the stream will check the data against. If
+verification succeeds, the integrity stream will emit a `verified` event whose
+value is a single `Hash` object that is the one that succeeded verification. If
+verification fails, the stream will error with an `EINTEGRITY` error code.
+
+If `opts.size` is given, it will be matched against the stream size. An error
+with `err.code` `EBADSIZE` will be emitted by the stream if the expected size
+and actual size fail to match.
+
+If `opts.pickAlgorithm` is provided, it will be passed two algorithms as
+arguments. ssri will prioritize whichever of the two algorithms is returned by
+this function. Note that the function may be called multiple times, and it
+**must** return one of the two algorithms provided. By default, ssri will make
+a best-effort to pick the strongest/most reliable of the given algorithms. It
+may intentionally deprioritize algorithms with known vulnerabilities.
+
+##### Example
+
+```javascript
+const integrity = ssri.fromData(fs.readFileSync('index.js'))
+fs.createReadStream('index.js')
+.pipe(ssri.integrityStream({integrity: integrity}))
+```
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/index.js b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/index.js
new file mode 100644
index 0000000000..8ece662ba6
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/index.js
@@ -0,0 +1,334 @@
+'use strict'
+
+const Buffer = require('safe-buffer').Buffer
+
+const crypto = require('crypto')
+const Transform = require('stream').Transform
+
+const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']
+
+const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
+const SRI_REGEX = /^([^-]+)-([^?]+)([?\S*]*)$/
+const STRICT_SRI_REGEX = /^([^-]+)-([A-Za-z0-9+/]+(?:=?=?))([?\x21-\x7E]*)$/
+const VCHAR_REGEX = /^[\x21-\x7E]+$/
+
+class Hash {
+ get isHash () { return true }
+ constructor (hash, opts) {
+ const strict = !!(opts && opts.strict)
+ this.source = hash.trim()
+ // 3.1. Integrity metadata (called "Hash" by ssri)
+ // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description
+ const match = this.source.match(
+ strict
+ ? STRICT_SRI_REGEX
+ : SRI_REGEX
+ )
+ if (!match) { return }
+ if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { return }
+ this.algorithm = match[1]
+ this.digest = match[2]
+
+ const rawOpts = match[3]
+ this.options = rawOpts ? rawOpts.slice(1).split('?') : []
+ }
+ hexDigest () {
+ return this.digest && Buffer.from(this.digest, 'base64').toString('hex')
+ }
+ toJSON () {
+ return this.toString()
+ }
+ toString (opts) {
+ if (opts && opts.strict) {
+ // Strict mode enforces the standard as close to the foot of the
+ // letter as it can.
+ if (!(
+ // The spec has very restricted productions for algorithms.
+ // https://www.w3.org/TR/CSP2/#source-list-syntax
+ SPEC_ALGORITHMS.some(x => x === this.algorithm) &&
+ // Usually, if someone insists on using a "different" base64, we
+ // leave it as-is, since there's multiple standards, and the
+ // specified is not a URL-safe variant.
+ // https://www.w3.org/TR/CSP2/#base64_value
+ this.digest.match(BASE64_REGEX) &&
+ // Option syntax is strictly visual chars.
+ // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression
+ // https://tools.ietf.org/html/rfc5234#appendix-B.1
+ (this.options || []).every(opt => opt.match(VCHAR_REGEX))
+ )) {
+ return ''
+ }
+ }
+ const options = this.options && this.options.length
+ ? `?${this.options.join('?')}`
+ : ''
+ return `${this.algorithm}-${this.digest}${options}`
+ }
+}
+
+class Integrity {
+ get isIntegrity () { return true }
+ toJSON () {
+ return this.toString()
+ }
+ toString (opts) {
+ opts = opts || {}
+ let sep = opts.sep || ' '
+ if (opts.strict) {
+ // Entries must be separated by whitespace, according to spec.
+ sep = sep.replace(/\S+/g, ' ')
+ }
+ return Object.keys(this).map(k => {
+ return this[k].map(hash => {
+ return Hash.prototype.toString.call(hash, opts)
+ }).filter(x => x.length).join(sep)
+ }).filter(x => x.length).join(sep)
+ }
+ concat (integrity, opts) {
+ const other = typeof integrity === 'string'
+ ? integrity
+ : stringify(integrity, opts)
+ return parse(`${this.toString(opts)} ${other}`, opts)
+ }
+ hexDigest () {
+ return parse(this, {single: true}).hexDigest()
+ }
+ pickAlgorithm (opts) {
+ const pickAlgorithm = (opts && opts.pickAlgorithm) || getPrioritizedHash
+ const keys = Object.keys(this)
+ if (!keys.length) {
+ throw new Error(`No algorithms available for ${
+ JSON.stringify(this.toString())
+ }`)
+ }
+ return keys.reduce((acc, algo) => {
+ return pickAlgorithm(acc, algo) || acc
+ })
+ }
+}
+
+module.exports.parse = parse
+function parse (sri, opts) {
+ opts = opts || {}
+ if (typeof sri === 'string') {
+ return _parse(sri, opts)
+ } else if (sri.algorithm && sri.digest) {
+ const fullSri = new Integrity()
+ fullSri[sri.algorithm] = [sri]
+ return _parse(stringify(fullSri, opts), opts)
+ } else {
+ return _parse(stringify(sri, opts), opts)
+ }
+}
+
+function _parse (integrity, opts) {
+ // 3.4.3. Parse metadata
+ // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ if (opts.single) {
+ return new Hash(integrity, opts)
+ }
+ return integrity.trim().split(/\s+/).reduce((acc, string) => {
+ const hash = new Hash(string, opts)
+ if (hash.algorithm && hash.digest) {
+ const algo = hash.algorithm
+ if (!acc[algo]) { acc[algo] = [] }
+ acc[algo].push(hash)
+ }
+ return acc
+ }, new Integrity())
+}
+
+module.exports.stringify = stringify
+function stringify (obj, opts) {
+ if (obj.algorithm && obj.digest) {
+ return Hash.prototype.toString.call(obj, opts)
+ } else if (typeof obj === 'string') {
+ return stringify(parse(obj, opts), opts)
+ } else {
+ return Integrity.prototype.toString.call(obj, opts)
+ }
+}
+
+module.exports.fromHex = fromHex
+function fromHex (hexDigest, algorithm, opts) {
+ const optString = (opts && opts.options && opts.options.length)
+ ? `?${opts.options.join('?')}`
+ : ''
+ return parse(
+ `${algorithm}-${
+ Buffer.from(hexDigest, 'hex').toString('base64')
+ }${optString}`, opts
+ )
+}
+
+module.exports.fromData = fromData
+function fromData (data, opts) {
+ opts = opts || {}
+ const algorithms = opts.algorithms || ['sha512']
+ const optString = opts.options && opts.options.length
+ ? `?${opts.options.join('?')}`
+ : ''
+ return algorithms.reduce((acc, algo) => {
+ const digest = crypto.createHash(algo).update(data).digest('base64')
+ const hash = new Hash(
+ `${algo}-${digest}${optString}`,
+ opts
+ )
+ if (hash.algorithm && hash.digest) {
+ const algo = hash.algorithm
+ if (!acc[algo]) { acc[algo] = [] }
+ acc[algo].push(hash)
+ }
+ return acc
+ }, new Integrity())
+}
+
+module.exports.fromStream = fromStream
+function fromStream (stream, opts) {
+ opts = opts || {}
+ const P = opts.Promise || Promise
+ const istream = integrityStream(opts)
+ return new P((resolve, reject) => {
+ stream.pipe(istream)
+ stream.on('error', reject)
+ istream.on('error', reject)
+ let sri
+ istream.on('integrity', s => { sri = s })
+ istream.on('end', () => resolve(sri))
+ istream.on('data', () => {})
+ })
+}
+
+module.exports.checkData = checkData
+function checkData (data, sri, opts) {
+ opts = opts || {}
+ sri = parse(sri, opts)
+ if (!Object.keys(sri).length) { return false }
+ const algorithm = sri.pickAlgorithm(opts)
+ const digests = sri[algorithm] || []
+ const digest = crypto.createHash(algorithm).update(data).digest('base64')
+ return digests.find(hash => hash.digest === digest) || false
+}
+
+module.exports.checkStream = checkStream
+function checkStream (stream, sri, opts) {
+ opts = opts || {}
+ const P = opts.Promise || Promise
+ const checker = integrityStream(Object.assign({}, opts, {
+ integrity: sri
+ }))
+ return new P((resolve, reject) => {
+ stream.pipe(checker)
+ stream.on('error', reject)
+ checker.on('error', reject)
+ let sri
+ checker.on('verified', s => { sri = s })
+ checker.on('end', () => resolve(sri))
+ checker.on('data', () => {})
+ })
+}
+
+module.exports.integrityStream = integrityStream
+function integrityStream (opts) {
+ opts = opts || {}
+ // For verification
+ const sri = opts.integrity && parse(opts.integrity, opts)
+ const goodSri = sri && Object.keys(sri).length
+ const algorithm = goodSri && sri.pickAlgorithm(opts)
+ const digests = goodSri && sri[algorithm]
+ // Calculating stream
+ const algorithms = opts.algorithms || [algorithm || 'sha512']
+ const hashes = algorithms.map(crypto.createHash)
+ let streamSize = 0
+ const stream = new Transform({
+ transform (chunk, enc, cb) {
+ streamSize += chunk.length
+ hashes.forEach(h => h.update(chunk, enc))
+ cb(null, chunk, enc)
+ }
+ }).on('end', () => {
+ const optString = (opts.options && opts.options.length)
+ ? `?${opts.options.join('?')}`
+ : ''
+ const newSri = parse(hashes.map((h, i) => {
+ return `${algorithms[i]}-${h.digest('base64')}${optString}`
+ }).join(' '), opts)
+ const match = (
+ // Integrity verification mode
+ opts.integrity &&
+ newSri[algorithm] &&
+ digests &&
+ digests.find(hash => {
+ return newSri[algorithm].find(newhash => {
+ return hash.digest === newhash.digest
+ })
+ })
+ )
+ if (typeof opts.size === 'number' && streamSize !== opts.size) {
+ const err = new Error(`stream size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${streamSize}`)
+ err.code = 'EBADSIZE'
+ err.found = streamSize
+ err.expected = opts.size
+ err.sri = sri
+ stream.emit('error', err)
+ } else if (opts.integrity && !match) {
+ const err = new Error(`${sri} integrity checksum failed when using ${algorithm}: wanted ${digests} but got ${newSri}. (${streamSize} bytes)`)
+ err.code = 'EINTEGRITY'
+ err.found = newSri
+ err.expected = digests
+ err.algorithm = algorithm
+ err.sri = sri
+ stream.emit('error', err)
+ } else {
+ stream.emit('size', streamSize)
+ stream.emit('integrity', newSri)
+ match && stream.emit('verified', match)
+ }
+ })
+ return stream
+}
+
+module.exports.create = createIntegrity
+function createIntegrity (opts) {
+ opts = opts || {}
+ const algorithms = opts.algorithms || ['sha512']
+ const optString = opts.options && opts.options.length
+ ? `?${opts.options.join('?')}`
+ : ''
+
+ const hashes = algorithms.map(crypto.createHash)
+
+ return {
+ update: function (chunk, enc) {
+ hashes.forEach(h => h.update(chunk, enc))
+ return this
+ },
+ digest: function (enc) {
+ const integrity = algorithms.reduce((acc, algo) => {
+ const digest = hashes.shift().digest('base64')
+ const hash = new Hash(
+ `${algo}-${digest}${optString}`,
+ opts
+ )
+ if (hash.algorithm && hash.digest) {
+ const algo = hash.algorithm
+ if (!acc[algo]) { acc[algo] = [] }
+ acc[algo].push(hash)
+ }
+ return acc
+ }, new Integrity())
+
+ return integrity
+ }
+ }
+}
+
+// This is a Best Effort™ at a reasonable priority for hash algos
+const DEFAULT_PRIORITY = [
+ 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'
+]
+function getPrioritizedHash (algo1, algo2) {
+ return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase())
+ ? algo1
+ : algo2
+}
diff --git a/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/package.json b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/package.json
new file mode 100644
index 0000000000..8bc38753dc
--- /dev/null
+++ b/deps/npm/node_modules/npm-profile/node_modules/make-fetch-happen/node_modules/ssri/package.json
@@ -0,0 +1,90 @@
+{
+ "_from": "ssri@^4.1.6",
+ "_id": "ssri@4.1.6",
+ "_inBundle": false,
+ "_integrity": "sha512-WUbCdgSAMQjTFZRWvSPpauryvREEA+Krn19rx67UlJEJx/M192ZHxMmJXjZ4tkdFm+Sb0SXGlENeQVlA5wY7kA==",
+ "_location": "/npm-profile/make-fetch-happen/ssri",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "ssri@^4.1.6",
+ "name": "ssri",
+ "escapedName": "ssri",
+ "rawSpec": "^4.1.6",
+ "saveSpec": null,
+ "fetchSpec": "^4.1.6"
+ },
+ "_requiredBy": [
+ "/npm-profile/make-fetch-happen",
+ "/npm-profile/make-fetch-happen/cacache"
+ ],
+ "_resolved": "https://registry.npmjs.org/ssri/-/ssri-4.1.6.tgz",
+ "_shasum": "0cb49b6ac84457e7bdd466cb730c3cb623e9a25b",
+ "_spec": "ssri@^4.1.6",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-profile/node_modules/make-fetch-happen",
+ "author": {
+ "name": "Kat Marchán",
+ "email": "kzm@sykosomatic.org"
+ },
+ "bugs": {
+ "url": "https://github.com/zkat/ssri/issues"
+ },
+ "bundleDependencies": false,
+ "config": {
+ "nyc": {
+ "exclude": [
+ "node_modules/**",
+ "test/**"
+ ]
+ }
+ },
+ "dependencies": {
+ "safe-buffer": "^5.1.0"
+ },
+ "deprecated": false,
+ "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
+ "devDependencies": {
+ "nyc": "^10.3.2",
+ "standard": "^9.0.2",
+ "standard-version": "^4.1.0",
+ "tap": "^10.3.3",
+ "weallbehave": "^1.2.0",
+ "weallcontribute": "^1.0.8"
+ },
+ "files": [
+ "*.js"
+ ],
+ "homepage": "https://github.com/zkat/ssri#readme",
+ "keywords": [
+ "w3c",
+ "web",
+ "security",
+ "integrity",
+ "checksum",
+ "hashing",
+ "subresource integrity",
+ "sri",
+ "sri hash",
+ "sri string",
+ "sri generator",
+ "html"
+ ],
+ "license": "CC0-1.0",
+ "main": "index.js",
+ "name": "ssri",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/zkat/ssri.git"
+ },
+ "scripts": {
+ "postrelease": "npm publish && git push --follow-tags",
+ "prerelease": "npm t",
+ "pretest": "standard",
+ "release": "standard-version -s",
+ "test": "tap -J --coverage test/*.js",
+ "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
+ "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
+ },
+ "version": "4.1.6"
+}
diff --git a/deps/npm/node_modules/npm-profile/package.json b/deps/npm/node_modules/npm-profile/package.json
index d0df6f801f..e698111270 100644
--- a/deps/npm/node_modules/npm-profile/package.json
+++ b/deps/npm/node_modules/npm-profile/package.json
@@ -1,8 +1,8 @@
{
- "_from": "npm-profile@latest",
- "_id": "npm-profile@2.0.4",
+ "_from": "npm-profile@2.0.5",
+ "_id": "npm-profile@2.0.5",
"_inBundle": false,
- "_integrity": "sha512-g8WTBuQDOXRuU46qyM1JcUMVhnvA74q77UY6eHb3CTpley2hySLxmNZh+fth+YoU7aYlvUOiCRNQXmSNiF66pw==",
+ "_integrity": "sha512-tLmpDUCV72f/1/oXoyb+VwsZjOlsanp34pZeIZS0mxDoQUOX4Ld1hgPeOqoX4XggE88m7W47DHET2v+qd6sihg==",
"_location": "/npm-profile",
"_phantomChildren": {
"cacache": "9.2.9",
@@ -13,23 +13,23 @@
"ssri": "4.1.6"
},
"_requested": {
- "type": "tag",
+ "type": "version",
"registry": true,
- "raw": "npm-profile@latest",
+ "raw": "npm-profile@2.0.5",
"name": "npm-profile",
"escapedName": "npm-profile",
- "rawSpec": "latest",
+ "rawSpec": "2.0.5",
"saveSpec": null,
- "fetchSpec": "latest"
+ "fetchSpec": "2.0.5"
},
"_requiredBy": [
"#USER",
"/"
],
- "_resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-2.0.4.tgz",
- "_shasum": "148070c0da22b512bf61a4a87758b957fdb4bbe7",
- "_spec": "npm-profile@latest",
- "_where": "/Users/rebecca/code/npm",
+ "_resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-2.0.5.tgz",
+ "_shasum": "0e61b8f1611bd19d1eeff5e3d5c82e557da3b9d7",
+ "_spec": "npm-profile@2.0.5",
+ "_where": "/Users/zkat/Documents/code/npm",
"author": {
"name": "Rebecca Turner",
"email": "me@re-becca.org",
@@ -57,5 +57,5 @@
"type": "git",
"url": "git+https://github.com/npm/npm-profile.git"
},
- "version": "2.0.4"
+ "version": "2.0.5"
}