author     Ruy Adorno <ruyadorno@hotmail.com>    2020-11-17 15:37:44 -0500
committer  Rich Trott <rtrott@gmail.com>         2020-11-20 05:27:50 -0800
commit     8d381c0ff0e1cc8e2aa64e202a514f0022264907 (patch)
tree       1a978e3b9961d1c5a9bca7a60d6cdb6eb6dafac8 /deps
parent     80a83d9e244a9c129e2e6ae941e41f83dde84edb (diff)
download   ios-node-v8-8d381c0ff0e1cc8e2aa64e202a514f0022264907.tar.gz
           ios-node-v8-8d381c0ff0e1cc8e2aa64e202a514f0022264907.tar.bz2
           ios-node-v8-8d381c0ff0e1cc8e2aa64e202a514f0022264907.zip
deps: upgrade npm to 7.0.12
PR-URL: https://github.com/nodejs/node/pull/36153
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Rich Trott <rtrott@gmail.com>
Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de>
Reviewed-By: Gireesh Punathil <gpunathi@in.ibm.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: Trivikram Kamat <trivikr.dev@gmail.com>
Diffstat (limited to 'deps')
-rw-r--r--  deps/npm/.eslintrc.json | 2
-rw-r--r--  deps/npm/AUTHORS | 1
-rw-r--r--  deps/npm/CHANGELOG.md | 27
-rw-r--r--  deps/npm/docs/content/commands/npm-audit.md | 4
-rw-r--r--  deps/npm/docs/output/commands/npm-audit.html | 4
-rw-r--r--  deps/npm/docs/output/commands/npm-ls.html | 2
-rw-r--r--  deps/npm/docs/output/commands/npm.html | 2
-rw-r--r--  deps/npm/lib/audit.js | 4
-rw-r--r--  deps/npm/lib/ci.js | 4
-rw-r--r--  deps/npm/lib/dedupe.js | 4
-rw-r--r--  deps/npm/lib/init.js | 15
-rw-r--r--  deps/npm/lib/install.js | 63
-rw-r--r--  deps/npm/lib/link.js | 6
-rw-r--r--  deps/npm/lib/prune.js | 4
-rw-r--r--  deps/npm/lib/run-script.js | 2
-rw-r--r--  deps/npm/lib/uninstall.js | 4
-rw-r--r--  deps/npm/lib/update.js | 4
-rw-r--r--  deps/npm/lib/utils/reify-finish.js | 31
-rw-r--r--  deps/npm/man/man1/npm-audit.1 | 4
-rw-r--r--  deps/npm/man/man1/npm-ls.1 | 2
-rw-r--r--  deps/npm/man/man1/npm.1 | 2
-rw-r--r--  deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js | 20
-rw-r--r--  deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js | 3
-rw-r--r--  deps/npm/node_modules/@npmcli/run-script/package.json | 3
-rw-r--r--  deps/npm/node_modules/puka/CHANGELOG.md | 31
-rw-r--r--  deps/npm/node_modules/puka/LICENSE.txt | 18
-rw-r--r--  deps/npm/node_modules/puka/README.md | 411
-rw-r--r--  deps/npm/node_modules/puka/index.js | 804
-rw-r--r--  deps/npm/node_modules/puka/package.json | 38
-rw-r--r--  deps/npm/package.json | 10
-rw-r--r--  deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js | 19
-rw-r--r--  deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js | 15
-rw-r--r--  deps/npm/test/bin/npm-cli.js | 2
-rw-r--r--  deps/npm/test/bin/npx-cli.js | 18
-rw-r--r--  deps/npm/test/lib/access.js | 166
-rw-r--r--  deps/npm/test/lib/adduser.js | 32
-rw-r--r--  deps/npm/test/lib/audit.js | 55
-rw-r--r--  deps/npm/test/lib/auth/legacy.js | 70
-rw-r--r--  deps/npm/test/lib/auth/oauth.js | 8
-rw-r--r--  deps/npm/test/lib/auth/saml.js | 8
-rw-r--r--  deps/npm/test/lib/auth/sso.js | 34
-rw-r--r--  deps/npm/test/lib/bin.js | 6
-rw-r--r--  deps/npm/test/lib/birthday.js | 2
-rw-r--r--  deps/npm/test/lib/bugs.js | 68
-rw-r--r--  deps/npm/test/lib/cache.js | 36
-rw-r--r--  deps/npm/test/lib/ci.js | 68
-rw-r--r--  deps/npm/test/lib/cli.js | 47
-rw-r--r--  deps/npm/test/lib/config.js | 68
-rw-r--r--  deps/npm/test/lib/dedupe.js | 20
-rw-r--r--  deps/npm/test/lib/dist-tag.js | 59
-rw-r--r--  deps/npm/test/lib/docs.js | 48
-rw-r--r--  deps/npm/test/lib/exec.js | 314
-rw-r--r--  deps/npm/test/lib/explain.js | 60
-rw-r--r--  deps/npm/test/lib/explore.js | 100
-rw-r--r--  deps/npm/test/lib/find-dupes.js | 4
-rw-r--r--  deps/npm/test/lib/fund.js | 292
-rw-r--r--  deps/npm/test/lib/get.js | 6
-rw-r--r--  deps/npm/test/lib/init.js | 211
-rw-r--r--  deps/npm/test/lib/install.js | 114
-rw-r--r--  deps/npm/test/lib/link.js | 115
-rw-r--r--  deps/npm/test/lib/ll.js | 8
-rw-r--r--  deps/npm/test/lib/load-all-commands.js | 3
-rw-r--r--  deps/npm/test/lib/load-all.js | 4
-rw-r--r--  deps/npm/test/lib/logout.js | 18
-rw-r--r--  deps/npm/test/lib/ls.js | 1944
-rw-r--r--  deps/npm/test/lib/npm.js | 65
-rw-r--r--  deps/npm/test/lib/outdated.js | 175
-rw-r--r--  deps/npm/test/lib/owner.js | 131
-rw-r--r--  deps/npm/test/lib/pack.js | 69
-rw-r--r--  deps/npm/test/lib/ping.js | 14
-rw-r--r--  deps/npm/test/lib/prefix.js | 2
-rw-r--r--  deps/npm/test/lib/prune.js | 14
-rw-r--r--  deps/npm/test/lib/repo.js | 186
-rw-r--r--  deps/npm/test/lib/root.js | 2
-rw-r--r--  deps/npm/test/lib/run-script.js | 260
-rw-r--r--  deps/npm/test/lib/test.js | 8
-rw-r--r--  deps/npm/test/lib/token.js | 293
-rw-r--r--  deps/npm/test/lib/utils/audit-error.js | 34
-rw-r--r--  deps/npm/test/lib/utils/cleanup-log-files.js | 20
-rw-r--r--  deps/npm/test/lib/utils/completion/installed-deep.js | 122
-rw-r--r--  deps/npm/test/lib/utils/completion/installed-shallow.js | 56
-rw-r--r--  deps/npm/test/lib/utils/completion/none.js | 2
-rw-r--r--  deps/npm/test/lib/utils/config.js | 49
-rw-r--r--  deps/npm/test/lib/utils/error-handler.js | 52
-rw-r--r--  deps/npm/test/lib/utils/error-message.js | 102
-rw-r--r--  deps/npm/test/lib/utils/escape-arg.js | 2
-rw-r--r--  deps/npm/test/lib/utils/escape-exec-path.js | 2
-rw-r--r--  deps/npm/test/lib/utils/explain-dep.js | 2
-rw-r--r--  deps/npm/test/lib/utils/explain-eresolve.js | 2
-rw-r--r--  deps/npm/test/lib/utils/file-exists.js | 4
-rw-r--r--  deps/npm/test/lib/utils/flat-options.js | 52
-rw-r--r--  deps/npm/test/lib/utils/get-identity.js | 34
-rw-r--r--  deps/npm/test/lib/utils/get-project-scope.js | 10
-rw-r--r--  deps/npm/test/lib/utils/hosted-git-info-from-manifest.js | 6
-rw-r--r--  deps/npm/test/lib/utils/is-windows-bash.js | 4
-rw-r--r--  deps/npm/test/lib/utils/is-windows-shell.js | 2
-rw-r--r--  deps/npm/test/lib/utils/is-windows.js | 2
-rw-r--r--  deps/npm/test/lib/utils/lifecycle-cmd.js | 6
-rw-r--r--  deps/npm/test/lib/utils/path.js | 2
-rw-r--r--  deps/npm/test/lib/utils/perf.js | 10
-rw-r--r--  deps/npm/test/lib/utils/ping.js | 4
-rw-r--r--  deps/npm/test/lib/utils/proc-log-listener.js | 16
-rw-r--r--  deps/npm/test/lib/utils/read-local-package.js | 16
-rw-r--r--  deps/npm/test/lib/utils/reify-finish.js | 80
-rw-r--r--  deps/npm/test/lib/utils/reify-output.js | 152
-rw-r--r--  deps/npm/test/lib/utils/setup-log.js | 85
-rw-r--r--  deps/npm/test/lib/utils/tar.js | 55
-rw-r--r--  deps/npm/test/lib/utils/unsupported.js | 6
-rw-r--r--  deps/npm/test/lib/utils/update-notifier.js | 33
-rw-r--r--  deps/npm/test/lib/view.js | 451
-rw-r--r--  deps/npm/test/lib/whoami.js | 4
111 files changed, 5022 insertions, 3247 deletions
diff --git a/deps/npm/.eslintrc.json b/deps/npm/.eslintrc.json
index 6232a8f821..139716eefd 100644
--- a/deps/npm/.eslintrc.json
+++ b/deps/npm/.eslintrc.json
@@ -133,7 +133,7 @@
"no-shadow-restricted-names": "error",
"no-sparse-arrays": "error",
"no-tabs": "error",
- "no-template-curly-in-string": "error",
+ "no-template-curly-in-string": "off",
"no-this-before-super": "error",
"no-throw-literal": "off",
"no-trailing-spaces": "error",
diff --git a/deps/npm/AUTHORS b/deps/npm/AUTHORS
index c0986e8be2..2405a171bc 100644
--- a/deps/npm/AUTHORS
+++ b/deps/npm/AUTHORS
@@ -733,3 +733,4 @@ Jan Tojnar <jtojnar@gmail.com>
Jason Attwood <jason_attwood@hotmail.co.uk>
Vlad GURDIGA <gurdiga@gmail.com>
Sébastien Puech <s.puech@tricentis.com>
+Jannis Hell <Primajin@users.noreply.github.com>
diff --git a/deps/npm/CHANGELOG.md b/deps/npm/CHANGELOG.md
index 8cafde1158..9c40ec4167 100644
--- a/deps/npm/CHANGELOG.md
+++ b/deps/npm/CHANGELOG.md
@@ -1,3 +1,30 @@
+## 7.0.12 (2020-11-17)
+
+### BUG FIXES
+
+* [`7b89576bd`](https://github.com/npm/cli/commit/7b89576bd1fa557a312a841afa66b895558d1b12)
+ [#2174](https://github.com/npm/cli/issues/2174)
+ fix running empty scripts with `npm run-script`
+ ([@nlf](https://github.com/nlf))
+* [`bc9afb195`](https://github.com/npm/cli/commit/bc9afb195f5aad7c06bc96049c0f00dc8e752dee)
+ [#2002](https://github.com/npm/cli/issues/2002)
+ [#2184](https://github.com/npm/cli/issues/2184)
+ Preserve builtin conf when installing npm globally
+ ([@isaacs](https://github.com/isaacs))
+
+### DEPENDENCIES
+
+* [`b74c05d88`](https://github.com/npm/cli/commit/b74c05d88dc48fabef031ea66ffaa4e548845655)
+ `@npmcli/run-script@1.8.0`
+ * fix windows command-line argument escaping
+
+### DOCUMENTATION
+
+* [`4e522fdc9`](https://github.com/npm/cli/commit/4e522fdc917bc85af2ca8ff7669a0178e2f35123)
+ [#2179](https://github.com/npm/cli/issues/2179)
+ remove mention to --parseable option from `npm audit` docs
+ ([@Primajin](https://github.com/Primajin))
+
## 7.0.11 (2020-11-13)
### DEPENDENCIES
diff --git a/deps/npm/docs/content/commands/npm-audit.md b/deps/npm/docs/content/commands/npm-audit.md
index 645ab87b15..2c0a8f5804 100644
--- a/deps/npm/docs/content/commands/npm-audit.md
+++ b/deps/npm/docs/content/commands/npm-audit.md
@@ -7,8 +7,8 @@ description: Run a security audit
### Synopsis
```bash
-npm audit [--json|--parseable|--audit-level=(low|moderate|high|critical)]
-npm audit fix [--force|--package-lock-only|--dry-run]
+npm audit [--json] [--production] [--audit-level=(low|moderate|high|critical)]
+npm audit fix [--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)]
common options: [--production] [--only=(dev|prod)]
```
diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html
index 4dd59417e8..4482f66089 100644
--- a/deps/npm/docs/output/commands/npm-audit.html
+++ b/deps/npm/docs/output/commands/npm-audit.html
@@ -145,8 +145,8 @@ npm command-line interface
</section>
<div id="_content"><h3 id="synopsis">Synopsis</h3>
-<pre lang="bash"><code>npm audit [--json|--parseable|--audit-level=(low|moderate|high|critical)]
-npm audit fix [--force|--package-lock-only|--dry-run]
+<pre lang="bash"><code>npm audit [--json] [--production] [--audit-level=(low|moderate|high|critical)]
+npm audit fix [--force|--package-lock-only|--dry-run|--production|--only=(dev|prod)]
common options: [--production] [--only=(dev|prod)]
</code></pre>
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html
index 437f062c29..fa7ec7e0b0 100644
--- a/deps/npm/docs/output/commands/npm-ls.html
+++ b/deps/npm/docs/output/commands/npm-ls.html
@@ -156,7 +156,7 @@ installed, as well as their dependencies, in a tree-structure.</p>
limit the results to only the paths to the packages named. Note that
nested packages will <em>also</em> show the paths to the specified packages.
For example, running <code>npm ls promzard</code> in npm’s source tree will show:</p>
-<pre lang="bash"><code> npm@7.0.11 /path/to/npm
+<pre lang="bash"><code> npm@7.0.12 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
</code></pre>
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index 77cf24a8fb..42f76ca7c4 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -148,7 +148,7 @@ npm command-line interface
<pre lang="bash"><code>npm &lt;command&gt; [args]
</code></pre>
<h3 id="version">Version</h3>
-<p>7.0.11</p>
+<p>7.0.12</p>
<h3 id="description">Description</h3>
<p>npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/lib/audit.js b/deps/npm/lib/audit.js
index e77beab1ef..cb8ab5b3a4 100644
--- a/deps/npm/lib/audit.js
+++ b/deps/npm/lib/audit.js
@@ -2,7 +2,7 @@ const Arborist = require('@npmcli/arborist')
const auditReport = require('npm-audit-report')
const npm = require('./npm.js')
const output = require('./utils/output.js')
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const auditError = require('./utils/audit-error.js')
const audit = async args => {
@@ -14,7 +14,7 @@ const audit = async args => {
const fix = args[0] === 'fix'
await arb.audit({ fix })
if (fix)
- reifyOutput(arb)
+ await reifyFinish(arb)
else {
// will throw if there's an error, because this is an audit command
auditError(arb.auditReport)
diff --git a/deps/npm/lib/ci.js b/deps/npm/lib/ci.js
index a1632e7e98..1255fbc264 100644
--- a/deps/npm/lib/ci.js
+++ b/deps/npm/lib/ci.js
@@ -1,7 +1,7 @@
const util = require('util')
const Arborist = require('@npmcli/arborist')
const rimraf = util.promisify(require('rimraf'))
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const log = require('npmlog')
const npm = require('./npm.js')
@@ -35,7 +35,7 @@ const ci = async () => {
])
// npm ci should never modify the lockfile or package.json
await arb.reify({ ...npm.flatOptions, save: false })
- reifyOutput(arb)
+ await reifyFinish(arb)
}
module.exports = Object.assign(cmd, { completion, usage })
diff --git a/deps/npm/lib/dedupe.js b/deps/npm/lib/dedupe.js
index a08c9f3f8f..fe8243e21e 100644
--- a/deps/npm/lib/dedupe.js
+++ b/deps/npm/lib/dedupe.js
@@ -2,7 +2,7 @@
const npm = require('./npm.js')
const Arborist = require('@npmcli/arborist')
const usageUtil = require('./utils/usage.js')
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const usage = usageUtil('dedupe', 'npm dedupe')
const completion = require('./utils/completion/none.js')
@@ -18,7 +18,7 @@ const dedupe = async (args) => {
dryRun,
})
await arb.dedupe(npm.flatOptions)
- reifyOutput(arb)
+ await reifyFinish(arb)
}
module.exports = Object.assign(cmd, { usage, completion })
diff --git a/deps/npm/lib/init.js b/deps/npm/lib/init.js
index e805a2eda7..ed476ef38c 100644
--- a/deps/npm/lib/init.js
+++ b/deps/npm/lib/init.js
@@ -1,11 +1,11 @@
-// initialize a package.json file
-
-const usageUtil = require('./utils/usage.js')
-const completion = require('./utils/completion/none.js')
+'use strict'
+const initJson = require('init-package-json')
const npa = require('npm-package-arg')
+
const npm = require('./npm.js')
-const initJson = require('init-package-json')
+const usageUtil = require('./utils/usage.js')
+const completion = require('./utils/completion/none.js')
const output = require('./utils/output.js')
const usage = usageUtil(
@@ -78,11 +78,12 @@ const init = async args => {
npm.log.warn('init', 'canceled')
return res()
}
- npm.log.info('init', 'written successfully')
if (er)
rej(er)
- else
+ else {
+ npm.log.info('init', 'written successfully')
res(data)
+ }
})
})
}
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
index 5f04fcd4f9..f621c85c23 100644
--- a/deps/npm/lib/install.js
+++ b/deps/npm/lib/install.js
@@ -6,13 +6,15 @@ const util = require('util')
const readdir = util.promisify(fs.readdir)
const npm = require('./npm.js')
const usageUtil = require('./utils/usage.js')
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const log = require('npmlog')
const { resolve, join } = require('path')
const Arborist = require('@npmcli/arborist')
const runScript = require('@npmcli/run-script')
-const install = async (args, cb) => {
+const cmd = async (args, cb) => install(args).then(() => cb()).catch(cb)
+
+const install = async args => {
// the /path/to/node_modules/..
const globalTop = resolve(npm.globalDir, '..')
const { ignoreScripts, global: isGlobalInstall } = npm.flatOptions
@@ -34,38 +36,33 @@ const install = async (args, cb) => {
path: where,
})
- try {
- await arb.reify({
- ...npm.flatOptions,
- add: args,
- })
- if (!args.length && !isGlobalInstall && !ignoreScripts) {
- const { scriptShell } = npm.flatOptions
- const scripts = [
- 'preinstall',
- 'install',
- 'postinstall',
- 'prepublish', // XXX should we remove this finally??
- 'preprepare',
- 'prepare',
- 'postprepare',
- ]
- for (const event of scripts) {
- await runScript({
- path: where,
- args: [],
- scriptShell,
- stdio: 'inherit',
- stdioString: true,
- event,
- })
- }
+ await arb.reify({
+ ...npm.flatOptions,
+ add: args,
+ })
+ if (!args.length && !isGlobalInstall && !ignoreScripts) {
+ const { scriptShell } = npm.flatOptions
+ const scripts = [
+ 'preinstall',
+ 'install',
+ 'postinstall',
+ 'prepublish', // XXX should we remove this finally??
+ 'preprepare',
+ 'prepare',
+ 'postprepare',
+ ]
+ for (const event of scripts) {
+ await runScript({
+ path: where,
+ args: [],
+ scriptShell,
+ stdio: 'inherit',
+ stdioString: true,
+ event,
+ })
}
- reifyOutput(arb)
- cb()
- } catch (er) {
- cb(er)
}
+ await reifyFinish(arb)
}
const usage = usageUtil(
@@ -144,4 +141,4 @@ const completion = async (opts, cb) => {
cb()
}
-module.exports = Object.assign(install, { usage, completion })
+module.exports = Object.assign(cmd, { usage, completion })
diff --git a/deps/npm/lib/link.js b/deps/npm/lib/link.js
index d7303fd086..bee44d43a7 100644
--- a/deps/npm/lib/link.js
+++ b/deps/npm/lib/link.js
@@ -10,7 +10,7 @@ const semver = require('semver')
const npm = require('./npm.js')
const usageUtil = require('./utils/usage.js')
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const completion = (opts, cb) => {
const dir = npm.globalDir
@@ -122,7 +122,7 @@ const linkInstall = async args => {
add: names.map(l => `file:${resolve(globalTop, 'node_modules', l)}`),
})
- reifyOutput(localArb)
+ await reifyFinish(localArb)
}
const linkPkg = async () => {
@@ -133,7 +133,7 @@ const linkPkg = async () => {
global: true,
})
await arb.reify({ add: [`file:${npm.prefix}`] })
- reifyOutput(arb)
+ await reifyFinish(arb)
}
module.exports = Object.assign(cmd, { completion, usage })
diff --git a/deps/npm/lib/prune.js b/deps/npm/lib/prune.js
index aa2ed37808..ea6ed4108a 100644
--- a/deps/npm/lib/prune.js
+++ b/deps/npm/lib/prune.js
@@ -3,7 +3,7 @@ const npm = require('./npm.js')
const Arborist = require('@npmcli/arborist')
const usageUtil = require('./utils/usage.js')
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const usage = usageUtil('prune',
'npm prune [[<@scope>/]<pkg>...] [--production]'
@@ -19,7 +19,7 @@ const prune = async () => {
path: where,
})
await arb.prune(npm.flatOptions)
- reifyOutput(arb)
+ await reifyFinish(arb)
}
module.exports = Object.assign(cmd, { usage, completion })
diff --git a/deps/npm/lib/run-script.js b/deps/npm/lib/run-script.js
index 568a5712f6..c095e6decd 100644
--- a/deps/npm/lib/run-script.js
+++ b/deps/npm/lib/run-script.js
@@ -46,7 +46,7 @@ const runScript = async (args) => {
pkg.scripts = scripts
- if (!scripts[event] && !(event === 'start' && await isServerPackage(path))) {
+ if (!Object.prototype.hasOwnProperty.call(scripts, event) && !(event === 'start' && await isServerPackage(path))) {
if (npm.config.get('if-present'))
return
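This hunk is the "fix running empty scripts with `npm run-script`" change from the 7.0.12 changelog. A minimal sketch of why the check changed (the script name here is invented, not taken from npm's tests): the old truthiness test treats an empty script string as a missing script, while the `hasOwnProperty` test does not.

```javascript
const scripts = { clean: '' } // a deliberately empty "clean" script
const event = 'clean'

// Old check: '' is falsy, so the script was wrongly reported as missing.
console.log(!scripts[event]) // true

// New check: the key exists, so the (empty) script is still allowed to run.
console.log(!Object.prototype.hasOwnProperty.call(scripts, event)) // false
```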
diff --git a/deps/npm/lib/uninstall.js b/deps/npm/lib/uninstall.js
index ec997ae645..dbaa992f50 100644
--- a/deps/npm/lib/uninstall.js
+++ b/deps/npm/lib/uninstall.js
@@ -5,7 +5,7 @@ const npm = require('./npm.js')
const rpj = require('read-package-json-fast')
const { resolve } = require('path')
const usageUtil = require('./utils/usage.js')
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const cmd = (args, cb) => rm(args).then(() => cb()).catch(cb)
@@ -32,7 +32,7 @@ const rm = async args => {
...npm.flatOptions,
rm: args,
})
- reifyOutput(arb)
+ await reifyFinish(arb)
}
const usage = usageUtil(
diff --git a/deps/npm/lib/update.js b/deps/npm/lib/update.js
index 791e67e407..0a786e30f3 100644
--- a/deps/npm/lib/update.js
+++ b/deps/npm/lib/update.js
@@ -4,7 +4,7 @@ const Arborist = require('@npmcli/arborist')
const log = require('npmlog')
const npm = require('./npm.js')
const usageUtil = require('./utils/usage.js')
-const reifyOutput = require('./utils/reify-output.js')
+const reifyFinish = require('./utils/reify-finish.js')
const completion = require('./utils/completion/installed-deep.js')
const usage = usageUtil(
@@ -32,7 +32,7 @@ const update = async args => {
})
await arb.reify({ update })
- reifyOutput(arb)
+ await reifyFinish(arb)
}
module.exports = Object.assign(cmd, { usage, completion })
diff --git a/deps/npm/lib/utils/reify-finish.js b/deps/npm/lib/utils/reify-finish.js
new file mode 100644
index 0000000000..76dba06cb5
--- /dev/null
+++ b/deps/npm/lib/utils/reify-finish.js
@@ -0,0 +1,31 @@
+const reifyOutput = require('./reify-output.js')
+const npm = require('../npm.js')
+const ini = require('ini')
+const {writeFile} = require('fs').promises
+const {resolve} = require('path')
+
+const reifyFinish = async arb => {
+ await saveBuiltinConfig(arb)
+ reifyOutput(arb)
+}
+
+const saveBuiltinConfig = async arb => {
+ const { options: { global }, actualTree } = arb
+ if (!global)
+ return
+
+ // if we are using a builtin config, and just installed npm as
+ // a top-level global package, we have to preserve that config.
+ const npmNode = actualTree.inventory.get('node_modules/npm')
+ if (!npmNode)
+ return
+
+ const builtinConf = npm.config.data.get('builtin')
+ if (builtinConf.loadError)
+ return
+
+ const content = ini.stringify(builtinConf.raw).trim() + '\n'
+ await writeFile(resolve(npmNode.path, 'npmrc'), content)
+}
+
+module.exports = reifyFinish
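The new `reify-finish.js` implements the "preserve builtin conf when installing npm globally" fix from the changelog: every command that previously called `reifyOutput(arb)` now awaits `reifyFinish(arb)`, which re-writes the builtin `npmrc` into a freshly installed global npm before printing the reify summary. As a rough sketch of the file write at the end (the config keys below are invented for illustration, not npm's real builtin config):

```javascript
const ini = require('ini')

// Hypothetical builtin config contents
const builtinRaw = { prefix: '/usr/local', 'scripts-prepend-node-path': true }

// Mirrors the write in reify-finish.js: stringify, trim, re-add a trailing newline
const content = ini.stringify(builtinRaw).trim() + '\n'
// content is roughly:
// prefix=/usr/local
// scripts-prepend-node-path=true
```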
diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1
index ebd91e8198..cd52afeac3 100644
--- a/deps/npm/man/man1/npm-audit.1
+++ b/deps/npm/man/man1/npm-audit.1
@@ -5,8 +5,8 @@
.P
.RS 2
.nf
-npm audit [\-\-json|\-\-parseable|\-\-audit\-level=(low|moderate|high|critical)]
-npm audit fix [\-\-force|\-\-package\-lock\-only|\-\-dry\-run]
+npm audit [\-\-json] [\-\-production] [\-\-audit\-level=(low|moderate|high|critical)]
+npm audit fix [\-\-force|\-\-package\-lock\-only|\-\-dry\-run|\-\-production|\-\-only=(dev|prod)]
common options: [\-\-production] [\-\-only=(dev|prod)]
.fi
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 67c5234ef4..87d6c7ae99 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -22,7 +22,7 @@ For example, running \fBnpm ls promzard\fP in npm's source tree will show:
.P
.RS 2
.nf
- npm@7\.0\.11 /path/to/npm
+ npm@7\.0\.12 /path/to/npm
└─┬ init\-package\-json@0\.0\.4
└── promzard@0\.1\.5
.fi
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index 32e4a5b425..1464bc383b 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -10,7 +10,7 @@ npm <command> [args]
.RE
.SS Version
.P
-7\.0\.11
+7\.0\.12
.SS Description
.P
npm is the package manager for the Node JavaScript platform\. It puts
diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js
index 181be8493f..aa241d5e61 100644
--- a/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js
+++ b/deps/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js
@@ -3,6 +3,24 @@ const isWindows = require('./is-windows.js')
const setPATH = require('./set-path.js')
const {resolve} = require('path')
const npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js')
+const { quoteForShell, ShellString, ShellStringText, ShellStringUnquoted } = require('puka')
+
+const escapeCmd = cmd => {
+ const result = []
+ const parsed = ShellString.sh([cmd])
+ for (const child of parsed.children) {
+ if (child instanceof ShellStringText) {
+ const children = child.contents.filter(segment => segment !== null).map(segment => quoteForShell(segment, false, isWindows && 'win32'))
+ result.push(...children)
+ } else if (child instanceof ShellStringUnquoted) {
+ result.push(child.value)
+ } else {
+ result.push(isWindows ? '&' : ';')
+ }
+ }
+
+ return result.join('')
+}
const makeSpawnArgs = options => {
const {
@@ -16,7 +34,7 @@ const makeSpawnArgs = options => {
} = options
const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(scriptShell)
- const args = isCmd ? ['/d', '/s', '/c', `"${cmd}"`] : ['-c', cmd]
+ const args = isCmd ? ['/d', '/s', '/c', escapeCmd(cmd)] : ['-c', escapeCmd(cmd)]
const spawnOpts = {
env: setPATH(path, {
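The new `escapeCmd` re-quotes each textual segment of the script with puka before it reaches the shell, so quoting now differs per platform instead of passing `cmd` through verbatim. A small hedged illustration using puka's documented `quoteForShell` (sample strings invented; outputs follow the quoting rules described in puka's README later in this diff):

```javascript
const { quoteForShell } = require('puka')

// The same argument, formatted for each platform family.
quoteForShell('file with spaces.txt', false, 'linux') // => "'file with spaces.txt'"
quoteForShell('file with spaces.txt', false, 'win32') // => '"file with spaces.txt"'
```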
diff --git a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
index 47f386304e..ccde173e01 100644
--- a/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
+++ b/deps/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js
@@ -6,7 +6,8 @@ const signalManager = require('./signal-manager.js')
const isServerPackage = require('./is-server-package.js')
// you wouldn't like me when I'm angry...
-const bruce = (id, event, cmd) => `\n> ${id ? id + ' ' : ''}${event}\n> ${cmd}\n`
+const bruce = (id, event, cmd) =>
+ `\n> ${id ? id + ' ' : ''}${event}\n> ${cmd.trim().replace(/\n/g, '\n> ')}\n`
const runScriptPkg = async options => {
const {
diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json
index c8a052f036..925e85c061 100644
--- a/deps/npm/node_modules/@npmcli/run-script/package.json
+++ b/deps/npm/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/run-script",
- "version": "1.7.5",
+ "version": "1.8.0",
"description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
"author": "Isaac Z. Schlueter <i@izs.me> (https://izs.me)",
"license": "ISC",
@@ -32,6 +32,7 @@
"@npmcli/promise-spawn": "^1.3.0",
"infer-owner": "^1.0.4",
"node-gyp": "^7.1.0",
+ "puka": "^1.0.1",
"read-package-json-fast": "^1.1.3"
},
"files": [
diff --git a/deps/npm/node_modules/puka/CHANGELOG.md b/deps/npm/node_modules/puka/CHANGELOG.md
new file mode 100644
index 0000000000..781b81295a
--- /dev/null
+++ b/deps/npm/node_modules/puka/CHANGELOG.md
@@ -0,0 +1,31 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [1.0.1](https://gitlab.com/rhendric/puka/-/compare/v1.0.0...v1.0.1) - 2020-05-16
+
+### Fixed
+
+- Add more carets to win32 command arguments ([45965ca](https://gitlab.com/rhendric/puka/-/commit/45965ca60fcc518082e0b085d8e81f3f3279ffb4))
+
+ As previously documented and implemented, Puka assumed that all programs
+ are batch files for the purpose of multi-escaping commands that appear
+ in pipelines. However, regardless of whether a command is in a pipeline,
+ one extra layer of escaping is needed if the command invokes a batch
+ file, which Puka was not producing. This only applies to the arguments
+ to the command, not to the batch file path, nor to paths used in
+ redirects. (The property-based spawn test which was supposed to catch
+ such oversights missed this one because it was invoking the Node.js
+ executable directly, not, as recommended in the documentation, a batch
+ file.)
+
+ Going forward, the caveats described in the documentation continue to
+ apply: if you are running programs on Windows with Puka, make sure they
+ are batch files, or you may find arguments are being escaped with too
+ many carets. As the documentation says, if this causes problems for you,
+ please open an issue so we can work out the details of what a good
+ workaround looks like.
+
+## [1.0.0](https://gitlab.com/rhendric/puka/-/tags/v1.0.0) - 2017-09-29
diff --git a/deps/npm/node_modules/puka/LICENSE.txt b/deps/npm/node_modules/puka/LICENSE.txt
new file mode 100644
index 0000000000..0141196a59
--- /dev/null
+++ b/deps/npm/node_modules/puka/LICENSE.txt
@@ -0,0 +1,18 @@
+Copyright 2017 Ryan Hendrickson <ryan.hendrickson@alum.mit.edu>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/puka/README.md b/deps/npm/node_modules/puka/README.md
new file mode 100644
index 0000000000..edbda4d3a2
--- /dev/null
+++ b/deps/npm/node_modules/puka/README.md
@@ -0,0 +1,411 @@
+# Puka
+
+[![GitLab CI pipeline status](https://gitlab.com/rhendric/puka/badges/master/pipeline.svg)](https://gitlab.com/rhendric/puka/commits/master) [![AppVeyor build status](https://img.shields.io/appveyor/ci/rhendric/puka.svg?label=windows%20tests)](https://ci.appveyor.com/project/rhendric/puka) [![Codecov status](https://img.shields.io/codecov/c/gl/rhendric/puka.svg)](https://codecov.io/gl/rhendric/puka)
+
+Puka is a cross-platform library for safely passing strings through shells.
+
+#### Contents
+
+- [Introduction](#introduction)
+ - [Why would I use Puka?](#why-would-i-use-puka)
+ - [How do I use Puka?](#how-do-i-use-puka)
+ - [What's the catch?](#whats-the-catch)
+- [API Documentation](#api-documentation)
+ - [Basic API](#basic-api)
+ - [sh](#sh)
+ - [unquoted](#unquoted)
+ - [Advanced API](#advanced-api)
+ - [quoteForShell](#quoteforshell)
+ - [quoteForCmd](#quoteforcmd)
+ - [quoteForSh](#quoteforsh)
+ - [ShellString](#shellstring)
+ - [Secret API](#secret-api)
+- [The sh DSL](#the-sh-dsl)
+ - [Syntax](#syntax)
+ - [Semantics](#semantics)
+ - [Types of placeholders](#types-of-placeholders)
+
+## Introduction
+
+### Why would I use Puka?
+
+When launching a child process from Node, you have a choice between launching
+directly from the operating system (as with [child_process.spawn](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options),
+if you don't use the `{ shell: true }` option), and running the command through
+a shell (as with [child_process.exec](https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback)).
+Using a shell gives you more power, such as the ability to chain multiple
+commands together or use redirection, but you have to construct your command as
+a single string instead of using an array of arguments. And doing that can be
+buggy (if not dangerous) if you don't take care to quote any arguments
+correctly for the shell you're targeting, _and_ the quoting has to be done
+differently on Windows and non-Windows shells.
+
+Puka solves that problem by giving you a simple and platform-agnostic way to
+build shell commands with arguments that pass through your shell unaltered and
+with no unsafe side effects, **whether you are running on Windows or a
+Unix-based OS**.
+
+### How do I use Puka?
+
+Puka gives you an `sh` function intended for tagging
+[template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals),
+which quotes (if necessary) any values interpolated into the template. A simple
+example:
+
+```javascript
+const { sh } = require('puka');
+const { execSync } = require('child_process');
+
+const arg = 'file with spaces.txt';
+execSync(sh`some-command ${arg}`);
+```
+
+But Puka supports more than this! See [the `sh` DSL documentation](#the-sh-dsl)
+for a detailed description of all the features currently supported.
+
+### What's the catch?
+
+Here are the ones I know about:
+
+Puka does _not_ ensure that the actual commands you're running are
+cross-platform. If you're running npm programs, you generally won't have a
+problem with that, but if you want to run ``sh`cat file` `` on Windows, you'll
+need to depend on something like
+[cash-cat](https://www.npmjs.com/package/cash-cat).
+
+I searched for days for a way to quote or escape line breaks in arguments to
+`cmd.exe`, but couldn't find one (regular `^`-prepending and quotation marks
+don't seem to cut it). If you know of a way that works, please [open an
+issue](https://gitlab.com/rhendric/puka/issues/new) to tell me about it! Until
+then, any line break characters (`\r` or `\n`) in values being interpolated by
+`sh` will cause an error to be thrown on Windows only.
+
+Also on Windows, you may notice quoting mistakes if you run commands that
+involve invoking a native executable (not a batch file ending in `.cmd` or
+`.bat`). Unfortunately, batch files require some extra escaping on Windows, and
+Puka assumes all programs are batch files because npm creates batch file shims
+for programs it installs (and, if you care about cross-platform, you'll be
+using npm programs in your commands). If this causes problems for you, please
+[open an issue](https://gitlab.com/rhendric/puka/issues/new); if your situation
+is specific enough, there may be workarounds or improvements to Puka to be
+found.
+
+## API Documentation
+
+### Basic API
+
+
+
+
+#### sh
+
+A string template tag for safely constructing cross-platform shell commands.
+
+An `sh` template is not actually treated as a literal string to be
+interpolated; instead, it is a tiny DSL designed to make working with shell
+strings safe, simple, and straightforward. To get started quickly, see the
+examples below. [More detailed documentation][1] is available
+further down.
+
+##### Examples
+
+```javascript
+const title = '"this" & "that"';
+sh`script --title=${title}`; // => "script '--title=\"this\" & \"that\"'"
+// Note: these examples show results for non-Windows platforms.
+// On Windows, the above would instead be
+// 'script ^^^"--title=\\^^^"this\\^^^" ^^^& \\^^^"that\\^^^"^^^"'.
+
+const names = ['file1', 'file 2'];
+sh`rimraf ${names}.txt`; // => "rimraf file1.txt 'file 2.txt'"
+
+const cmd1 = ['cat', 'file 1.txt', 'file 2.txt'];
+const cmd2 = ['use-input', '-abc'];
+sh`${cmd1}|${cmd2}`; // => "cat 'file 1.txt' 'file 2.txt'|use-input -abc"
+```
+
+Returns **[String][2]** a string formatted for the platform Node is currently
+running on.
+
+#### unquoted
+
+This function permits raw strings to be interpolated into a `sh` template.
+
+**IMPORTANT**: If you're using Puka due to security concerns, make sure you
+don't pass any untrusted content to `unquoted`. This may be obvious, but
+stray punctuation in an `unquoted` section can compromise the safety of the
+entire shell command.
+
+##### Parameters
+
+- `value` any value (it will be treated as a string)
+
+##### Examples
+
+```javascript
+const both = true;
+sh`foo ${unquoted(both ? '&&' : '||')} bar`; // => 'foo && bar'
+```
+
+### Advanced API
+
+If these functions make life easier for you, go ahead and use them; they
+are just as well supported as the above. But if you aren't certain you
+need them, you probably don't.
+
+
+#### quoteForShell
+
+Quotes a string for injecting into a shell command.
+
+This function is exposed for some hypothetical case when the `sh` DSL simply
+won't do; `sh` is expected to be the more convenient option almost always.
+Compare:
+
+```javascript
+console.log('cmd' + args.map(a => ' ' + quoteForShell(a)).join(''));
+console.log(sh`cmd ${args}`); // same as above
+
+console.log('cmd' + args.map(a => ' ' + quoteForShell(a, true)).join(''));
+console.log(sh`cmd "${args}"`); // same as above
+```
+
+Additionally, on Windows, `sh` checks the entire command string for pipes,
+which subtly change how arguments need to be quoted. If your commands may
+involve pipes, you are strongly encouraged to use `sh` and not try to roll
+your own with `quoteForShell`.
+
+##### Parameters
+
+- `text` **[String][2]** to be quoted
+- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string
+ is already safe. Defaults to `false`.
+- `platform` **[String][2]?** a value that `process.platform` might take:
+ `'win32'`, `'linux'`, etc.; determines how the string is to be formatted.
+ When omitted, effectively the same as `process.platform`.
+
+Returns **[String][2]** a string that is safe for the current (or specified)
+platform.
+
+#### quoteForCmd
+
+A Windows-specific version of [quoteForShell][4].
+
+##### Parameters
+
+- `text` **[String][2]** to be quoted
+- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string
+ is already safe. Defaults to `false`.
+
+#### quoteForSh
+
+A Unix-specific version of [quoteForShell][4].
+
+##### Parameters
+
+- `text` **[String][2]** to be quoted
+- `forceQuote` **[Boolean][3]?** whether to always add quotes even if the string
+ is already safe. Defaults to `false`.
+
+#### ShellString
+
+A ShellString represents a shell command after it has been interpolated, but
+before it has been formatted for a particular platform. ShellStrings are
+useful if you want to prepare a command for a different platform than the
+current one, for instance.
+
+To create a ShellString, use `ShellString.sh` the same way you would use
+top-level `sh`.
+
+##### toString
+
+A method to format a ShellString into a regular String formatted for a
+particular platform.
+
+###### Parameters
+
+- `platform` **[String][2]?** a value that `process.platform` might take:
+ `'win32'`, `'linux'`, etc.; determines how the string is to be formatted.
+ When omitted, effectively the same as `process.platform`.
+
+Returns **[String][2]**
+
+##### sh
+
+`ShellString.sh` is a template tag just like `sh`; the only difference is
+that this function returns a ShellString which has not yet been formatted
+into a String.
+
+Returns **[ShellString][5]**
+
+### Secret API
+
+Some internals of string formatting have been exposed for the ambitious and
+brave souls who want to try to extend Puka to handle more shells or custom
+interpolated values. This ‘secret’ API is partially documented in the code
+but not here, and the semantic versioning guarantees on this API are bumped
+down by one level: in other words, minor version releases of Puka can change
+the secret API in backward-incompatible ways, and patch releases can add or
+deprecate functionality.
+
+If it's not even documented in the code, use at your own risk—no semver
+guarantees apply.
+
+
+[1]: #the-sh-dsl
+
+[2]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String
+
+[3]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean
+
+[4]: #quoteforshell
+
+[5]: #shellstring
+
+## The sh DSL
+
+### Syntax
+
+An `sh` template comprises words, separated by whitespace. Words can contain:
+
+- text, which is composed of any characters that are not whitespace, single or
+ double quotes, or any of the special characters
+ ``# $ & ( ) ; < > \ ` |``;
+- quotations, which are matching single or double quotes surrounding any
+ characters other than the delimiting quote; and
+- placeholders, using the standard JavaScript template syntax (`${}`).
+ (Placeholders may also appear inside quotations.)
+
+The special characters ``# $ & ( ) ; < > \ ` |``, if unquoted, form their own
+words.
+
+Redirect operators (`<`, `>`, `>>`, `2>`, etc.) receive their own special
+handling, as do semicolons. Other than these two exceptions, no attempt is made
+to understand any more sophisticated features of shell syntax.
+
+Standard JavaScript escape sequences, such as `\t`, are honored in the template
+literal, and are treated equivalently to the characters they represent. There
+is no further mechanism for escaping within the `sh` DSL itself; in particular,
+if you want to put quotes inside quotes, you have to use interpolation, like
+this:
+
+```javascript
+sh`echo "${'single = \', double = "'}"` // => "echo 'single = '\\'', double = \"'"
+```
+
+### Semantics
+
+Words that do not contain placeholders are emitted mostly verbatim to the
+output string. Quotations are formatted in the expected style for the target
+platform (single quotes for Unix, double quotes for Windows) regardless of the
+quotes used in the template literal—as with JavaScript, single and double quotes
+are interchangeable, except for the requirement to pair like with like. Unquoted
+semicolons are translated to ampersands on Windows; all other special characters
+(as enumerated above), when unquoted, are passed as-is to the output for the
+shell to interpret.
+
+Puka may still quote words not containing the above special characters, if they
+contain characters that need quoting on the target platform. For example, on
+Windows, the character `%` is used for variable interpolation in `cmd.exe`, and
+Puka quotes it on that platform even if it appears unquoted in the template
+literal. Consequently, there is no need to be paranoid about quoting anything
+that doesn't look alphanumeric inside a `sh` template literal, for fear of being
+burned on a different operating system; anything that matches the definition of
+‘text’ above will never need manual quoting.
+
+#### Types of placeholders
+
+##### Strings
+
+If a word contains a string placeholder, then the value of the placeholder is
+interpolated into the word and the entire word, if necessary, is quoted. If
+the placeholder occurs within quotes, no further quoting is performed:
+
+```javascript
+sh`script --file="${'herp derp'}.txt"`; // => "script --file='herp derp.txt'"
+```
+
+This behavior can be exploited to force consistent quoting, if desired; but
+both of the examples below are safe on all platforms:
+
+```javascript
+const words = ['oneword', 'two words'];
+sh`minimal ${words[0]}`; // => "minimal oneword"
+sh`minimal ${words[1]}`; // => "minimal 'two words'"
+sh`consistent '${words[0]}'`; // => "consistent 'oneword'"
+sh`consistent '${words[1]}'`; // => "consistent 'two words'"
+```
+
+##### Arrays and iterables
+
+If a word contains a placeholder for an array (or other iterable object), then
+the entire word is repeated once for each value in the array, separated by
+spaces. If the array is empty, then the word is not emitted at all, and neither
+is any leading whitespace.
+
+```javascript
+const files = ['foo', 'bar'];
+sh`script ${files}`; // => "script foo bar"
+sh`script --file=${files}`; // => "script --file=foo --file=bar"
+sh`script --file=${[]}`; // => "script"
+```
+
+Note that, since special characters are their own words, the pipe operator here
+is not repeated:
+
+```javascript
+const cmd = ['script', 'foo', 'bar'];
+sh`${cmd}|another-script`; // => "script foo bar|another-script"
+```
+
+Multiple arrays in the same word generate a Cartesian product:
+
+```javascript
+const names = ['foo', 'bar'], exts = ['log', 'txt'];
+// Same word
+sh`... ${names}.${exts}`; // => "... foo.log foo.txt bar.log bar.txt"
+sh`... "${names} ${exts}"`; // => "... 'foo log' 'foo txt' 'bar log' 'bar txt'"
+
+// Not the same word (extra space just for emphasis):
+sh`... ${names} ${exts}`; // => "... foo bar log txt"
+sh`... ${names};${exts}`; // => "... foo bar;log txt"
+```
+
+Finally, if a placeholder appears in the object of a redirect operator, the
+entire redirect is repeated as necessary:
+
+```javascript
+sh`script > ${['foo', 'bar']}.txt`; // => "script > foo.txt > bar.txt"
+sh`script > ${[]}.txt`; // => "script"
+```
+
+##### unquoted
+
+The `unquoted` function returns a value that will skip being quoted when used
+in a placeholder, alone or in an array.
+
+```javascript
+const cmd = 'script < input.txt';
+const fields = ['foo', 'bar'];
+sh`${unquoted(cmd)} | json ${fields}`; // => "script < input.txt | json foo bar"
+```
+
+##### ShellString
+
+If `ShellString.sh` is used to construct an unformatted ShellString, that value
+can be used in a placeholder to insert the contents of the ShellString into the
+outer template literal. This is safer than using `unquoted` as in the previous
+example, but `unquoted` can be used when all you have is a string from another
+(trusted!) source.
+
+```javascript
+const url = 'http://example.com/data.json?x=1&y=2';
+const curl = ShellString.sh`curl -L ${url}`;
+const fields = ['foo', 'bar'];
+sh`${curl} | json ${fields}`; // => "curl -L 'http://example.com/data.json?x=1&y=2' | json foo bar"
+```
+
+##### Anything else
+
+... is treated like a string—namely, a value `x` is equivalent to `'' + x`, if
+not in one of the above categories.
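Tying the README above back to the `@npmcli/run-script` change earlier in this diff: `ShellString.sh` parses a command once and formats it per platform, which is essentially what the new `escapeCmd` in `make-spawn-args.js` relies on. A short hedged example (command and filename invented; the outputs follow the quoting rules documented above):

```javascript
const { ShellString } = require('puka')

// Build the command once, format it for a specific platform later.
const cmd = ShellString.sh`rimraf ${'file with spaces.txt'}`

cmd.toString('linux') // => "rimraf 'file with spaces.txt'"
cmd.toString('win32') // => 'rimraf "file with spaces.txt"'
```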
diff --git a/deps/npm/node_modules/puka/index.js b/deps/npm/node_modules/puka/index.js
new file mode 100644
index 0000000000..b69e47d763
--- /dev/null
+++ b/deps/npm/node_modules/puka/index.js
@@ -0,0 +1,804 @@
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+/**
+ * Key a method on your object with this symbol and you can get special
+ * formatting for that value! See ShellStringText, ShellStringUnquoted, or
+ * shellStringSemicolon for examples.
+ * @ignore
+ */
+const formatSymbol = Symbol('format');
+/**
+ * This symbol is for implementing advanced behaviors like the need for extra
+ * carets in Windows shell strings that use pipes. If present, it's called in
+ * an earlier phase than formatSymbol, and is passed a mutable context that can
+ * be read during the format phase to influence formatting.
+ * @ignore
+ */
+const preformatSymbol = Symbol('preformat');
+
+// When minimum Node version becomes 6, replace calls to sticky with /.../y and
+// inline execFrom.
+let stickySupported = true;
+try {
+ new RegExp('', 'y');
+} catch (e) {
+ stickySupported = false;
+}
+const sticky = stickySupported ? source => new RegExp(source, 'y') : source => new RegExp(`^(?:${source})`);
+const execFrom = stickySupported ? (re, haystack, index) => (re.lastIndex = index, re.exec(haystack)) : (re, haystack, index) => re.exec(haystack.substr(index));
+
+function quoteForCmd(text, forceQuote) {
+ let caretDepth = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
+ // See the below blog post for an explanation of this function and
+ // quoteForWin32:
+ // https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
+ if (!text.length) {
+ return '""';
+ }
+ if (/[\n\r]/.test(text)) {
+ throw new Error("Line breaks can't be quoted on Windows");
+ }
+ const caretEscape = /["%]/.test(text);
+ text = quoteForWin32(text, forceQuote || !caretEscape && /[&()<>^|]/.test(text));
+ if (caretEscape) {
+ // See Win32Context for explanation of what caretDepth is for.
+ do {
+ text = text.replace(/[\t "%&()<>^|]/g, '^$&');
+ } while (caretDepth--);
+ }
+ return text;
+}
+const quoteForWin32 = (text, forceQuote) => forceQuote || /[\t "]/.test(text) ? `"${text.replace(/\\+(?=$|")/g, '$&$&').replace(/"/g, '\\"')}"` : text;
+const cmdMetaChars = /[\t\n\r "%&()<>^|]/;
+class Win32Context {
+ constructor() {
+ this.currentScope = newScope(null);
+ this.scopesByObject = new Map();
+ this.argDetectState = 0;
+ this.argSet = new Set();
+ }
+ read(text) {
+ // When cmd.exe executes a batch file, or pipes to or from one, it spawns a
+ // second copy of itself to run the inner command. This necessitates
+ // doubling up on carets so that escaped characters survive both cmd.exe
+ // invocations. See:
+ // https://stackoverflow.com/questions/8192318/why-does-delayed-expansion-fail-when-inside-a-piped-block-of-code#8194279
+ // https://ss64.com/nt/syntax-redirection.html
+ //
+ // Parentheses can create an additional subshell, requiring additional
+ // escaping... it's a mess.
+ //
+ // So here's what we do about it: we read all unquoted text in a shell
+ // string and put it through this tiny parser that looks for pipes,
+ // sequence operators (&, &&, ||), redirects, and parentheses. This can't
+ // be part of the main Puka parsing, because it can be affected by
+ // `unquoted(...)` values provided at evaluation time.
+ //
+ // Then, after associating each thing that needs to be quoted with a scope
+ // (via `mark()`), and identifying whether or not it's an argument to a
+ // command, we can determine the depth of caret escaping required in each
+ // scope and pass it (via `Formatter::quote()`) to `quoteForCmd()`.
+ //
+ // See also `ShellStringText`, which holds the logic for the previous
+ // paragraph.
+ const length = text.length;
+ for (let pos = 0, match; pos < length;) {
+ while (match = execFrom(reUnimportant, text, pos)) {
+ if (match[2] == null) {
+ // (not whitespace)
+ if (match[1] != null) {
+ // (>&)
+ this.argDetectState = this.argDetectState === 0 ? ADS_FLAG_INITIAL_REDIRECT : 0;
+ } else if (this.argDetectState !== ADS_FLAG_ARGS) {
+ this.argDetectState |= ADS_FLAG_WORD;
+ }
+ } else {
+ // (whitespace)
+ if ((this.argDetectState & ADS_FLAG_WORD) !== 0) {
+ this.argDetectState = ADS_FLAG_ARGS & ~this.argDetectState >> 1;
+ }
+ }
+ pos += match[0].length;
+ }
+ if (pos >= length) break;
+ if (match = execFrom(reSeqOp, text, pos)) {
+ this.seq();
+ pos += match[0].length;
+ } else {
+ const char = text.charCodeAt(pos);
+ if (char === CARET) {
+ pos += 2;
+ } else if (char === QUOTE) {
+ // If you were foolish enough to leave a dangling quotation mark in
+ // an unquoted span... you're likely to have bigger problems than
+ // incorrect escaping. So we just do the simplest thing of looking for
+ // the end quote only in this piece of text.
+ pos += execFrom(reNotQuote, text, pos + 1)[0].length + 2;
+ } else {
+ if (char === OPEN_PAREN) {
+ this.enterScope();
+ } else if (char === CLOSE_PAREN) {
+ this.exitScope();
+ } else if (char === PIPE) {
+ this.pipe();
+ } else {
+ // (char === '<' or '>')
+ this.argDetectState = this.argDetectState === 0 ? ADS_FLAG_INITIAL_REDIRECT : 0;
+ }
+ pos++;
+ }
+ }
+ }
+ }
+ enterScope() {
+ this.currentScope = newScope(this.currentScope);
+ this.argDetectState = 0;
+ }
+ exitScope() {
+ this.currentScope = this.currentScope.parent || (this.currentScope.parent = newScope(null));
+ this.argDetectState = ADS_FLAG_ARGS;
+ }
+ seq() {
+ // | binds tighter than sequence operators, so the latter create new sibling
+ // scopes for future |s to mutate.
+ this.currentScope = newScope(this.currentScope.parent);
+ this.argDetectState = 0;
+ }
+ pipe() {
+ this.currentScope.depthDelta = 1;
+ this.argDetectState = 0;
+ }
+ mark(obj) {
+ this.scopesByObject.set(obj, this.currentScope);
+ if (this.argDetectState === ADS_FLAG_ARGS) {
+ this.argSet.add(obj);
+ } else {
+ this.argDetectState |= ADS_FLAG_WORD;
+ }
+ }
+ at(obj) {
+ const scope = this.scopesByObject.get(obj);
+ return {
+ depth: getDepth(scope),
+ isArgument: this.argSet.has(obj),
+ isNative: scope.isNative
+ };
+ }
+}
+// These flags span the Win32Context's argument detection state machine. WORD
+// is set when the context is inside a word that is not an argument (meaning it
+// is either the first word in the command, or it is the object of a redirect).
+// ARGS is set when the context has reached the arguments of a command.
+// INITIAL_REDIRECT tracks the edge case when a redirect occurs before the
+// first word of the command (if this flag is set, reaching the end of a word
+// should take the state machine back to 0 instead of setting ADS_FLAG_ARGS).
+const ADS_FLAG_WORD = 0x1;
+const ADS_FLAG_ARGS = 0x2;
+const ADS_FLAG_INITIAL_REDIRECT = 0x4;
+const getDepth = scope => scope === null ? 0 : scope.depth !== -1 ? scope.depth : scope.depth = getDepth(scope.parent) + scope.depthDelta;
+const newScope = parent => ({
+ parent,
+ depthDelta: 0,
+ depth: -1,
+ isNative: false
+});
+const CARET = '^'.charCodeAt();
+const QUOTE = '"'.charCodeAt();
+const OPEN_PAREN = '('.charCodeAt();
+const CLOSE_PAREN = ')'.charCodeAt();
+const PIPE = '|'.charCodeAt();
+const reNotQuote = sticky('[^"]*');
+const reSeqOp = sticky('&&?|\\|\\|');
+const reUnimportant = sticky('(\\d*>&)|[^\\s"$&()<>^|]+|(\\s+)');
+
+const quoteForSh = (text, forceQuote) => text.length ? forceQuote || shMetaChars.test(text) ? `'${text.replace(/'/g, "'\\''")}'`.replace(/^(?:'')+(?!$)/, '').replace(/\\'''/g, "\\'") : text : "''";
+const shMetaChars = /[\t\n\r "#$&'()*;<>?\\`|~]/;
+
+/**
+ * To get a Formatter, call `Formatter.for`.
+ *
+ * To create a new Formatter, pass an object to `Formatter.declare`.
+ *
+ * To set the global default Formatter, assign to `Formatter.default`.
+ *
+ * @class
+ * @property {Formatter} default - The Formatter to be used when no platform
+ * is provided—for example, when creating strings with `sh`.
+ * @ignore
+ */
+function Formatter() {}
+Object.assign(Formatter,
+/** @lends Formatter */
+{
+ /**
+ * Gets a Formatter that has been declared for the provided platform, or
+ * the base `'sh'` formatter if there is no Formatter specific to this
+ * platform, or the Formatter for the current platform if no specific platform
+ * is provided.
+ */
+ for(platform) {
+ return platform == null ? Formatter.default || (Formatter.default = Formatter.for(process.platform)) : Formatter._registry.get(platform) || Formatter._registry.get('sh');
+ },
+ /**
+ * Creates a new Formatter or mutates the properties on an existing
+ * Formatter. The `platform` key on the provided properties object determines
+ * when the Formatter is retrieved.
+ */
+ declare(props) {
+ const platform = props && props.platform || 'sh';
+ const existingFormatter = Formatter._registry.get(platform);
+ const formatter = Object.assign(existingFormatter || new Formatter(), props);
+ formatter.emptyString === void 0 && (formatter.emptyString = formatter.quote('', true));
+ existingFormatter || Formatter._registry.set(formatter.platform, formatter);
+ },
+ _registry: new Map(),
+ prototype: {
+ platform: 'sh',
+ quote: quoteForSh,
+ metaChars: shMetaChars,
+ hasExtraMetaChars: false,
+ statementSeparator: ';',
+ createContext() {
+ return defaultContext;
+ }
+ }
+});
+const defaultContext = {
+ at() {}
+};
+Formatter.declare();
+Formatter.declare({
+ platform: 'win32',
+ quote(text, forceQuote, opts) {
+ const caretDepth = opts ? (opts.depth || 0) + (opts.isArgument && !opts.isNative ? 1 : 0) : 0;
+ return quoteForCmd(text, forceQuote, caretDepth);
+ },
+ metaChars: cmdMetaChars,
+ hasExtraMetaChars: true,
+ statementSeparator: '&',
+ createContext(root) {
+ const context = new this.Context();
+ root[preformatSymbol](context);
+ return context;
+ },
+ Context: Win32Context
+});
+
+const isObject = any => any === Object(any);
+function memoize(f) {
+ const cache = new WeakMap();
+ return arg => {
+ let result = cache.get(arg);
+ if (result === void 0) {
+ result = f(arg);
+ cache.set(arg, result);
+ }
+ return result;
+ };
+}
+
+/**
+ * Represents a contiguous span of text that may or must be quoted. The contents
+ * may already contain quoted segments, which will always be quoted. If unquoted
+ * segments also require quoting, the entire span will be quoted together.
+ * @ignore
+ */
+class ShellStringText {
+ constructor(contents, untested) {
+ this.contents = contents;
+ this.untested = untested;
+ }
+ [formatSymbol](formatter, context) {
+ const unformattedContents = this.contents;
+ const length = unformattedContents.length;
+ const contents = new Array(length);
+ for (let i = 0; i < length; i++) {
+ const c = unformattedContents[i];
+ contents[i] = isObject(c) && formatSymbol in c ? c[formatSymbol](formatter) : c;
+ }
+ for (let unquoted = true, i = 0; i < length; i++) {
+ const content = contents[i];
+ if (content === null) {
+ unquoted = !unquoted;
+ } else {
+ if (unquoted && (formatter.hasExtraMetaChars || this.untested && this.untested.has(i)) && formatter.metaChars.test(content)) {
+ return formatter.quote(contents.join(''), false, context.at(this));
+ }
+ }
+ }
+ const parts = [];
+ for (let quoted = null, i = 0; i < length; i++) {
+ const content = contents[i];
+ if (content === null) {
+ quoted = quoted ? (parts.push(formatter.quote(quoted.join(''), true, context.at(this))), null) : [];
+ } else {
+ (quoted || parts).push(content);
+ }
+ }
+ const result = parts.join('');
+ return result.length ? result : formatter.emptyString;
+ }
+ [preformatSymbol](context) {
+ context.mark(this);
+ }
+}
+
+/**
+ * Represents a contiguous span of text that will not be quoted.
+ * @ignore
+ */
+class ShellStringUnquoted {
+ constructor(value) {
+ this.value = value;
+ }
+ [formatSymbol]() {
+ return this.value;
+ }
+ [preformatSymbol](context) {
+ context.read(this.value);
+ }
+}
+
+/**
+ * Represents a semicolon... or an ampersand, on Windows.
+ * @ignore
+ */
+const shellStringSemicolon = {
+ [formatSymbol](formatter) {
+ return formatter.statementSeparator;
+ },
+ [preformatSymbol](context) {
+ context.seq();
+ }
+};
+
+const PLACEHOLDER = {};
+const parse = memoize(templateSpans => {
+ // These are the token types our DSL can recognize. Their values won't escape
+ // this function.
+ const TOKEN_TEXT = 0;
+ const TOKEN_QUOTE = 1;
+ const TOKEN_SEMI = 2;
+ const TOKEN_UNQUOTED = 3;
+ const TOKEN_SPACE = 4;
+ const TOKEN_REDIRECT = 5;
+ const result = [];
+ let placeholderCount = 0;
+ let prefix = null;
+ let onlyPrefixOnce = false;
+ let contents = [];
+ let quote = 0;
+ const lastSpan = templateSpans.length - 1;
+ for (let spanIndex = 0; spanIndex <= lastSpan; spanIndex++) {
+ const templateSpan = templateSpans[spanIndex];
+ const posEnd = templateSpan.length;
+ let tokenStart = 0;
+ if (spanIndex) {
+ placeholderCount++;
+ contents.push(PLACEHOLDER);
+ }
+ // For each span, we first do a recognizing pass in which we use regular
+ // expressions to identify the positions of tokens in the text, and then
+ // a second pass that actually splits the text into the minimum number of
+ // substrings necessary.
+ const recognized = []; // [type1, index1, type2, index2...]
+ let firstWordBreak = -1;
+ let lastWordBreak = -1;
+ {
+ let pos = 0,
+ match;
+ while (pos < posEnd) {
+ if (quote) {
+ if (match = execFrom(quote === CHAR_SQUO ? reQuotation1 : reQuotation2, templateSpan, pos)) {
+ recognized.push(TOKEN_TEXT, pos);
+ pos += match[0].length;
+ }
+ if (pos < posEnd) {
+ recognized.push(TOKEN_QUOTE, pos++);
+ quote = 0;
+ }
+ } else {
+ if (match = execFrom(reRedirectOrSpace, templateSpan, pos)) {
+ firstWordBreak < 0 && (firstWordBreak = pos);
+ lastWordBreak = pos;
+ recognized.push(match[1] ? TOKEN_REDIRECT : TOKEN_SPACE, pos);
+ pos += match[0].length;
+ }
+ if (match = execFrom(reText, templateSpan, pos)) {
+ const setBreaks = match[1] != null;
+ setBreaks && firstWordBreak < 0 && (firstWordBreak = pos);
+ recognized.push(setBreaks ? TOKEN_UNQUOTED : TOKEN_TEXT, pos);
+ pos += match[0].length;
+ setBreaks && (lastWordBreak = pos);
+ }
+ const char = templateSpan.charCodeAt(pos);
+ if (char === CHAR_SEMI) {
+ firstWordBreak < 0 && (firstWordBreak = pos);
+ recognized.push(TOKEN_SEMI, pos++);
+ lastWordBreak = pos;
+ } else if (char === CHAR_SQUO || char === CHAR_DQUO) {
+ recognized.push(TOKEN_QUOTE, pos++);
+ quote = char;
+ }
+ }
+ }
+ }
+ // Word breaks are only important if they separate words with placeholders,
+ // so we can ignore the first/last break if this is the first/last span.
+ spanIndex === 0 && (firstWordBreak = -1);
+ spanIndex === lastSpan && (lastWordBreak = posEnd);
+ // Here begins the second pass mentioned above. This loop runs one more
+ // iteration than there are tokens in recognized, because it handles tokens
+ // on a one-iteration delay; hence the i <= iEnd instead of i < iEnd.
+ const iEnd = recognized.length;
+ for (let i = 0, type = -1; i <= iEnd; i += 2) {
+ let typeNext = -1,
+ pos;
+ if (i === iEnd) {
+ pos = posEnd;
+ } else {
+ typeNext = recognized[i];
+ pos = recognized[i + 1];
+ // If the next token is space or redirect, but there's another word
+ // break in this span, then we can handle that token the same way we
+ // would handle unquoted text because it isn't being attached to a
+ // placeholder.
+ typeNext >= TOKEN_SPACE && pos !== lastWordBreak && (typeNext = TOKEN_UNQUOTED);
+ }
+ const breakHere = pos === firstWordBreak || pos === lastWordBreak;
+ if (pos && (breakHere || typeNext !== type)) {
+ let value = type === TOKEN_QUOTE ? null : type === TOKEN_SEMI ? shellStringSemicolon : templateSpan.substring(tokenStart, pos);
+ if (type >= TOKEN_SEMI) {
+ // This branch handles semicolons, unquoted text, spaces, and
+ // redirects. shellStringSemicolon is already a formatSymbol object;
+ // the rest need to be wrapped.
+ type === TOKEN_SEMI || (value = new ShellStringUnquoted(value));
+ // We don't need to check placeholderCount here like we do below;
+ // that's only relevant during the first word break of the span, and
+ // because this iteration of the loop is processing the token that
+ // was checked for breaks in the previous iteration, it will have
+ // already been handled. For the same reason, prefix is guaranteed to
+ // be null.
+ if (contents.length) {
+ result.push(new ShellStringText(contents, null));
+ contents = [];
+ }
+ // Only spaces and redirects become prefixes, but not if they've been
+ // rewritten to unquoted above.
+ if (type >= TOKEN_SPACE) {
+ prefix = value;
+ onlyPrefixOnce = type === TOKEN_SPACE;
+ } else {
+ result.push(value);
+ }
+ } else {
+ contents.push(value);
+ }
+ tokenStart = pos;
+ }
+ if (breakHere) {
+ if (placeholderCount) {
+ result.push({
+ contents,
+ placeholderCount,
+ prefix,
+ onlyPrefixOnce
+ });
+ } else {
+ // There's no prefix to handle in this branch; a prefix prior to this
+ // span would mean placeholderCount > 0, and a prefix in this span
+ // can't be created because spaces and redirects get rewritten to
+ // unquoted before the last word break.
+ contents.length && result.push(new ShellStringText(contents, null));
+ }
+ placeholderCount = 0;
+ prefix = null;
+ onlyPrefixOnce = false;
+ contents = [];
+ }
+ type = typeNext;
+ }
+ }
+ if (quote) {
+ throw new SyntaxError(`String is missing a ${String.fromCharCode(quote)} character`);
+ }
+ return result;
+});
+const CHAR_SEMI = ';'.charCodeAt();
+const CHAR_SQUO = "'".charCodeAt();
+const CHAR_DQUO = '"'.charCodeAt();
+const reQuotation1 = sticky("[^']+");
+const reQuotation2 = sticky('[^"]+');
+const reText = sticky('[^\\s"#$&\'();<>\\\\`|]+|([#$&()\\\\`|]+)');
+const reRedirectOrSpace = sticky('(\\s*\\d*[<>]+\\s*)|\\s+');
+
+class BitSet {
+ constructor() {
+ this.vector = new Int32Array(1);
+ }
+ has(n) {
+ return (this.vector[n >>> 5] & 1 << n) !== 0;
+ }
+ add(n) {
+ const i = n >>> 5,
+ requiredLength = i + 1;
+ let vector = this.vector,
+ _vector = vector,
+ length = _vector.length;
+ if (requiredLength > length) {
+ while (requiredLength > (length *= 2));
+ const oldValues = vector;
+ vector = new Int32Array(length);
+ vector.set(oldValues);
+ this.vector = vector;
+ }
+ vector[i] |= 1 << n;
+ }
+}
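
JavaScript shift counts are taken modulo 32, so `1 << n` lands on the right bit of the word selected by `n >>> 5`. The class is internal (`evaluate` below uses it to track which interpolated values still need metacharacter testing), so this quick sanity check is illustrative only:

```javascript
const bits = new BitSet();
bits.add(1);
bits.add(33); // grows the backing Int32Array; 33 maps to bit 1 of word 1
console.log(bits.has(1), bits.has(33), bits.has(2)); // true true false
```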
+
+function evaluate(template, values) {
+ values = values.map(toStringishArray);
+ const children = [];
+ let valuesStart = 0;
+ for (let i = 0, iMax = template.length; i < iMax; i++) {
+ const word = template[i];
+ if (formatSymbol in word) {
+ children.push(word);
+ continue;
+ }
+ const contents = word.contents,
+ placeholderCount = word.placeholderCount,
+ prefix = word.prefix,
+ onlyPrefixOnce = word.onlyPrefixOnce;
+ const kMax = contents.length;
+ const valuesEnd = valuesStart + placeholderCount;
+ const tuples = cartesianProduct(values, valuesStart, valuesEnd);
+ valuesStart = valuesEnd;
+ for (let j = 0, jMax = tuples.length; j < jMax; j++) {
+ const needSpace = j > 0;
+ const tuple = tuples[j];
+ (needSpace || prefix) && children.push(needSpace && (onlyPrefixOnce || !prefix) ? unquotedSpace : prefix);
+ let interpolatedContents = [];
+ let untested = null;
+ let quoting = false;
+ let tupleIndex = 0;
+ for (let k = 0; k < kMax; k++) {
+ const content = contents[k];
+ if (content === PLACEHOLDER) {
+ const value = tuple[tupleIndex++];
+ if (quoting) {
+ interpolatedContents.push(value);
+ } else {
+ if (isObject(value) && formatSymbol in value) {
+ if (interpolatedContents.length) {
+ children.push(new ShellStringText(interpolatedContents, untested));
+ interpolatedContents = [];
+ untested = null;
+ }
+ children.push(value);
+ } else {
+ (untested || (untested = new BitSet())).add(interpolatedContents.length);
+ interpolatedContents.push(value);
+ }
+ }
+ } else {
+ interpolatedContents.push(content);
+ content === null && (quoting = !quoting);
+ }
+ }
+ if (interpolatedContents.length) {
+ children.push(new ShellStringText(interpolatedContents, untested));
+ }
+ }
+ }
+ return children;
+}
+const primToStringish = value => value == null ? '' + value : value;
+function toStringishArray(value) {
+ let array;
+ switch (true) {
+ default:
+ if (isObject(value)) {
+ if (Array.isArray(value)) {
+ array = value;
+ break;
+ }
+ if (Symbol.iterator in value) {
+ array = Array.from(value);
+ break;
+ }
+ }
+ array = [value];
+ }
+ return array.map(primToStringish);
+}
+function cartesianProduct(arrs, start, end) {
+ const size = end - start;
+ let resultLength = 1;
+ for (let i = start; i < end; i++) {
+ resultLength *= arrs[i].length;
+ }
+ if (resultLength > 1e6) {
+ throw new RangeError("Far too many elements to interpolate");
+ }
+ const result = new Array(resultLength);
+ const indices = new Array(size).fill(0);
+ for (let i = 0; i < resultLength; i++) {
+ const value = result[i] = new Array(size);
+ for (let j = 0; j < size; j++) {
+ value[j] = arrs[j + start][indices[j]];
+ }
+ for (let j = size - 1; j >= 0; j--) {
+ if (++indices[j] < arrs[j + start].length) break;
+ indices[j] = 0;
+ }
+ }
+ return result;
+}
+const unquotedSpace = new ShellStringUnquoted(' ');
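
`cartesianProduct` drives the fan-out of array interpolations: each word is emitted once per combination of the array values it contains (the `rimraf ${names}.txt` example in the `sh` docs below is the single-array case). The function is not exported, so this worked call is illustrative only:

```javascript
console.log(cartesianProduct([['a', 'b'], ['1', '2']], 0, 2));
// => [ [ 'a', '1' ], [ 'a', '2' ], [ 'b', '1' ], [ 'b', '2' ] ]
```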
+
+/**
+ * A ShellString represents a shell command after it has been interpolated, but
+ * before it has been formatted for a particular platform. ShellStrings are
+ * useful if you want to prepare a command for a different platform than the
+ * current one, for instance.
+ *
+ * To create a ShellString, use `ShellString.sh` the same way you would use
+ * top-level `sh`.
+ */
+class ShellString {
+ /** @hideconstructor */
+ constructor(children) {
+ this.children = children;
+ }
+ /**
+ * `ShellString.sh` is a template tag just like `sh`; the only difference is
+ * that this function returns a ShellString which has not yet been formatted
+ * into a String.
+ * @returns {ShellString}
+ * @function sh
+ * @static
+ * @memberof ShellString
+ */
+ static sh(templateSpans) {
+ for (var _len = arguments.length, values = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+ values[_key - 1] = arguments[_key];
+ }
+ return new ShellString(evaluate(parse(templateSpans), values));
+ }
+ /**
+ * A method to format a ShellString into a regular String formatted for a
+ * particular platform.
+ *
+ * @param {String} [platform] a value that `process.platform` might take:
+ * `'win32'`, `'linux'`, etc.; determines how the string is to be formatted.
+ * When omitted, effectively the same as `process.platform`.
+ * @returns {String}
+ */
+ toString(platform) {
+ return this[formatSymbol](Formatter.for(platform));
+ }
+ [formatSymbol](formatter) {
+ let context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : formatter.createContext(this);
+ return this.children.map(child => child[formatSymbol](formatter, context)).join('');
+ }
+ [preformatSymbol](context) {
+ const children = this.children;
+ for (let i = 0, iMax = children.length; i < iMax; i++) {
+ const child = children[i];
+ if (preformatSymbol in child) {
+ child[preformatSymbol](context);
+ }
+ }
+ }
+}
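
As the doc comment says, the point of ShellString over the plain `sh` tag is deferred formatting: interpolate once, then render for whichever platform you need. A minimal sketch (the exact output depends on the platform quoting rules defined earlier in this file):

```javascript
const cmd = ShellString.sh`script --title=${'"this" & "that"'}`;
console.log(cmd.toString());        // formatted for the current process.platform
console.log(cmd.toString('win32')); // the same command, quoted/escaped for cmd.exe
console.log(cmd.toString('linux')); // ...and for POSIX shells
```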
+
+/**
+ * A Windows-specific version of {@link quoteForShell}.
+ * @param {String} text to be quoted
+ * @param {Boolean} [forceQuote] whether to always add quotes even if the string
+ * is already safe. Defaults to `false`.
+ */
+
+/**
+ * A Unix-specific version of {@link quoteForShell}.
+ * @param {String} text to be quoted
+ * @param {Boolean} [forceQuote] whether to always add quotes even if the string
+ * is already safe. Defaults to `false`.
+ */
+
+/**
+ * Quotes a string for injecting into a shell command.
+ *
+ * This function is exposed for some hypothetical case when the `sh` DSL simply
+ * won't do; `sh` is expected to be the more convenient option almost always.
+ * Compare:
+ *
+ * ```javascript
+ * console.log('cmd' + args.map(a => ' ' + quoteForShell(a)).join(''));
+ * console.log(sh`cmd ${args}`); // same as above
+ *
+ * console.log('cmd' + args.map(a => ' ' + quoteForShell(a, true)).join(''));
+ * console.log(sh`cmd "${args}"`); // same as above
+ * ```
+ *
+ * Additionally, on Windows, `sh` checks the entire command string for pipes,
+ * which subtly change how arguments need to be quoted. If your commands may
+ * involve pipes, you are strongly encouraged to use `sh` and not try to roll
+ * your own with `quoteForShell`.
+ *
+ * @param {String} text to be quoted
+ * @param {Boolean} [forceQuote] whether to always add quotes even if the string
+ * is already safe. Defaults to `false`.
+ * @param {String} [platform] a value that `process.platform` might take:
+ * `'win32'`, `'linux'`, etc.; determines how the string is to be formatted.
+ * When omitted, effectively the same as `process.platform`.
+ *
+ * @returns {String} a string that is safe for the current (or specified)
+ * platform.
+ */
+function quoteForShell(text, forceQuote, platform) {
+ return Formatter.for(platform).quote(text, forceQuote);
+}
+
+/**
+ * A string template tag for safely constructing cross-platform shell commands.
+ *
+ * An `sh` template is not actually treated as a literal string to be
+ * interpolated; instead, it is a tiny DSL designed to make working with shell
+ * strings safe, simple, and straightforward. To get started quickly, see the
+ * examples below. {@link #the-sh-dsl More detailed documentation} is available
+ * further down.
+ *
+ * @name sh
+ * @example
+ * const title = '"this" & "that"';
+ * sh`script --title=${title}`; // => "script '--title=\"this\" & \"that\"'"
+ * // Note: these examples show results for non-Windows platforms.
+ * // On Windows, the above would instead be
+ * // 'script ^^^"--title=\\^^^"this\\^^^" ^^^& \\^^^"that\\^^^"^^^"'.
+ *
+ * const names = ['file1', 'file 2'];
+ * sh`rimraf ${names}.txt`; // => "rimraf file1.txt 'file 2.txt'"
+ *
+ * const cmd1 = ['cat', 'file 1.txt', 'file 2.txt'];
+ * const cmd2 = ['use-input', '-abc'];
+ * sh`${cmd1}|${cmd2}`; // => "cat 'file 1.txt' 'file 2.txt'|use-input -abc"
+ *
+ * @returns {String} - a string formatted for the platform Node is currently
+ * running on.
+ */
+const sh = function () {
+ return ShellString.sh.apply(ShellString, arguments).toString();
+};
+
+/**
+ * This function permits raw strings to be interpolated into a `sh` template.
+ *
+ * **IMPORTANT**: If you're using Puka due to security concerns, make sure you
+ * don't pass any untrusted content to `unquoted`. This may be obvious, but
+ * stray punctuation in an `unquoted` section can compromise the safety of the
+ * entire shell command.
+ *
+ * @param value - any value (it will be treated as a string)
+ *
+ * @example
+ * const both = true;
+ * sh`foo ${unquoted(both ? '&&' : '||')} bar`; // => 'foo && bar'
+ */
+const unquoted = value => new ShellStringUnquoted(value);
+
+exports.Formatter = Formatter;
+exports.ShellString = ShellString;
+exports.ShellStringText = ShellStringText;
+exports.ShellStringUnquoted = ShellStringUnquoted;
+exports.quoteForCmd = quoteForCmd;
+exports.quoteForSh = quoteForSh;
+exports.quoteForShell = quoteForShell;
+exports.sh = sh;
+exports.shellStringSemicolon = shellStringSemicolon;
+exports.formatSymbol = formatSymbol;
+exports.preformatSymbol = preformatSymbol;
+exports.unquoted = unquoted;
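
A short consumer-side sketch of the exports above, assuming `require('puka')` resolves to this file; the expected output follows the non-Windows examples in the doc comments:

```javascript
const { sh, unquoted, quoteForShell } = require('puka');

const names = ['file1', 'file 2'];
console.log(sh`rimraf ${names}.txt ${unquoted('&&')} echo done`);
// on non-Windows platforms: rimraf file1.txt 'file 2.txt' && echo done

console.log(quoteForShell('file 2.txt', false, 'linux'));
// quoted because of the space; already-safe strings pass through unchanged
```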
diff --git a/deps/npm/node_modules/puka/package.json b/deps/npm/node_modules/puka/package.json
new file mode 100644
index 0000000000..41798dc249
--- /dev/null
+++ b/deps/npm/node_modules/puka/package.json
@@ -0,0 +1,38 @@
+{
+ "name": "puka",
+ "version": "1.0.1",
+ "description": "A cross-platform library for safely passing strings through shells",
+ "keywords": [
+ "args",
+ "arguments",
+ "cmd",
+ "command",
+ "command-line",
+ "cross-platform",
+ "escape",
+ "escaping",
+ "exec",
+ "linux",
+ "mac",
+ "macos",
+ "osx",
+ "quote",
+ "quoting",
+ "sh",
+ "shell",
+ "spawn",
+ "unix",
+ "win",
+ "win32",
+ "windows"
+ ],
+ "homepage": "https://gitlab.com/rhendric/puka",
+ "bugs": "https://gitlab.com/rhendric/puka/issues",
+ "license": "MIT",
+ "author": "Ryan Hendrickson <ryan.hendrickson@alum.mit.edu>",
+ "repository": "gitlab:rhendric/puka",
+ "dependencies": {},
+ "engines": {
+ "node": ">=4"
+ }
+} \ No newline at end of file
diff --git a/deps/npm/package.json b/deps/npm/package.json
index 5dfcd6807f..aa6e5a7d34 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
{
- "version": "7.0.11",
+ "version": "7.0.12",
"name": "npm",
"description": "a package manager for JavaScript",
"keywords": [
@@ -45,7 +45,7 @@
"@npmcli/arborist": "^1.0.11",
"@npmcli/ci-detect": "^1.2.0",
"@npmcli/config": "^1.2.1",
- "@npmcli/run-script": "^1.7.5",
+ "@npmcli/run-script": "^1.8.0",
"abbrev": "~1.1.1",
"ansicolors": "~0.3.2",
"ansistyles": "~0.1.3",
@@ -189,7 +189,7 @@
"jsdom": "^16.4.0",
"marked-man": "^0.7.0",
"require-inject": "^1.4.4",
- "tap": "^14.10.8",
+ "tap": "^14.11.0",
"yaml": "^1.10.0"
},
"scripts": {
@@ -205,9 +205,11 @@
"posttest": "npm run lint",
"eslint": "eslint",
"lint": "npm run eslint -- \"lib/**/*.js\"",
+ "linttest": "npm run eslint -- test/lib test/bin --fix",
"lintfix": "npm run lint -- --fix",
"prelint": "rimraf test/npm_cache*",
- "resetdeps": "bash scripts/resetdeps.sh"
+ "resetdeps": "bash scripts/resetdeps.sh",
+ "prepublishOnly": "npm run lint && npm run linttest"
},
"//": [
"XXX temporarily only run unit tests while v7 beta is in progress",
diff --git a/deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js b/deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js
new file mode 100644
index 0000000000..25015aab65
--- /dev/null
+++ b/deps/npm/tap-snapshots/test-lib-init.js-TAP.test.js
@@ -0,0 +1,19 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports[`test/lib/init.js TAP classic npm init no args > should print helper info 1`] = `
+This utility will walk you through creating a package.json file.
+It only covers the most common items, and tries to guess sensible defaults.
+
+See \`npm help init\` for definitive documentation on these fields
+and exactly what they do.
+
+Use \`npm install <pkg>\` afterwards to install a package and
+save it as a dependency in the package.json file.
+
+Press ^C at any time to quit.
+`
diff --git a/deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js b/deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js
new file mode 100644
index 0000000000..a82905a399
--- /dev/null
+++ b/deps/npm/tap-snapshots/test-lib-utils-reify-finish.js-TAP.test.js
@@ -0,0 +1,15 @@
+/* IMPORTANT
+ * This snapshot file is auto-generated, but designed for humans.
+ * It should be checked into source control and tracked carefully.
+ * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
+ * Make sure to inspect the output below. Do not ignore changes!
+ */
+'use strict'
+exports[`test/lib/utils/reify-finish.js TAP should write if everything above passes > written config 1`] = `
+hasBuiltinConfig=true
+x=y
+
+[nested]
+foo=bar
+
+`
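
The snapshot pins an ini-style rendering of a nested config object. Purely to illustrate that shape — this is not npm's implementation, and the keys are taken from the snapshot itself:

```javascript
// Sketch: flatten { hasBuiltinConfig: true, x: 'y', nested: { foo: 'bar' } }
// into the ini-ish text the snapshot above expects.
const toIni = obj => {
  const top = [];
  const sections = [];
  for (const [key, value] of Object.entries(obj)) {
    if (value && typeof value === 'object')
      sections.push(`[${key}]`, ...Object.entries(value).map(([k, v]) => `${k}=${v}`), '');
    else
      top.push(`${key}=${value}`);
  }
  return top.concat('', sections).join('\n');
};
console.log(toIni({ hasBuiltinConfig: true, x: 'y', nested: { foo: 'bar' } }));
```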
diff --git a/deps/npm/test/bin/npm-cli.js b/deps/npm/test/bin/npm-cli.js
index b68d291858..bcca99c8c8 100644
--- a/deps/npm/test/bin/npm-cli.js
+++ b/deps/npm/test/bin/npm-cli.js
@@ -5,6 +5,6 @@ t.test('loading the bin calls the implementation', t => {
'../../lib/cli.js': proc => {
t.equal(proc, process, 'called implementation with process object')
t.end()
- }
+ },
})
})
diff --git a/deps/npm/test/bin/npx-cli.js b/deps/npm/test/bin/npx-cli.js
index fc85f63668..2b7b488297 100644
--- a/deps/npm/test/bin/npx-cli.js
+++ b/deps/npm/test/bin/npx-cli.js
@@ -64,7 +64,7 @@ t.test('use a bunch of deprecated switches and options', t => {
'--shell-auto-fallback',
'--ignore-existing',
'-q',
- 'foobar'
+ 'foobar',
]
const expect = [
@@ -78,18 +78,18 @@ t.test('use a bunch of deprecated switches and options', t => {
'--loglevel',
'warn',
'--',
- 'foobar'
+ 'foobar',
]
requireInject(npx, { [cli]: () => {} })
t.strictSame(process.argv, expect)
t.strictSame(logs, [
- [ 'npx: the --npm argument has been removed.' ],
- [ 'npx: the --node-arg argument has been removed.' ],
- [ 'npx: the --n argument has been removed.' ],
- [ 'npx: the --always-spawn argument has been removed.' ],
- [ 'npx: the --shell-auto-fallback argument has been removed.' ],
- [ 'npx: the --ignore-existing argument has been removed.' ],
- [ 'See `npm help exec` for more information' ]
+ ['npx: the --npm argument has been removed.'],
+ ['npx: the --node-arg argument has been removed.'],
+ ['npx: the --n argument has been removed.'],
+ ['npx: the --always-spawn argument has been removed.'],
+ ['npx: the --shell-auto-fallback argument has been removed.'],
+ ['npx: the --ignore-existing argument has been removed.'],
+ ['See `npm help exec` for more information'],
])
t.end()
})
diff --git a/deps/npm/test/lib/access.js b/deps/npm/test/lib/access.js
index 5d5a910f94..3063b6c532 100644
--- a/deps/npm/test/lib/access.js
+++ b/deps/npm/test/lib/access.js
@@ -3,8 +3,8 @@ const requireInject = require('require-inject')
const emptyMock = requireInject('../../lib/access.js', {
'../../lib/npm.js': {
- flatOptions: {}
- }
+ flatOptions: {},
+ },
})
test('completion', t => {
@@ -27,7 +27,7 @@ test('completion', t => {
'ls-collaborators',
'edit',
'2fa-required',
- '2fa-not-required'
+ '2fa-not-required',
])
testComp(['npm', 'access', 'grant'], ['read-only', 'read-write'])
@@ -75,7 +75,7 @@ test('edit', (t) => {
access([
'edit',
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.match(
err,
@@ -89,14 +89,14 @@ test('edit', (t) => {
test('access public on unscoped package', (t) => {
const prefix = t.testdir({
'package.json': JSON.stringify({
- name: 'npm-access-public-pkg'
- })
+ name: 'npm-access-public-pkg',
+ }),
})
const access = requireInject('../../lib/access.js', {
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
- 'public'
+ 'public',
], (err) => {
t.match(
err,
@@ -111,10 +111,10 @@ test('access public on scoped package', (t) => {
t.plan(4)
const name = '@scoped/npm-access-public-pkg'
const prefix = t.testdir({
- 'package.json': JSON.stringify({ name })
+ 'package.json': JSON.stringify({ name }),
})
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
public: (pkg, { registry }) => {
t.equal(pkg, name, 'should use pkg name ref')
t.equal(
@@ -123,17 +123,17 @@ test('access public on scoped package', (t) => {
'should forward correct options'
)
return true
- }
+ },
},
'../../lib/npm.js': {
flatOptions: {
- registry: 'https://registry.npmjs.org'
+ registry: 'https://registry.npmjs.org',
},
- prefix
- }
+ prefix,
+ },
})
access([
- 'public'
+ 'public',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access public on scoped package')
@@ -142,13 +142,13 @@ test('access public on scoped package', (t) => {
test('access public on missing package.json', (t) => {
const prefix = t.testdir({
- 'node_modules': {}
+ node_modules: {},
})
const access = requireInject('../../lib/access.js', {
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
- 'public'
+ 'public',
], (err) => {
t.match(
err,
@@ -162,13 +162,13 @@ test('access public on missing package.json', (t) => {
test('access public on invalid package.json', (t) => {
const prefix = t.testdir({
'package.json': '{\n',
- 'node_modules': {}
+ node_modules: {},
})
const access = requireInject('../../lib/access.js', {
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
- 'public'
+ 'public',
], (err) => {
t.match(
err,
@@ -182,14 +182,14 @@ test('access public on invalid package.json', (t) => {
test('access restricted on unscoped package', (t) => {
const prefix = t.testdir({
'package.json': JSON.stringify({
- name: 'npm-access-restricted-pkg'
- })
+ name: 'npm-access-restricted-pkg',
+ }),
})
const access = requireInject('../../lib/access.js', {
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
- 'restricted'
+ 'restricted',
], (err) => {
t.match(
err,
@@ -204,10 +204,10 @@ test('access restricted on scoped package', (t) => {
t.plan(4)
const name = '@scoped/npm-access-restricted-pkg'
const prefix = t.testdir({
- 'package.json': JSON.stringify({ name })
+ 'package.json': JSON.stringify({ name }),
})
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
restricted: (pkg, { registry }) => {
t.equal(pkg, name, 'should use pkg name ref')
t.equal(
@@ -216,17 +216,17 @@ test('access restricted on scoped package', (t) => {
'should forward correct options'
)
return true
- }
+ },
},
'../../lib/npm.js': {
flatOptions: {
- registry: 'https://registry.npmjs.org'
+ registry: 'https://registry.npmjs.org',
},
- prefix
- }
+ prefix,
+ },
})
access([
- 'restricted'
+ 'restricted',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access restricted on scoped package')
@@ -235,13 +235,13 @@ test('access restricted on scoped package', (t) => {
test('access restricted on missing package.json', (t) => {
const prefix = t.testdir({
- 'node_modules': {}
+ node_modules: {},
})
const access = requireInject('../../lib/access.js', {
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
- 'restricted'
+ 'restricted',
], (err) => {
t.match(
err,
@@ -255,13 +255,13 @@ test('access restricted on missing package.json', (t) => {
test('access restricted on invalid package.json', (t) => {
const prefix = t.testdir({
'package.json': '{\n',
- 'node_modules': {}
+ node_modules: {},
})
const access = requireInject('../../lib/access.js', {
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
- 'restricted'
+ 'restricted',
], (err) => {
t.match(
err,
@@ -275,21 +275,21 @@ test('access restricted on invalid package.json', (t) => {
test('access grant read-only', (t) => {
t.plan(5)
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
grant: (spec, team, permissions) => {
t.equal(spec, '@scoped/another', 'should use expected spec')
t.equal(team, 'myorg:myteam', 'should use expected team')
t.equal(permissions, 'read-only', 'should forward permissions')
return true
- }
+ },
},
- '../../lib/npm.js': {}
+ '../../lib/npm.js': {},
})
access([
'grant',
'read-only',
'myorg:myteam',
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access grant read-only')
@@ -299,21 +299,21 @@ test('access grant read-only', (t) => {
test('access grant read-write', (t) => {
t.plan(5)
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
grant: (spec, team, permissions) => {
t.equal(spec, '@scoped/another', 'should use expected spec')
t.equal(team, 'myorg:myteam', 'should use expected team')
t.equal(permissions, 'read-write', 'should forward permissions')
return true
- }
+ },
},
- '../../lib/npm.js': {}
+ '../../lib/npm.js': {},
})
access([
'grant',
'read-write',
'myorg:myteam',
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access grant read-write')
@@ -324,24 +324,24 @@ test('access grant current cwd', (t) => {
t.plan(5)
const prefix = t.testdir({
'package.json': JSON.stringify({
- name: 'yargs'
- })
+ name: 'yargs',
+ }),
})
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
grant: (spec, team, permissions) => {
t.equal(spec, 'yargs', 'should use expected spec')
t.equal(team, 'myorg:myteam', 'should use expected team')
t.equal(permissions, 'read-write', 'should forward permissions')
return true
- }
+ },
},
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
'grant',
'read-write',
- 'myorg:myteam'
+ 'myorg:myteam',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access grant current cwd')
@@ -355,7 +355,7 @@ test('access grant others', (t) => {
'grant',
'rerere',
'myorg:myteam',
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.match(
err,
@@ -373,7 +373,7 @@ test('access grant missing team args', (t) => {
'grant',
'read-only',
undefined,
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.match(
err,
@@ -391,7 +391,7 @@ test('access grant malformed team arg', (t) => {
'grant',
'read-only',
'foo',
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.match(
err,
@@ -403,11 +403,9 @@ test('access grant malformed team arg', (t) => {
})
test('access 2fa-required/2fa-not-required', t => {
- let pkg
-
t.plan(2)
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
tfaRequired: (spec) => {
t.equal(spec, '@scope/pkg', 'should use expected spec')
return true
@@ -415,9 +413,9 @@ test('access 2fa-required/2fa-not-required', t => {
tfaNotRequired: (spec) => {
t.equal(spec, 'unscoped-pkg', 'should use expected spec')
return true
- }
+ },
},
- '../../lib/npm.js': {}
+ '../../lib/npm.js': {},
})
access(['2fa-required', '@scope/pkg'], er => {
@@ -434,19 +432,19 @@ test('access 2fa-required/2fa-not-required', t => {
test('access revoke', (t) => {
t.plan(4)
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
revoke: (spec, team) => {
t.equal(spec, '@scoped/another', 'should use expected spec')
t.equal(team, 'myorg:myteam', 'should use expected team')
return true
- }
+ },
},
- '../../lib/npm.js': {}
+ '../../lib/npm.js': {},
})
access([
'revoke',
'myorg:myteam',
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access revoke')
@@ -459,7 +457,7 @@ test('access revoke missing team args', (t) => {
access([
'revoke',
undefined,
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.match(
err,
@@ -476,7 +474,7 @@ test('access revoke malformed team arg', (t) => {
access([
'revoke',
'foo',
- '@scoped/another'
+ '@scoped/another',
], (err) => {
t.match(
err,
@@ -490,18 +488,18 @@ test('access revoke malformed team arg', (t) => {
test('npm access ls-packages with no team', (t) => {
t.plan(3)
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
lsPackages: (entity) => {
t.equal(entity, 'foo', 'should use expected entity')
return {}
- }
+ },
},
'../../lib/utils/get-identity.js': () => Promise.resolve('foo'),
'../../lib/utils/output.js': () => null,
- '../../lib/npm.js': {}
+ '../../lib/npm.js': {},
})
access([
- 'ls-packages'
+ 'ls-packages',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access ls-packages with no team')
@@ -511,18 +509,18 @@ test('npm access ls-packages with no team', (t) => {
test('access ls-packages on team', (t) => {
t.plan(3)
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
lsPackages: (entity) => {
t.equal(entity, 'myorg:myteam', 'should use expected entity')
return {}
- }
+ },
},
'../../lib/utils/output.js': () => null,
- '../../lib/npm.js': {}
+ '../../lib/npm.js': {},
})
access([
'ls-packages',
- 'myorg:myteam'
+ 'myorg:myteam',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access ls-packages on team')
@@ -533,21 +531,21 @@ test('access ls-collaborators on current', (t) => {
t.plan(3)
const prefix = t.testdir({
'package.json': JSON.stringify({
- name: 'yargs'
- })
+ name: 'yargs',
+ }),
})
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
lsCollaborators: (spec) => {
t.equal(spec, 'yargs', 'should use expected spec')
return {}
- }
+ },
},
'../../lib/utils/output.js': () => null,
- '../../lib/npm.js': { prefix }
+ '../../lib/npm.js': { prefix },
})
access([
- 'ls-collaborators'
+ 'ls-collaborators',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access ls-collaborators on current')
@@ -557,18 +555,18 @@ test('access ls-collaborators on current', (t) => {
test('access ls-collaborators on spec', (t) => {
t.plan(3)
const access = requireInject('../../lib/access.js', {
- 'libnpmaccess': {
+ libnpmaccess: {
lsCollaborators: (spec) => {
t.equal(spec, 'yargs', 'should use expected spec')
return {}
- }
+ },
},
'../../lib/utils/output.js': () => null,
- '../../lib/npm.js': {}
+ '../../lib/npm.js': {},
})
access([
'ls-collaborators',
- 'yargs'
+ 'yargs',
], (err) => {
t.ifError(err, 'npm access')
t.ok('should successfully access ls-packages with no team')
diff --git a/deps/npm/test/lib/adduser.js b/deps/npm/test/lib/adduser.js
index 22c7c49cfa..4e6a56fc19 100644
--- a/deps/npm/test/lib/adduser.js
+++ b/deps/npm/test/lib/adduser.js
@@ -8,7 +8,7 @@ let result = ''
const _flatOptions = {
authType: 'legacy',
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}
let failSave = false
@@ -21,14 +21,14 @@ const authDummy = () => Promise.resolve({
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
- }
+ alwaysAuth: false,
+ },
})
const deleteMock = (key, where) => {
deletedConfig = {
...deletedConfig,
- [key]: where
+ [key]: where,
}
}
const adduser = requireInject('../../lib/adduser.js', {
@@ -43,30 +43,30 @@ const adduser = requireInject('../../lib/adduser.js', {
config: {
delete: deleteMock,
get (key, where) {
- if (!where || where === 'user') {
+ if (!where || where === 'user')
return _flatOptions[key]
- }
},
getCredentialsByURI,
async save () {
- if (failSave) {
+ if (failSave)
throw new Error('error saving user config')
- }
},
set (key, value, where) {
setConfig = {
...setConfig,
[key]: {
value,
- where
- }
+ where,
+ },
}
},
- setCredentialsByURI
- }
+ setCredentialsByURI,
+ },
+ },
+ '../../lib/utils/output.js': msg => {
+ result = msg
},
- '../../lib/utils/output.js': msg => { result = msg },
- '../../lib/auth/legacy.js': authDummy
+ '../../lib/auth/legacy.js': authDummy,
})
test('simple login', (t) => {
@@ -90,7 +90,7 @@ test('simple login', (t) => {
_authtoken: 'user',
_authToken: 'user',
'//registry.npmjs.org/:-authtoken': undefined,
- '//registry.npmjs.org/:_authToken': 'user'
+ '//registry.npmjs.org/:_authToken': 'user',
},
'should delete token in user config'
)
@@ -101,7 +101,7 @@ test('simple login', (t) => {
'//registry.npmjs.org/:_password': { value: 'cA==', where: 'user' },
'//registry.npmjs.org/:username': { value: 'u', where: 'user' },
'//registry.npmjs.org/:email': { value: 'u@npmjs.org', where: 'user' },
- '//registry.npmjs.org/:always-auth': { value: false, where: 'user' }
+ '//registry.npmjs.org/:always-auth': { value: false, where: 'user' },
},
'should set expected user configs'
)
diff --git a/deps/npm/test/lib/audit.js b/deps/npm/test/lib/audit.js
index 4918cb2fc2..cc7379394b 100644
--- a/deps/npm/test/lib/audit.js
+++ b/deps/npm/test/lib/audit.js
@@ -5,7 +5,7 @@ const audit = require('../../lib/audit.js')
t.test('should audit using Arborist', t => {
let ARB_ARGS = null
let AUDIT_CALLED = false
- let REIFY_OUTPUT_CALLED = false
+ let REIFY_FINISH_CALLED = false
let AUDIT_REPORT_CALLED = false
let OUTPUT_CALLED = false
let ARB_OBJ = null
@@ -14,14 +14,14 @@ t.test('should audit using Arborist', t => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- json: false
+ json: false,
},
},
'npm-audit-report': () => {
AUDIT_REPORT_CALLED = true
return {
report: 'there are vulnerabilities',
- exitCode: 0
+ exitCode: 0,
}
},
'@npmcli/arborist': function (args) {
@@ -32,15 +32,15 @@ t.test('should audit using Arborist', t => {
this.auditReport = {}
}
},
- '../../lib/utils/reify-output.js': arb => {
- if (arb !== ARB_OBJ) {
+ '../../lib/utils/reify-finish.js': arb => {
+ if (arb !== ARB_OBJ)
throw new Error('got wrong object passed to reify-output')
- }
- REIFY_OUTPUT_CALLED = true
+
+ REIFY_FINISH_CALLED = true
},
'../../lib/utils/output.js': () => {
OUTPUT_CALLED = true
- }
+ },
})
t.test('audit', t => {
@@ -55,7 +55,7 @@ t.test('should audit using Arborist', t => {
t.test('audit fix', t => {
audit(['fix'], () => {
- t.equal(REIFY_OUTPUT_CALLED, true, 'called reify output')
+ t.equal(REIFY_FINISH_CALLED, true, 'called reify output')
t.end()
})
})
@@ -68,12 +68,12 @@ t.test('should audit - json', t => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- json: true
+ json: true,
},
},
'npm-audit-report': () => ({
report: 'there are vulnerabilities',
- exitCode: 0
+ exitCode: 0,
}),
'@npmcli/arborist': function () {
this.audit = () => {
@@ -81,7 +81,7 @@ t.test('should audit - json', t => {
}
},
'../../lib/utils/reify-output.js': () => {},
- '../../lib/utils/output.js': () => {}
+ '../../lib/utils/output.js': () => {},
})
audit([], (err) => {
@@ -100,11 +100,11 @@ t.test('report endpoint error', t => {
prefix: 'foo',
command: 'audit',
flatOptions: {
- json
+ json,
},
log: {
- warn: (...warning) => LOGS.push(warning)
- }
+ warn: (...warning) => LOGS.push(warning),
+ },
},
'npm-audit-report': () => {
throw new Error('should not call audit report when there are errors')
@@ -117,25 +117,25 @@ t.test('report endpoint error', t => {
method: 'POST',
uri: 'https://example.com/',
headers: {
- head: ['ers']
+ head: ['ers'],
},
statusCode: 420,
body: json ? { nope: 'lol' }
- : Buffer.from('i had a vuln but i eated it lol')
- }
+ : Buffer.from('i had a vuln but i eated it lol'),
+ },
}
}
},
'../../lib/utils/reify-output.js': () => {},
'../../lib/utils/output.js': (...msg) => {
OUTPUT.push(msg)
- }
+ },
}
// have to pass mocks to both to get the npm and output set right
const auditError = requireInject('../../lib/utils/audit-error.js', mocks)
const audit = requireInject('../../lib/audit.js', {
...mocks,
- '../../lib/utils/audit-error.js': auditError
+ '../../lib/utils/audit-error.js': auditError,
})
audit([], (err) => {
@@ -156,8 +156,8 @@ t.test('report endpoint error', t => {
' "nope": "lol"\n' +
' }\n' +
'}'
- : 'i had a vuln but i eated it lol'
- ]
+ : 'i had a vuln but i eated it lol',
+ ],
])
t.strictSame(LOGS, [['audit', 'hello, this didnt work']])
t.end()
@@ -170,8 +170,10 @@ t.test('report endpoint error', t => {
t.test('completion', t => {
t.test('fix', t => {
audit.completion({
- conf: { argv: { remain: ['npm', 'audit'] } }
+ conf: { argv: { remain: ['npm', 'audit'] } },
}, (err, res) => {
+ if (err)
+ throw err
const subcmd = res.pop()
t.equals('fix', subcmd, 'completes to fix')
t.end()
@@ -180,16 +182,17 @@ t.test('completion', t => {
t.test('subcommand fix', t => {
audit.completion({
- conf: { argv: { remain: ['npm', 'audit', 'fix'] } }
+ conf: { argv: { remain: ['npm', 'audit', 'fix'] } },
}, (err) => {
- t.notOk(err, 'no errors')
+ if (err)
+ throw err
t.end()
})
})
t.test('subcommand not recognized', t => {
audit.completion({
- conf: { argv: { remain: ['npm', 'audit', 'repare'] } }
+ conf: { argv: { remain: ['npm', 'audit', 'repare'] } },
}, (err) => {
t.ok(err, 'not recognized')
t.end()
diff --git a/deps/npm/test/lib/auth/legacy.js b/deps/npm/test/lib/auth/legacy.js
index 1607641d83..f926ae1306 100644
--- a/deps/npm/test/lib/auth/legacy.js
+++ b/deps/npm/test/lib/auth/legacy.js
@@ -10,20 +10,20 @@ const legacy = requireInject('../../../lib/auth/legacy.js', {
npmlog: {
info: (...msgs) => {
log += msgs.join(' ')
- }
+ },
},
'npm-profile': profile,
'../../../lib/utils/open-url.js': (url, msg, cb) => {
- if (url) {
+ if (url)
cb()
- } else {
+ else {
cb(Object.assign(
new Error('failed open url'),
{ code: 'ERROR' }
))
}
},
- '../../../lib/utils/read-user-info.js': read
+ '../../../lib/utils/read-user-info.js': read,
})
test('login using username/password with token result', async (t) => {
@@ -33,16 +33,16 @@ test('login using username/password with token result', async (t) => {
const {
message,
- newCreds
+ newCreds,
} = await legacy({
creds: {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
t.equal(
@@ -74,16 +74,16 @@ test('login using username/password with user info result', async (t) => {
const {
message,
- newCreds
+ newCreds,
} = await legacy({
creds: {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
t.equal(
@@ -98,7 +98,7 @@ test('login using username/password with user info result', async (t) => {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
'should return used credentials'
)
@@ -125,16 +125,16 @@ test('login otp requested', async (t) => {
const {
message,
- newCreds
+ newCreds,
} = await legacy({
creds: {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
t.equal(
@@ -165,10 +165,10 @@ test('login missing basic credential info', async (t) => {
legacy({
creds: {
username: 'u',
- password: 'p'
+ password: 'p',
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}),
{ code: 'ERROR' },
'should throw server response error'
@@ -195,16 +195,16 @@ test('create new user when user not found', async (t) => {
const {
message,
- newCreds
+ newCreds,
} = await legacy({
creds: {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
t.equal(
@@ -245,13 +245,13 @@ test('prompts for user info if required', async (t) => {
const {
message,
- newCreds
+ newCreds,
} = await legacy({
creds: {
- alwaysAuth: true
+ alwaysAuth: true,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
t.equal(
@@ -272,7 +272,7 @@ test('prompts for user info if required', async (t) => {
username: 'foo',
password: 'pass',
email: 'foo@npmjs.org',
- alwaysAuth: true
+ alwaysAuth: true,
},
'should return result from profile.login containing prompt info'
)
@@ -309,10 +309,10 @@ test('request otp when creating new user', async (t) => {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
log = ''
@@ -338,10 +338,10 @@ test('unknown error during user creation', async (t) => {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}),
{ code: 'ERROR' },
'should throw unknown error'
@@ -353,16 +353,18 @@ test('unknown error during user creation', async (t) => {
})
test('open url error', async (t) => {
- profile.login = async (opener, prompt, opts) => { await opener() }
+ profile.login = async (opener, prompt, opts) => {
+ await opener()
+ }
await t.rejects(
legacy({
creds: {
username: 'u',
- password: 'p'
+ password: 'p',
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}),
{ message: 'failed open url', code: 'ERROR' },
'should throw unknown error'
@@ -380,10 +382,10 @@ test('login no credentials provided', async (t) => {
username: undefined,
password: undefined,
email: undefined,
- alwaysAuth: undefined
+ alwaysAuth: undefined,
},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
t.equal(
@@ -404,10 +406,10 @@ test('scoped login', async (t) => {
username: 'u',
password: 'p',
email: 'u@npmjs.org',
- alwaysAuth: false
+ alwaysAuth: false,
},
registry: 'https://diff-registry.npmjs.org/',
- scope: 'myscope'
+ scope: 'myscope',
})
t.equal(
diff --git a/deps/npm/test/lib/auth/oauth.js b/deps/npm/test/lib/auth/oauth.js
index a8461d235e..82d478b52c 100644
--- a/deps/npm/test/lib/auth/oauth.js
+++ b/deps/npm/test/lib/auth/oauth.js
@@ -6,7 +6,7 @@ test('oauth login', (t) => {
const oauthOpts = {
creds: {},
registry: 'https://diff-registry.npmjs.org/',
- scope: 'myscope'
+ scope: 'myscope',
}
const oauth = requireInject('../../../lib/auth/oauth.js', {
@@ -18,9 +18,9 @@ test('oauth login', (t) => {
set: (key, value) => {
t.equal(key, 'sso-type', 'should define sso-type')
t.equal(value, 'oauth', 'should set sso-type to oauth')
- }
- }
- }
+ },
+ },
+ },
})
oauth(oauthOpts)
diff --git a/deps/npm/test/lib/auth/saml.js b/deps/npm/test/lib/auth/saml.js
index 3e0015bf39..87fa6688b5 100644
--- a/deps/npm/test/lib/auth/saml.js
+++ b/deps/npm/test/lib/auth/saml.js
@@ -6,7 +6,7 @@ test('saml login', (t) => {
const samlOpts = {
creds: {},
registry: 'https://diff-registry.npmjs.org/',
- scope: 'myscope'
+ scope: 'myscope',
}
const saml = requireInject('../../../lib/auth/saml.js', {
@@ -18,9 +18,9 @@ test('saml login', (t) => {
set: (key, value) => {
t.equal(key, 'sso-type', 'should define sso-type')
t.equal(value, 'saml', 'should set sso-type to saml')
- }
- }
- }
+ },
+ },
+ },
})
saml(samlOpts)
diff --git a/deps/npm/test/lib/auth/sso.js b/deps/npm/test/lib/auth/sso.js
index 0e04309c82..1fc04c64cd 100644
--- a/deps/npm/test/lib/auth/sso.js
+++ b/deps/npm/test/lib/auth/sso.js
@@ -5,7 +5,7 @@ let log = ''
let warn = ''
const _flatOptions = {
- ssoType: 'oauth'
+ ssoType: 'oauth',
}
const token = '24528a24f240'
const SSO_URL = 'https://registry.npmjs.org/{SSO_URL}'
@@ -18,17 +18,17 @@ const sso = requireInject('../../../lib/auth/sso.js', {
},
warn: (...msgs) => {
warn += msgs.join(' ')
- }
+ },
},
'npm-profile': profile,
'npm-registry-fetch': npmFetch,
'../../../lib/npm.js': {
- flatOptions: _flatOptions
+ flatOptions: _flatOptions,
},
'../../../lib/utils/open-url.js': (url, msg, cb) => {
- if (url) {
+ if (url)
cb()
- } else {
+ else {
cb(Object.assign(
new Error('failed open url'),
{ code: 'ERROR' }
@@ -36,15 +36,15 @@ const sso = requireInject('../../../lib/auth/sso.js', {
}
},
'../../../lib/utils/otplease.js': (opts, fn) => {
- if (opts) {
+ if (opts)
return fn({ ...opts, otp: '1234' })
- } else {
+ else {
throw Object.assign(
new Error('failed retrieving otp'),
{ code: 'ERROR' }
)
}
- }
+ },
})
test('empty login', async (t) => {
@@ -80,7 +80,7 @@ test('simple login', async (t) => {
otp: '1234',
registry: 'https://registry.npmjs.org/',
scope: '',
- ssoType: 'oauth'
+ ssoType: 'oauth',
},
'should use dummy password'
)
@@ -91,11 +91,11 @@ test('simple login', async (t) => {
const {
message,
- newCreds
+ newCreds,
} = await sso({
creds: {},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
t.equal(
@@ -160,7 +160,7 @@ test('polling retry', async (t) => {
await sso({
creds: {},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
})
log = ''
@@ -180,7 +180,7 @@ test('polling error', async (t) => {
sso({
creds: {},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}),
{ message: 'unknown error', code: 'ERROR' },
'should throw unknown error'
@@ -199,7 +199,7 @@ test('no token retrieved from loginCouch', async (t) => {
sso({
creds: {},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}),
{ message: 'no SSO token returned' },
'should throw no SSO token returned error'
@@ -217,7 +217,7 @@ test('no sso url retrieved from loginCouch', async (t) => {
sso({
creds: {},
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}),
{ message: 'no SSO URL returned by services' },
'should throw no SSO url returned error'
@@ -234,11 +234,11 @@ test('scoped login', async (t) => {
const {
message,
- newCreds
+ newCreds,
} = await sso({
creds: {},
registry: 'https://diff-registry.npmjs.org/',
- scope: 'myscope'
+ scope: 'myscope',
})
t.equal(
diff --git a/deps/npm/test/lib/bin.js b/deps/npm/test/lib/bin.js
index 05fc1e21e0..c5ed2a91b9 100644
--- a/deps/npm/test/lib/bin.js
+++ b/deps/npm/test/lib/bin.js
@@ -9,7 +9,7 @@ test('bin', (t) => {
'../../lib/npm.js': { bin: dir, flatOptions: { global: false } },
'../../lib/utils/output.js': (output) => {
t.equal(output, dir, 'prints the correct directory')
- }
+ },
})
bin([], (err) => {
@@ -35,7 +35,7 @@ test('bin -g', (t) => {
'../../lib/utils/path.js': [dir],
'../../lib/utils/output.js': (output) => {
t.equal(output, dir, 'prints the correct directory')
- }
+ },
})
bin([], (err) => {
@@ -61,7 +61,7 @@ test('bin -g (not in path)', (t) => {
'../../lib/utils/path.js': ['/not/my/dir'],
'../../lib/utils/output.js': (output) => {
t.equal(output, dir, 'prints the correct directory')
- }
+ },
})
bin([], (err) => {
diff --git a/deps/npm/test/lib/birthday.js b/deps/npm/test/lib/birthday.js
index 35255f97aa..21b60b4c79 100644
--- a/deps/npm/test/lib/birthday.js
+++ b/deps/npm/test/lib/birthday.js
@@ -38,6 +38,7 @@ test('birthday (nope again)', (t) => {
const d = new D()
return d[B[f]('Z2V0RnVsbFllYXI=', _6)[l]()]() + 1
}
+
[B[f]('Z2V0VVRDTW9udGg=', _6)[l]()] () {
return 9
}
@@ -66,6 +67,7 @@ test('birthday (yup)', (t) => {
[B[f]('Z2V0VVRDTW9udGg=', _6)[l]()] () {
return 8
}
+
[B[f]('Z2V0VVRDRGF0ZQ==', _6)[l]()] () {
return 29
}
diff --git a/deps/npm/test/lib/bugs.js b/deps/npm/test/lib/bugs.js
index 79d5089724..df64349878 100644
--- a/deps/npm/test/lib/bugs.js
+++ b/deps/npm/test/lib/bugs.js
@@ -5,40 +5,40 @@ const pacote = {
manifest: async (spec, options) => {
return spec === 'nobugs' ? {
name: 'nobugs',
- version: '1.2.3'
- }
- : spec === 'bugsurl' ? {
- name: 'bugsurl',
- version: '1.2.3',
- bugs: 'https://bugzilla.localhost/bugsurl'
- }
- : spec === 'bugsobj' ? {
- name: 'bugsobj',
- version: '1.2.3',
- bugs: { url: 'https://bugzilla.localhost/bugsobj' }
- }
- : spec === 'bugsobj-nourl' ? {
- name: 'bugsobj-nourl',
- version: '1.2.3',
- bugs: { no: 'url here' }
- }
- : spec === 'repourl' ? {
- name: 'repourl',
- version: '1.2.3',
- repository: 'https://github.com/foo/repourl'
- }
- : spec === 'repoobj' ? {
- name: 'repoobj',
version: '1.2.3',
- repository: { url: 'https://github.com/foo/repoobj' }
}
- : spec === '.' ? {
- name: 'thispkg',
- version: '1.2.3',
- bugs: 'https://example.com'
- }
- : null
- }
+ : spec === 'bugsurl' ? {
+ name: 'bugsurl',
+ version: '1.2.3',
+ bugs: 'https://bugzilla.localhost/bugsurl',
+ }
+ : spec === 'bugsobj' ? {
+ name: 'bugsobj',
+ version: '1.2.3',
+ bugs: { url: 'https://bugzilla.localhost/bugsobj' },
+ }
+ : spec === 'bugsobj-nourl' ? {
+ name: 'bugsobj-nourl',
+ version: '1.2.3',
+ bugs: { no: 'url here' },
+ }
+ : spec === 'repourl' ? {
+ name: 'repourl',
+ version: '1.2.3',
+ repository: 'https://github.com/foo/repourl',
+ }
+ : spec === 'repoobj' ? {
+ name: 'repoobj',
+ version: '1.2.3',
+ repository: { url: 'https://github.com/foo/repoobj' },
+ }
+ : spec === '.' ? {
+ name: 'thispkg',
+ version: '1.2.3',
+ bugs: 'https://example.com',
+ }
+ : null
+ },
}
// keep a tally of which urls got opened
@@ -51,7 +51,7 @@ const openUrl = (url, errMsg, cb) => {
const bugs = requireInject('../../lib/bugs.js', {
pacote,
- '../../lib/utils/open-url.js': openUrl
+ '../../lib/utils/open-url.js': openUrl,
})
t.test('completion', t => {
@@ -70,7 +70,7 @@ t.test('open bugs urls', t => {
bugsobj: 'https://bugzilla.localhost/bugsobj',
repourl: 'https://github.com/foo/repourl/issues',
repoobj: 'https://github.com/foo/repoobj/issues',
- '.': 'https://example.com'
+ '.': 'https://example.com',
}
const keys = Object.keys(expect)
t.plan(keys.length)
diff --git a/deps/npm/test/lib/cache.js b/deps/npm/test/lib/cache.js
index 9c27386ed8..2e9ad346bb 100644
--- a/deps/npm/test/lib/cache.js
+++ b/deps/npm/test/lib/cache.js
@@ -5,12 +5,12 @@ const path = require('path')
const usageUtil = () => 'usage instructions'
const flatOptions = {
- force: false
+ force: false,
}
const npm = {
flatOptions,
- cache: '/fake/path'
+ cache: '/fake/path',
}
let rimrafPath = ''
@@ -23,22 +23,22 @@ let logOutput = []
const npmlog = {
silly: (...args) => {
logOutput.push(['silly', ...args])
- }
+ },
}
let tarballStreamSpec = ''
let tarballStreamOpts = {}
const pacote = {
tarball: {
- stream: (spec, cb, opts) => {
+ stream: (spec, handler, opts) => {
tarballStreamSpec = spec
tarballStreamOpts = opts
- return cb({
+ return handler({
resume: () => {},
- promise: () => Promise.resolve()
+ promise: () => Promise.resolve(),
})
- }
- }
+ },
+ },
}
let outputOutput = []
@@ -46,18 +46,16 @@ const output = (msg) => {
outputOutput.push(msg)
}
-let cacacheVerifyPath = ''
const cacacheVerifyStats = {
keptSize: 100,
verifiedContent: 1,
totalEntries: 1,
- runTime: { total: 2000 }
+ runTime: { total: 2000 },
}
const cacache = {
verify: (path) => {
- cacacheVerifyPath = path
return cacacheVerifyStats
- }
+ },
}
const mocks = {
@@ -67,7 +65,7 @@ const mocks = {
rimraf,
'../../lib/npm.js': npm,
'../../lib/utils/output.js': output,
- '../../lib/utils/usage.js': usageUtil
+ '../../lib/utils/usage.js': usageUtil,
}
const cache = requireInject('../../lib/cache.js', mocks)
@@ -132,7 +130,7 @@ t.test('cache add pkg only', t => {
t.ifError(err)
t.strictSame(logOutput, [
['silly', 'cache add', 'args', ['mypkg']],
- ['silly', 'cache add', 'spec', 'mypkg']
+ ['silly', 'cache add', 'spec', 'mypkg'],
], 'logs correctly')
t.equal(tarballStreamSpec, 'mypkg', 'passes the correct spec to pacote')
t.same(tarballStreamOpts, flatOptions, 'passes the correct options to pacote')
@@ -151,7 +149,7 @@ t.test('cache add pkg w/ spec modifier', t => {
t.ifError(err)
t.strictSame(logOutput, [
['silly', 'cache add', 'args', ['mypkg', 'latest']],
- ['silly', 'cache add', 'spec', 'mypkg@latest']
+ ['silly', 'cache add', 'spec', 'mypkg@latest'],
], 'logs correctly')
t.equal(tarballStreamSpec, 'mypkg@latest', 'passes the correct spec to pacote')
t.same(tarballStreamOpts, flatOptions, 'passes the correct options to pacote')
@@ -162,7 +160,6 @@ t.test('cache add pkg w/ spec modifier', t => {
t.test('cache verify', t => {
t.teardown(() => {
outputOutput = []
- cacacheVerifyPath = ''
})
cache(['verify'], err => {
@@ -171,7 +168,7 @@ t.test('cache verify', t => {
`Cache verified and compressed (${path.join(npm.cache, '_cacache')})`,
'Content verified: 1 (100 bytes)',
'Index entries: 1',
- 'Finished in 2s'
+ 'Finished in 2s',
], 'prints correct output')
t.end()
})
@@ -186,7 +183,6 @@ t.test('cache verify w/ extra output', t => {
t.teardown(() => {
npm.cache = '/fake/path'
outputOutput = []
- cacacheVerifyPath = ''
delete cacacheVerifyStats.badContentCount
delete cacacheVerifyStats.reclaimedCount
delete cacacheVerifyStats.reclaimedSize
@@ -202,7 +198,7 @@ t.test('cache verify w/ extra output', t => {
'Content garbage-collected: 2 (200 bytes)',
'Missing content: 3',
'Index entries: 1',
- 'Finished in 2s'
+ 'Finished in 2s',
], 'prints correct output')
t.end()
})
@@ -221,7 +217,7 @@ t.test('cache completion', t => {
testComp(['npm', 'cache'], [
'add',
'clean',
- 'verify'
+ 'verify',
])
testComp(['npm', 'cache', 'add'], [])
diff --git a/deps/npm/test/lib/ci.js b/deps/npm/test/lib/ci.js
index 43ad2783b0..8ddb8f8aad 100644
--- a/deps/npm/test/lib/ci.js
+++ b/deps/npm/test/lib/ci.js
@@ -3,7 +3,6 @@ const util = require('util')
const readdir = util.promisify(fs.readdir)
const { test } = require('tap')
-const { resolve } = require('path')
const requireInject = require('require-inject')
@@ -12,9 +11,10 @@ test('should use Arborist', (t) => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- global: false
- }
+ global: false,
+ },
},
+ '../../lib/utils/reify-finish.js': async () => {},
'@npmcli/arborist': function (args) {
t.ok(args, 'gets options object')
this.loadVirtual = () => {
@@ -25,19 +25,21 @@ test('should use Arborist', (t) => {
t.ok(true, 'reify is called')
}
},
- 'util': {
- 'inherits': () => {},
- 'promisify': (fn) => fn
+ util: {
+ inherits: () => {},
+ promisify: (fn) => fn,
},
- 'rimraf': (path) => {
+ rimraf: (path) => {
t.ok(path, 'rimraf called with path')
return Promise.resolve(true)
},
'../../lib/utils/reify-output.js': function (arb) {
t.ok(arb, 'gets arborist tree')
- }
+ },
})
- ci(null, () => {
+ ci(null, er => {
+ if (er)
+ throw er
t.end()
})
})
@@ -47,37 +49,42 @@ test('should pass flatOptions to Arborist.reify', (t) => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- production: true
- }
+ production: true,
+ },
},
+ '../../lib/utils/reify-finish.js': async () => {},
'@npmcli/arborist': function () {
this.loadVirtual = () => Promise.resolve(true)
this.reify = async (options) => {
t.equal(options.production, true, 'should pass flatOptions to Arborist.reify')
t.end()
}
- }
+ },
+ })
+ ci(null, er => {
+ if (er)
+ throw er
})
- ci(null, () => {})
})
test('should throw if package-lock.json or npm-shrinkwrap missing', (t) => {
const testDir = t.testdir({
'index.js': 'some contents',
- 'package.json': 'some info'
+ 'package.json': 'some info',
})
const ci = requireInject('../../lib/ci.js', {
'../../lib/npm.js': {
prefix: testDir,
flatOptions: {
- global: false
- }
+ global: false,
+ },
},
- 'npmlog': {
+ '../../lib/utils/reify-finish.js': async () => {},
+ npmlog: {
verbose: () => {
t.ok(true, 'log fn called')
- }
+ },
},
})
ci(null, (err, res) => {
@@ -92,9 +99,10 @@ test('should throw ECIGLOBAL', (t) => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- global: true
- }
- }
+ global: true,
+ },
+ },
+ '../../lib/utils/reify-finish.js': async () => {},
})
ci(null, (err, res) => {
t.equals(err.code, 'ECIGLOBAL', 'throws error with global packages')
@@ -105,18 +113,19 @@ test('should throw ECIGLOBAL', (t) => {
test('should remove existing node_modules before installing', (t) => {
const testDir = t.testdir({
- 'node_modules': {
- 'some-file': 'some contents'
- }
+ node_modules: {
+ 'some-file': 'some contents',
+ },
})
const ci = requireInject('../../lib/ci.js', {
'../../lib/npm.js': {
prefix: testDir,
flatOptions: {
- global: false
- }
+ global: false,
+ },
},
+ '../../lib/utils/reify-finish.js': async () => {},
'@npmcli/arborist': function () {
this.loadVirtual = () => Promise.resolve(true)
this.reify = async (options) => {
@@ -127,8 +136,11 @@ test('should remove existing node_modules before installing', (t) => {
t.same(nodeModules, ['node_modules'], 'should only have the node_modules directory')
t.end()
}
- }
+ },
})
- ci(null, () => {})
+ ci(null, er => {
+ if (er)
+ throw er
+ })
})
diff --git a/deps/npm/test/lib/cli.js b/deps/npm/test/lib/cli.js
index 0d9b6ad6a5..b5441be1e4 100644
--- a/deps/npm/test/lib/cli.js
+++ b/deps/npm/test/lib/cli.js
@@ -8,14 +8,16 @@ const npmock = {
config: {
settings: {},
get: (k) => npmock.config.settings[k],
- set: (k, v) => { npmock.config.settings[k] = v },
+ set: (k, v) => {
+ npmock.config.settings[k] = v
+ },
},
- commands: {}
+ commands: {},
}
const unsupportedMock = {
checkForBrokenNode: () => {},
- checkForUnsupportedNode: () => {}
+ checkForUnsupportedNode: () => {},
}
let errorHandlerCalled = null
@@ -31,7 +33,7 @@ const logs = []
const npmlogMock = {
pause: () => logs.push('pause'),
verbose: (...msg) => logs.push(['verbose', ...msg]),
- info: (...msg) => logs.push(['info', ...msg])
+ info: (...msg) => logs.push(['info', ...msg]),
}
const requireInject = require('require-inject')
@@ -39,7 +41,7 @@ const cli = requireInject.installGlobally('../../lib/cli.js', {
'../../lib/npm.js': npmock,
'../../lib/utils/unsupported.js': unsupportedMock,
'../../lib/utils/error-handler.js': errorHandlerMock,
- npmlog: npmlogMock
+ npmlog: npmlogMock,
})
t.test('print the version, and treat npm_g to npm -g', t => {
@@ -50,7 +52,7 @@ t.test('print the version, and treat npm_g to npm -g', t => {
const proc = {
argv: ['node', 'npm_g', '-v'],
version: '420.69.lol',
- on: () => {}
+ on: () => {},
}
process.argv = proc.argv
npmock.config.settings.version = true
@@ -58,14 +60,14 @@ t.test('print the version, and treat npm_g to npm -g', t => {
cli(proc)
t.strictSame(npmock.argv, [])
- t.strictSame(proc.argv, [ 'node', 'npm', '-g', '-v' ])
+ t.strictSame(proc.argv, ['node', 'npm', '-g', '-v'])
t.strictSame(logs, [
'pause',
- [ 'verbose', 'cli', [ 'node', 'npm', '-g', '-v' ] ],
- [ 'info', 'using', 'npm@%s', '99.99.99' ],
- [ 'info', 'using', 'node@%s', '420.69.lol' ]
+ ['verbose', 'cli', ['node', 'npm', '-g', '-v']],
+ ['info', 'using', 'npm@%s', '99.99.99'],
+ ['info', 'using', 'node@%s', '420.69.lol'],
])
- t.strictSame(consoleLogs, [ [ '99.99.99' ] ])
+ t.strictSame(consoleLogs, [['99.99.99']])
t.strictSame(errorHandlerExitCalled, 0)
delete npmock.config.settings.version
@@ -87,7 +89,7 @@ t.test('calling with --versions calls npm version with no args', t => {
const processArgv = process.argv
const proc = {
argv: ['node', 'npm', 'install', 'or', 'whatever', '--versions'],
- on: () => {}
+ on: () => {},
}
process.argv = proc.argv
npmock.config.set('versions', true)
@@ -107,12 +109,12 @@ t.test('calling with --versions calls npm version with no args', t => {
npmock.commands.version = (args, cb) => {
t.equal(proc.title, 'npm')
t.strictSame(npmock.argv, [])
- t.strictSame(proc.argv, [ 'node', 'npm', 'install', 'or', 'whatever', '--versions' ])
+ t.strictSame(proc.argv, ['node', 'npm', 'install', 'or', 'whatever', '--versions'])
t.strictSame(logs, [
'pause',
- [ 'verbose', 'cli', [ 'node', 'npm', 'install', 'or', 'whatever', '--versions' ] ],
- [ 'info', 'using', 'npm@%s', '99.99.99' ],
- [ 'info', 'using', 'node@%s', undefined ]
+ ['verbose', 'cli', ['node', 'npm', 'install', 'or', 'whatever', '--versions']],
+ ['info', 'using', 'npm@%s', '99.99.99'],
+ ['info', 'using', 'node@%s', undefined],
])
t.strictSame(consoleLogs, [])
@@ -131,7 +133,7 @@ t.test('print usage if -h provided', t => {
console.log = (...msg) => consoleLogs.push(msg)
const proc = {
argv: ['node', 'npm', 'asdf'],
- on: () => {}
+ on: () => {},
}
npmock.argv = ['asdf']
@@ -150,12 +152,12 @@ t.test('print usage if -h provided', t => {
t.equal(proc.title, 'npm')
t.strictSame(args, ['asdf'])
t.strictSame(npmock.argv, ['asdf'])
- t.strictSame(proc.argv, [ 'node', 'npm', 'asdf' ])
+ t.strictSame(proc.argv, ['node', 'npm', 'asdf'])
t.strictSame(logs, [
'pause',
- [ 'verbose', 'cli', [ 'node', 'npm', 'asdf' ] ],
- [ 'info', 'using', 'npm@%s', '99.99.99' ],
- [ 'info', 'using', 'node@%s', undefined ]
+ ['verbose', 'cli', ['node', 'npm', 'asdf']],
+ ['info', 'using', 'npm@%s', '99.99.99'],
+ ['info', 'using', 'node@%s', undefined],
])
t.strictSame(consoleLogs, [])
t.strictSame(errorHandlerExitCalled, null)
@@ -170,11 +172,10 @@ t.test('load error calls error handler', t => {
LOAD_ERROR = er
const proc = {
argv: ['node', 'npm', 'asdf'],
- on: () => {}
+ on: () => {},
}
cli(proc)
t.strictSame(errorHandlerCalled, [er])
LOAD_ERROR = null
t.end()
})
-
diff --git a/deps/npm/test/lib/config.js b/deps/npm/test/lib/config.js
index 890d65731a..8a11a40c81 100644
--- a/deps/npm/test/lib/config.js
+++ b/deps/npm/test/lib/config.js
@@ -25,20 +25,20 @@ const types = {
'init-author-name': String,
'init-version': String,
'init.author.name': String,
- 'init.version': String
+ 'init.version': String,
}
const defaults = {
'init-author-name': '',
'init-version': '1.0.0',
'init.author.name': '',
- 'init.version': '1.0.0'
+ 'init.version': '1.0.0',
}
const flatOptions = {
editor: 'vi',
json: false,
long: false,
- global: false
+ global: false,
}
const npm = {
@@ -46,17 +46,21 @@ const npm = {
log: {
info: () => null,
enableProgress: () => null,
- disableProgress: () => null
+ disableProgress: () => null,
},
config: {
data: new Map(Object.entries({
default: { data: defaults, source: 'default values' },
global: { data: {}, source: '/etc/npmrc' },
- cli: { data: flatOptions, source: 'command line options' }
+ cli: { data: flatOptions, source: 'command line options' },
})),
- get (key) { return flatOptions[key] },
- validate () { return true }
- }
+ get (key) {
+ return flatOptions[key]
+ },
+ validate () {
+ return true
+ },
+ },
}
const usageUtil = () => 'usage instructions'
@@ -64,8 +68,10 @@ const usageUtil = () => 'usage instructions'
const mocks = {
'../../lib/utils/config.js': { defaults, types },
'../../lib/npm.js': npm,
- '../../lib/utils/output.js': msg => { result = msg },
- '../../lib/utils/usage.js': usageUtil
+ '../../lib/utils/output.js': msg => {
+ result = msg
+ },
+ '../../lib/utils/usage.js': usageUtil,
}
const config = requireInject('../../lib/config.js', mocks)
@@ -99,9 +105,9 @@ t.test('config list overrides', t => {
npm.config.data.set('user', {
data: {
'init.author.name': 'Foo',
- '//private-reg.npmjs.org/:_authThoken': 'f00ba1'
+ '//private-reg.npmjs.org/:_authThoken': 'f00ba1',
},
- source: '~/.npmrc'
+ source: '~/.npmrc',
})
flatOptions['init.author.name'] = 'Bar'
npm.config.find = () => 'cli'
@@ -144,7 +150,7 @@ t.test('config list --json', t => {
result = ''
npm.config.list = [{
'//private-reg.npmjs.org/:_authThoken': 'f00ba1',
- ...npm.config.data.get('cli').data
+ ...npm.config.data.get('cli').data,
}]
const npmConfigGet = npm.config.get
npm.config.get = key => npm.config.list[0][key]
@@ -164,7 +170,7 @@ t.test('config list --json', t => {
editor: 'vi',
json: true,
long: false,
- global: false
+ global: false,
},
'should list configs usin json'
)
@@ -413,7 +419,7 @@ t.test('config edit', t => {
init.author.name=Foo
sign-git-commit=true`
npm.config.data.set('user', {
- source: '~/.npmrc'
+ source: '~/.npmrc',
})
npm.config.save = async where => {
t.equal(where, 'user', 'should save to user config by default')
@@ -422,25 +428,29 @@ sign-git-commit=true`
...mocks,
'mkdirp-infer-owner': async () => null,
fs: {
- readFile (path, encoding, cb) { cb(null, npmrc) },
+ readFile (path, encoding, cb) {
+ cb(null, npmrc)
+ },
writeFile (file, data, encoding, cb) {
t.equal(file, '~/.npmrc', 'should save to expected file location')
t.matchSnapshot(data, 'should write config file')
cb()
- }
+ },
},
editor: (file, { editor }, cb) => {
t.equal(file, '~/.npmrc', 'should match user source data')
t.equal(editor, 'vi', 'should use default editor')
cb()
- }
+ },
}
const config = requireInject('../../lib/config.js', editMocks)
config(['edit'], (err) => {
t.ifError(err, 'npm config edit')
// test no config file result
- editMocks.fs.readFile = (p, e, cb) => { cb(new Error('ERR')) }
+ editMocks.fs.readFile = (p, e, cb) => {
+ cb(new Error('ERR'))
+ }
const config = requireInject('../../lib/config.js', editMocks)
config(['edit'], (err) => {
t.ifError(err, 'npm config edit')
@@ -459,7 +469,7 @@ t.test('config edit --global', t => {
flatOptions.global = true
const npmrc = 'init.author.name=Foo'
npm.config.data.set('global', {
- source: '/etc/npmrc'
+ source: '/etc/npmrc',
})
npm.config.save = async where => {
t.equal(where, 'global', 'should save to global config')
@@ -468,18 +478,20 @@ t.test('config edit --global', t => {
...mocks,
'mkdirp-infer-owner': async () => null,
fs: {
- readFile (path, encoding, cb) { cb(null, npmrc) },
+ readFile (path, encoding, cb) {
+ cb(null, npmrc)
+ },
writeFile (file, data, encoding, cb) {
t.equal(file, '/etc/npmrc', 'should save to global file location')
t.matchSnapshot(data, 'should write global config file')
cb()
- }
+ },
},
editor: (file, { editor }, cb) => {
t.equal(file, '/etc/npmrc', 'should match global source data')
t.equal(editor, 'vi', 'should use default editor')
cb()
- }
+ },
}
const config = requireInject('../../lib/config.js', editMocks)
config(['edit'], (err) => {
@@ -524,7 +536,7 @@ t.test('completion', t => {
'ls',
'rm',
'edit',
- 'list'
+ 'list',
])
testComp(['npm', 'config', 'set', 'foo'], [])
const possibleConfigKeys = [...Object.keys(types)]
@@ -539,10 +551,10 @@ t.test('completion', t => {
completion({
conf: {
argv: {
- remain: ['npm', 'config']
- }
+ remain: ['npm', 'config'],
+ },
},
- partialWord: 'l'
+ partialWord: 'l',
}, (er, res) => {
t.ifError(er)
t.strictSame(res, [
@@ -551,7 +563,7 @@ t.test('completion', t => {
'delete',
'ls',
'rm',
- 'edit'
+ 'edit',
], 'npm config')
})
diff --git a/deps/npm/test/lib/dedupe.js b/deps/npm/test/lib/dedupe.js
index a88c41f6e9..ff2d2be534 100644
--- a/deps/npm/test/lib/dedupe.js
+++ b/deps/npm/test/lib/dedupe.js
@@ -1,5 +1,4 @@
const { test } = require('tap')
-const dedupe = require('../../lib/dedupe.js')
const requireInject = require('require-inject')
test('should remove dupes using Arborist', (t) => {
@@ -7,8 +6,8 @@ test('should remove dupes using Arborist', (t) => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- 'dryRun': 'false'
- }
+ dryRun: 'false',
+ },
},
'@npmcli/arborist': function (args) {
t.ok(args, 'gets options object')
@@ -18,11 +17,13 @@ test('should remove dupes using Arborist', (t) => {
t.ok(true, 'dedupe is called')
}
},
- '../../lib/utils/reify-output.js': (arb) => {
+ '../../lib/utils/reify-finish.js': (arb) => {
t.ok(arb, 'gets arborist tree')
- }
+ },
})
- dedupe({ dryRun: true }, () => {
+ dedupe({ dryRun: true }, er => {
+ if (er)
+ throw er
t.ok(true, 'callback is called')
t.end()
})
@@ -33,17 +34,16 @@ test('should remove dupes using Arborist - no arguments', (t) => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- 'dryRun': 'true'
- }
+ dryRun: 'true',
+ },
},
'@npmcli/arborist': function (args) {
t.ok(args.dryRun, 'gets dryRun from flatOptions')
this.dedupe = () => {}
},
- '../../lib/utils/reify-output.js': () => {}
+ '../../lib/utils/reify-output.js': () => {},
})
dedupe(null, () => {
t.end()
})
})
-
diff --git a/deps/npm/test/lib/dist-tag.js b/deps/npm/test/lib/dist-tag.js
index ad08c2be13..e9dde48062 100644
--- a/deps/npm/test/lib/dist-tag.js
+++ b/deps/npm/test/lib/dist-tag.js
@@ -8,32 +8,33 @@ let log = ''
// these declared opts are used in ./utils/read-local-package.js
const _flatOptions = {
global: false,
- get prefix () { return prefix }
+ get prefix () {
+ return prefix
+ },
}
const routeMap = {
'/-/package/@scoped%2fpkg/dist-tags': {
latest: '1.0.0',
a: '0.0.1',
- b: '0.5.0'
+ b: '0.5.0',
},
'/-/package/@scoped%2fanother/dist-tags': {
latest: '2.0.0',
a: '0.0.2',
- b: '0.6.0'
+ b: '0.6.0',
},
'/-/package/@scoped%2fanother/dist-tags/c': {
latest: '7.7.7',
a: '0.0.2',
b: '0.6.0',
- c: '7.7.7'
- }
+ c: '7.7.7',
+ },
}
let npmRegistryFetchMock = (url, opts) => {
- if (url === '/-/package/foo/dist-tags') {
+ if (url === '/-/package/foo/dist-tags')
throw new Error('no package found')
- }
return routeMap[url]
}
@@ -41,9 +42,9 @@ let npmRegistryFetchMock = (url, opts) => {
npmRegistryFetchMock.json = async (url, opts) => routeMap[url]
const logger = (...msgs) => {
- for (const msg of [...msgs]) {
+ for (const msg of [...msgs])
log += msg + ' '
- }
+
log += '\n'
}
@@ -52,25 +53,29 @@ const distTag = requireInject('../../lib/dist-tag.js', {
error: logger,
info: logger,
verbose: logger,
- warn: logger
+ warn: logger,
+ },
+ get 'npm-registry-fetch' () {
+ return npmRegistryFetchMock
},
- get 'npm-registry-fetch' () { return npmRegistryFetchMock },
'../../lib/npm.js': {
flatOptions: _flatOptions,
config: {
get (key) {
return _flatOptions[key]
- }
- }
+ },
+ },
+ },
+ '../../lib/utils/output.js': msg => {
+ result = msg
},
- '../../lib/utils/output.js': msg => { result = msg }
})
test('ls in current package', (t) => {
prefix = t.testdir({
'package.json': JSON.stringify({
- name: '@scoped/pkg'
- })
+ name: '@scoped/pkg',
+ }),
})
distTag(['ls'], (err) => {
t.ifError(err, 'npm dist-tags ls')
@@ -87,8 +92,8 @@ test('ls in current package', (t) => {
test('no args in current package', (t) => {
prefix = t.testdir({
'package.json': JSON.stringify({
- name: '@scoped/pkg'
- })
+ name: '@scoped/pkg',
+ }),
})
distTag([], (err) => {
t.ifError(err, 'npm dist-tags ls')
@@ -146,8 +151,8 @@ test('ls on missing package', (t) => {
test('ls on missing name in current package', (t) => {
prefix = t.testdir({
'package.json': JSON.stringify({
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
})
distTag(['ls'], (err) => {
t.matchSnapshot(
@@ -294,9 +299,9 @@ test('completion', t => {
completion({
conf: {
argv: {
- remain: ['npm', 'dist-tag']
- }
- }
+ remain: ['npm', 'dist-tag'],
+ },
+ },
}, (err, res) => {
t.ifError(err, 'npm dist-tags completion')
@@ -305,7 +310,7 @@ test('completion', t => {
[
'add',
'rm',
- 'ls'
+ 'ls',
],
'should list npm dist-tag commands for completion'
)
@@ -314,9 +319,9 @@ test('completion', t => {
completion({
conf: {
argv: {
- remain: ['npm', 'dist-tag', 'foobar']
- }
- }
+ remain: ['npm', 'dist-tag', 'foobar'],
+ },
+ },
}, (err) => {
t.notOk(err, 'should ignore any unkown name')
})
diff --git a/deps/npm/test/lib/docs.js b/deps/npm/test/lib/docs.js
index 48ba9a3b57..b4ede87316 100644
--- a/deps/npm/test/lib/docs.js
+++ b/deps/npm/test/lib/docs.js
@@ -5,30 +5,30 @@ const pacote = {
manifest: async (spec, options) => {
return spec === 'nodocs' ? {
name: 'nodocs',
- version: '1.2.3'
- }
- : spec === 'docsurl' ? {
- name: 'docsurl',
- version: '1.2.3',
- homepage: 'https://bugzilla.localhost/docsurl'
- }
- : spec === 'repourl' ? {
- name: 'repourl',
- version: '1.2.3',
- repository: 'https://github.com/foo/repourl'
- }
- : spec === 'repoobj' ? {
- name: 'repoobj',
- version: '1.2.3',
- repository: { url: 'https://github.com/foo/repoobj' }
- }
- : spec === '.' ? {
- name: 'thispkg',
version: '1.2.3',
- homepage: 'https://example.com'
}
- : null
- }
+ : spec === 'docsurl' ? {
+ name: 'docsurl',
+ version: '1.2.3',
+ homepage: 'https://bugzilla.localhost/docsurl',
+ }
+ : spec === 'repourl' ? {
+ name: 'repourl',
+ version: '1.2.3',
+ repository: 'https://github.com/foo/repourl',
+ }
+ : spec === 'repoobj' ? {
+ name: 'repoobj',
+ version: '1.2.3',
+ repository: { url: 'https://github.com/foo/repoobj' },
+ }
+ : spec === '.' ? {
+ name: 'thispkg',
+ version: '1.2.3',
+ homepage: 'https://example.com',
+ }
+ : null
+ },
}
// keep a tally of which urls got opened
@@ -41,7 +41,7 @@ const openUrl = (url, errMsg, cb) => {
const docs = requireInject('../../lib/docs.js', {
pacote,
- '../../lib/utils/open-url.js': openUrl
+ '../../lib/utils/open-url.js': openUrl,
})
t.test('completion', t => {
@@ -58,7 +58,7 @@ t.test('open docs urls', t => {
docsurl: 'https://bugzilla.localhost/docsurl',
repourl: 'https://github.com/foo/repourl#readme',
repoobj: 'https://github.com/foo/repoobj#readme',
- '.': 'https://example.com'
+ '.': 'https://example.com',
}
const keys = Object.keys(expect)
t.plan(keys.length)
diff --git a/deps/npm/test/lib/exec.js b/deps/npm/test/lib/exec.js
index 08592353ce..c65f916428 100644
--- a/deps/npm/test/lib/exec.js
+++ b/deps/npm/test/lib/exec.js
@@ -10,9 +10,11 @@ class Arborist {
ARB_CTOR.push(options)
this.path = options.path
}
+
async loadActual () {
return ARB_ACTUAL_TREE[this.path]
}
+
async reify (options) {
ARB_REIFY.push(options)
}
@@ -26,18 +28,18 @@ const npm = {
yes: true,
call: '',
package: [],
- legacyPeerDeps: false
+ legacyPeerDeps: false,
},
localPrefix: 'local-prefix',
localBin: 'local-bin',
globalBin: 'global-bin',
config: {
get: k => {
- if (k !== 'cache') {
+ if (k !== 'cache')
throw new Error('unexpected config get')
- }
+
return 'cache-dir'
- }
+ },
},
log: {
disableProgress: () => {
@@ -48,23 +50,22 @@ const npm = {
},
warn: (...args) => {
LOG_WARN.push(args)
- }
- }
+ },
+ },
}
const RUN_SCRIPTS = []
const runScript = async opt => {
RUN_SCRIPTS.push(opt)
- if (!PROGRESS_IGNORED && PROGRESS_ENABLED) {
+ if (!PROGRESS_IGNORED && PROGRESS_ENABLED)
throw new Error('progress not disabled during run script!')
- }
}
const MANIFESTS = {}
const pacote = {
manifest: async (spec, options) => {
return MANIFESTS[spec]
- }
+ },
}
const MKDIRPS = []
@@ -89,7 +90,7 @@ const mocks = {
'../../lib/npm.js': npm,
pacote,
read,
- 'mkdirp-infer-owner': mkdirp
+ 'mkdirp-infer-owner': mkdirp,
}
const exec = requireInject('../../lib/exec.js', mocks)
@@ -113,7 +114,7 @@ t.afterEach(cb => {
t.test('npx foo, bin already exists locally', async t => {
const path = t.testdir({
- foo: 'just some file'
+ foo: 'just some file',
})
PROGRESS_IGNORED = true
@@ -129,15 +130,15 @@ t.test('npx foo, bin already exists locally', async t => {
stdioString: true,
event: 'npx',
env: {
- PATH: [path, ...PATH].join(delimiter)
+ PATH: [path, ...PATH].join(delimiter),
},
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
t.test('npx foo, bin already exists globally', async t => {
const path = t.testdir({
- foo: 'just some file'
+ foo: 'just some file',
})
PROGRESS_IGNORED = true
@@ -153,9 +154,9 @@ t.test('npx foo, bin already exists globally', async t => {
stdioString: true,
event: 'npx',
env: {
- PATH: [path, ...PATH].join(delimiter)
+ PATH: [path, ...PATH].join(delimiter),
},
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -163,23 +164,22 @@ t.test('npm exec foo, already present locally', async t => {
const path = t.testdir()
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map([['foo', { name: 'foo', version: '1.2.3' }]])
+ children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
await exec(['foo'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [], 'no need to make any dirs')
- t.match(ARB_CTOR, [ { package: ['foo'], path } ])
+ t.match(ARB_CTOR, [{ package: ['foo'], path }])
t.strictSame(ARB_REIFY, [], 'no need to reify anything')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.match(RUN_SCRIPTS, [{
@@ -189,7 +189,7 @@ t.test('npm exec foo, already present locally', async t => {
stdioString: true,
event: 'npx',
env: { PATH: process.env.PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -198,26 +198,25 @@ t.test('npm exec foo, not present locally or in central loc', async t => {
const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
await exec(['foo'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: ['foo'], path } ])
+ t.match(ARB_CTOR, [{ package: ['foo'], path }])
t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: false}], 'need to install foo@')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}`
@@ -228,7 +227,7 @@ t.test('npm exec foo, not present locally or in central loc', async t => {
stdioString: true,
event: 'npx',
env: { PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -237,26 +236,25 @@ t.test('npm exec foo, not present locally but in central loc', async t => {
const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map([['foo', { name: 'foo', version: '1.2.3' }]])
+ children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
await exec(['foo'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: ['foo'], path } ])
+ t.match(ARB_CTOR, [{ package: ['foo'], path }])
t.match(ARB_REIFY, [], 'no need to install again, already there')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}`
@@ -267,7 +265,7 @@ t.test('npm exec foo, not present locally but in central loc', async t => {
stdioString: true,
event: 'npx',
env: { PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -276,26 +274,25 @@ t.test('npm exec foo, present locally but wrong version', async t => {
const installDir = resolve('cache-dir/_npx/2badf4630f1cfaad')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map([['foo', { name: 'foo', version: '1.2.3' }]])
+ children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]),
}
MANIFESTS['foo@2.x'] = {
name: 'foo',
version: '2.3.4',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@2.x'
+ _from: 'foo@2.x',
}
await exec(['foo@2.x'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: ['foo'], path } ])
+ t.match(ARB_CTOR, [{ package: ['foo'], path }])
t.match(ARB_REIFY, [{ add: ['foo@2.x'], legacyPeerDeps: false }], 'need to add foo@2.x')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}`
@@ -306,7 +303,7 @@ t.test('npm exec foo, present locally but wrong version', async t => {
stdioString: true,
event: 'npx',
env: { PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -314,24 +311,23 @@ t.test('npm exec --package=foo bar', async t => {
const path = t.testdir()
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map([['foo', { name: 'foo', version: '1.2.3' }]])
+ children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
npm.flatOptions.package = ['foo']
await exec(['bar'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [], 'no need to make any dirs')
- t.match(ARB_CTOR, [ { package: ['foo'], path } ])
+ t.match(ARB_CTOR, [{ package: ['foo'], path }])
t.strictSame(ARB_REIFY, [], 'no need to reify anything')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.match(RUN_SCRIPTS, [{
@@ -341,7 +337,7 @@ t.test('npm exec --package=foo bar', async t => {
stdioString: true,
event: 'npx',
env: { PATH: process.env.PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -351,28 +347,27 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', async t => {
version: '1.2.3',
bin: {
foo: 'foo',
- bar: 'bar'
- }
+ bar: 'bar',
+ },
}
const path = t.testdir({
node_modules: {
'@foo/bar': {
- 'package.json': JSON.stringify(foobarManifest)
- }
- }
+ 'package.json': JSON.stringify(foobarManifest),
+ },
+ },
})
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]])
+ children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]),
}
MANIFESTS['@foo/bar'] = foobarManifest
await exec(['@foo/bar', '--some=arg'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [], 'no need to make any dirs')
- t.match(ARB_CTOR, [ { package: ['@foo/bar'], path } ])
+ t.match(ARB_CTOR, [{ package: ['@foo/bar'], path }])
t.strictSame(ARB_REIFY, [], 'no need to reify anything')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.match(RUN_SCRIPTS, [{
@@ -382,7 +377,7 @@ t.test('npm exec @foo/bar -- --some=arg, locally installed', async t => {
stdioString: true,
event: 'npx',
env: { PATH: process.env.PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -394,27 +389,26 @@ t.test('npm exec @foo/bar, with same bin alias and no unscoped named bin, locall
baz: 'corge', // pick the first one
qux: 'corge',
quux: 'corge',
- }
+ },
}
const path = t.testdir({
node_modules: {
'@foo/bar': {
- 'package.json': JSON.stringify(foobarManifest)
- }
- }
+ 'package.json': JSON.stringify(foobarManifest),
+ },
+ },
})
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]])
+ children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]),
}
MANIFESTS['@foo/bar'] = foobarManifest
await exec(['@foo/bar'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [], 'no need to make any dirs')
- t.match(ARB_CTOR, [ { package: ['@foo/bar'], path } ])
+ t.match(ARB_CTOR, [{ package: ['@foo/bar'], path }])
t.strictSame(ARB_REIFY, [], 'no need to reify anything')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.match(RUN_SCRIPTS, [{
@@ -424,7 +418,7 @@ t.test('npm exec @foo/bar, with same bin alias and no unscoped named bin, locall
stdioString: true,
event: 'npx',
env: { PATH: process.env.PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -432,7 +426,7 @@ t.test('npm exec @foo/bar, with different bin alias and no unscoped named bin, l
const path = t.testdir()
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]])
+ children: new Map([['@foo/bar', { name: '@foo/bar', version: '1.2.3' }]]),
}
MANIFESTS['@foo/bar'] = {
name: '@foo/bar',
@@ -443,15 +437,14 @@ t.test('npm exec @foo/bar, with different bin alias and no unscoped named bin, l
baz: 'quux',
},
_from: 'foo@',
- _id: '@foo/bar@1.2.3'
+ _id: '@foo/bar@1.2.3',
}
return t.rejects(exec(['@foo/bar'], er => {
- if (er) {
+ if (er)
throw er
- }
}), {
message: 'could not determine executable to run',
- pkgid: '@foo/bar@1.2.3'
+ pkgid: '@foo/bar@1.2.3',
})
})
@@ -468,34 +461,33 @@ t.test('run command with 2 packages, need install, verify sort', t => {
const installDir = resolve('cache-dir/_npx/07de77790e5f40f2')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
MANIFESTS.bar = {
name: 'bar',
version: '1.2.3',
bin: {
- bar: 'bar'
+ bar: 'bar',
},
- _from: 'bar@'
+ _from: 'bar@',
}
await exec(['foobar'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: packages, path } ])
+ t.match(ARB_CTOR, [{ package: packages, path }])
t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}`
@@ -506,7 +498,7 @@ t.test('run command with 2 packages, need install, verify sort', t => {
stdioString: true,
event: 'npx',
env: { PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
}
@@ -516,21 +508,20 @@ t.test('npm exec foo, no bin in package', t => {
const path = t.testdir()
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map([['foo', { name: 'foo', version: '1.2.3' }]])
+ children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
_from: 'foo@',
- _id: 'foo@1.2.3'
+ _id: 'foo@1.2.3',
}
return t.rejects(exec(['foo'], er => {
- if (er) {
+ if (er)
throw er
- }
}), {
message: 'could not determine executable to run',
- pkgid: 'foo@1.2.3'
+ pkgid: 'foo@1.2.3',
})
})
@@ -538,25 +529,24 @@ t.test('npm exec foo, many bins in package, none named foo', t => {
const path = t.testdir()
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map([['foo', { name: 'foo', version: '1.2.3' }]])
+ children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
bar: 'bar',
- baz: 'baz'
+ baz: 'baz',
},
_from: 'foo@',
- _id: 'foo@1.2.3'
+ _id: 'foo@1.2.3',
}
return t.rejects(exec(['foo'], er => {
- if (er) {
+ if (er)
throw er
- }
}), {
message: 'could not determine executable to run',
- pkgid: 'foo@1.2.3'
+ pkgid: 'foo@1.2.3',
})
})
@@ -566,20 +556,19 @@ t.test('npm exec -p foo -c "ls -laF"', async t => {
npm.flatOptions.package = ['foo']
npm.flatOptions.call = 'ls -laF'
ARB_ACTUAL_TREE[path] = {
- children: new Map([['foo', { name: 'foo', version: '1.2.3' }]])
+ children: new Map([['foo', { name: 'foo', version: '1.2.3' }]]),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
- _from: 'foo@'
+ _from: 'foo@',
}
await exec([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [], 'no need to make any dirs')
- t.match(ARB_CTOR, [ { package: ['foo'], path } ])
+ t.match(ARB_CTOR, [{ package: ['foo'], path }])
t.strictSame(ARB_REIFY, [], 'no need to reify anything')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.match(RUN_SCRIPTS, [{
@@ -589,7 +578,7 @@ t.test('npm exec -p foo -c "ls -laF"', async t => {
stdioString: true,
event: 'npx',
env: { PATH: process.env.PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
})
@@ -621,34 +610,33 @@ t.test('prompt when installs are needed if not already present and shell is a TT
const installDir = resolve('cache-dir/_npx/07de77790e5f40f2')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
MANIFESTS.bar = {
name: 'bar',
version: '1.2.3',
bin: {
- bar: 'bar'
+ bar: 'bar',
},
- _from: 'bar@'
+ _from: 'bar@',
}
await exec(['foobar'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: packages, path } ])
+ t.match(ARB_CTOR, [{ package: packages, path }])
t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}`
@@ -659,11 +647,11 @@ t.test('prompt when installs are needed if not already present and shell is a TT
stdioString: true,
event: 'npx',
env: { PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
t.strictSame(READ, [{
prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ',
- default: 'y'
+ default: 'y',
}])
})
@@ -690,34 +678,33 @@ t.test('skip prompt when installs are needed if not already present and shell is
const installDir = resolve('cache-dir/_npx/07de77790e5f40f2')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
MANIFESTS.bar = {
name: 'bar',
version: '1.2.3',
bin: {
- bar: 'bar'
+ bar: 'bar',
},
- _from: 'bar@'
+ _from: 'bar@',
}
await exec(['foobar'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: packages, path } ])
+ t.match(ARB_CTOR, [{ package: packages, path }])
t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install both packages')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}`
@@ -728,7 +715,7 @@ t.test('skip prompt when installs are needed if not already present and shell is
stdioString: true,
event: 'npx',
env: { PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
t.strictSame(READ, [], 'should not have prompted')
t.strictSame(LOG_WARN, [['exec', 'The following packages were not found and will be installed: bar, foo']], 'should have printed a warning')
@@ -757,26 +744,25 @@ t.test('skip prompt when installs are needed if not already present and shell is
const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
await exec(['foobar'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: packages, path } ])
+ t.match(ARB_CTOR, [{ package: packages, path }])
t.match(ARB_REIFY, [{add, legacyPeerDeps: false}], 'need to install the package')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
const PATH = `${resolve(installDir, 'node_modules', '.bin')}${delimiter}${process.env.PATH}`
@@ -787,7 +773,7 @@ t.test('skip prompt when installs are needed if not already present and shell is
stdioString: true,
event: 'npx',
env: { PATH },
- stdio: 'inherit'
+ stdio: 'inherit',
}])
t.strictSame(READ, [], 'should not have prompted')
t.strictSame(LOG_WARN, [['exec', 'The following package was not found and will be installed: foo']], 'should have printed a warning')
@@ -811,43 +797,42 @@ t.test('abort if prompt rejected', async t => {
npm.flatOptions.package = packages
npm.flatOptions.yes = undefined
- const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b))
const path = t.testdir()
const installDir = resolve('cache-dir/_npx/07de77790e5f40f2')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
MANIFESTS.bar = {
name: 'bar',
version: '1.2.3',
bin: {
- bar: 'bar'
+ bar: 'bar',
},
- _from: 'bar@'
+ _from: 'bar@',
}
await exec(['foobar'], er => {
t.equal(er, 'canceled', 'should be canceled')
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: packages, path } ])
+ t.match(ARB_CTOR, [{ package: packages, path }])
t.strictSame(ARB_REIFY, [], 'no install performed')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.strictSame(RUN_SCRIPTS, [])
t.strictSame(READ, [{
prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ',
- default: 'y'
+ default: 'y',
}])
})
@@ -869,43 +854,42 @@ t.test('abort if prompt false', async t => {
npm.flatOptions.package = packages
npm.flatOptions.yes = undefined
- const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b))
const path = t.testdir()
const installDir = resolve('cache-dir/_npx/07de77790e5f40f2')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
MANIFESTS.bar = {
name: 'bar',
version: '1.2.3',
bin: {
- bar: 'bar'
+ bar: 'bar',
},
- _from: 'bar@'
+ _from: 'bar@',
}
await exec(['foobar'], er => {
t.equal(er, 'canceled', 'should be canceled')
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: packages, path } ])
+ t.match(ARB_CTOR, [{ package: packages, path }])
t.strictSame(ARB_REIFY, [], 'no install performed')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.strictSame(RUN_SCRIPTS, [])
t.strictSame(READ, [{
prompt: 'Need to install the following packages:\n bar\n foo\nOk to proceed? ',
- default: 'y'
+ default: 'y',
}])
})
@@ -926,37 +910,36 @@ t.test('abort if -n provided', async t => {
npm.flatOptions.package = packages
npm.flatOptions.yes = false
- const add = packages.map(p => `${p}@`).sort((a, b) => a.localeCompare(b))
const path = t.testdir()
const installDir = resolve('cache-dir/_npx/07de77790e5f40f2')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
MANIFESTS.bar = {
name: 'bar',
version: '1.2.3',
bin: {
- bar: 'bar'
+ bar: 'bar',
},
- _from: 'bar@'
+ _from: 'bar@',
}
await exec(['foobar'], er => {
t.equal(er, 'canceled', 'should be canceled')
})
t.strictSame(MKDIRPS, [installDir], 'need to make install dir')
- t.match(ARB_CTOR, [ { package: packages, path } ])
+ t.match(ARB_CTOR, [{ package: packages, path }])
t.strictSame(ARB_REIFY, [], 'no install performed')
t.equal(PROGRESS_ENABLED, true, 'progress re-enabled')
t.strictSame(RUN_SCRIPTS, [])
@@ -968,25 +951,24 @@ t.test('forward legacyPeerDeps opt', async t => {
const installDir = resolve('cache-dir/_npx/f7fbba6e0636f890')
npm.localPrefix = path
ARB_ACTUAL_TREE[path] = {
- children: new Map()
+ children: new Map(),
}
ARB_ACTUAL_TREE[installDir] = {
- children: new Map()
+ children: new Map(),
}
MANIFESTS.foo = {
name: 'foo',
version: '1.2.3',
bin: {
- foo: 'foo'
+ foo: 'foo',
},
- _from: 'foo@'
+ _from: 'foo@',
}
npm.flatOptions.yes = true
npm.flatOptions.legacyPeerDeps = true
await exec(['foo'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.match(ARB_REIFY, [{add: ['foo@'], legacyPeerDeps: true}], 'need to install foo@ using legacyPeerDeps opt')
})
diff --git a/deps/npm/test/lib/explain.js b/deps/npm/test/lib/explain.js
index a9db344f8b..1eeca8c4c4 100644
--- a/deps/npm/test/lib/explain.js
+++ b/deps/npm/test/lib/explain.js
@@ -3,7 +3,7 @@ const requireInject = require('require-inject')
const npm = {
prefix: null,
color: true,
- flatOptions: {}
+ flatOptions: {},
}
const { resolve } = require('path')
@@ -20,8 +20,8 @@ const explain = requireInject('../../lib/explain.js', {
'../../lib/utils/explain-dep.js': {
explainNode: (expl, depth, color) => {
return `${expl.name}@${expl.version} depth=${depth} color=${color}`
- }
- }
+ },
+ },
})
t.test('no args throws usage', async t => {
@@ -68,15 +68,15 @@ t.test('explain some nodes', async t => {
name: 'foo',
version: '1.2.3',
dependencies: {
- bar: '*'
- }
- })
+ bar: '*',
+ },
+ }),
},
bar: {
'package.json': JSON.stringify({
name: 'bar',
- version: '1.2.3'
- })
+ version: '1.2.3',
+ }),
},
baz: {
'package.json': JSON.stringify({
@@ -84,40 +84,39 @@ t.test('explain some nodes', async t => {
version: '1.2.3',
dependencies: {
foo: '*',
- bar: '2'
- }
+ bar: '2',
+ },
}),
node_modules: {
bar: {
'package.json': JSON.stringify({
name: 'bar',
- version: '2.3.4'
- })
+ version: '2.3.4',
+ }),
},
extra: {
'package.json': JSON.stringify({
name: 'extra',
version: '99.9999.999999',
- description: 'extraneous package'
- })
- }
- }
- }
+ description: 'extraneous package',
+ }),
+ },
+ },
+ },
},
'package.json': JSON.stringify({
dependencies: {
- baz: '1'
- }
- })
+ baz: '1',
+ },
+ }),
})
// works with either a full actual path or the location
const p = 'node_modules/foo'
for (const path of [p, resolve(npm.prefix, p)]) {
await explain([path], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(OUTPUT, [['foo@1.2.3 depth=Infinity color=true']])
OUTPUT.length = 0
@@ -125,44 +124,40 @@ t.test('explain some nodes', async t => {
// finds all nodes by name
await explain(['bar'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(OUTPUT, [[
'bar@1.2.3 depth=Infinity color=true\n\n' +
- 'bar@2.3.4 depth=Infinity color=true'
+ 'bar@2.3.4 depth=Infinity color=true',
]])
OUTPUT.length = 0
// finds only nodes that match the spec
await explain(['bar@1'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(OUTPUT, [['bar@1.2.3 depth=Infinity color=true']])
OUTPUT.length = 0
// finds extraneous nodes
await explain(['extra'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(OUTPUT, [['extra@99.9999.999999 depth=Infinity color=true']])
OUTPUT.length = 0
npm.flatOptions.json = true
await explain(['node_modules/foo'], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.match(JSON.parse(OUTPUT[0][0]), [{
name: 'foo',
version: '1.2.3',
- dependents: Array
+ dependents: Array,
}])
OUTPUT.length = 0
npm.flatOptions.json = false
@@ -174,4 +169,3 @@ t.test('explain some nodes', async t => {
})
})
})
-
diff --git a/deps/npm/test/lib/explore.js b/deps/npm/test/lib/explore.js
index 03ad230489..64c70bcce7 100644
--- a/deps/npm/test/lib/explore.js
+++ b/deps/npm/test/lib/explore.js
@@ -13,12 +13,12 @@ let SPAWN_EXIT_CODE = 0
let SPAWN_SHELL_EXEC = null
let SPAWN_SHELL_ARGS = null
const mockSpawn = (sh, shellArgs, opts) => {
- if (sh !== 'shell-command') {
+ if (sh !== 'shell-command')
throw new Error('got wrong shell command')
- }
- if (SPAWN_ERROR) {
+
+ if (SPAWN_ERROR)
return Promise.reject(SPAWN_ERROR)
- }
+
SPAWN_SHELL_EXEC = sh
SPAWN_SHELL_ARGS = shellArgs
return Promise.resolve({ code: SPAWN_EXIT_CODE })
@@ -29,28 +29,28 @@ let ERROR_HANDLER_CALLED = null
const getExplore = windows => requireInject('../../lib/explore.js', {
'../../lib/utils/is-windows.js': windows,
'../../lib/utils/escape-arg.js': requireInject('../../lib/utils/escape-arg.js', {
- '../../lib/utils/is-windows.js': windows
+ '../../lib/utils/is-windows.js': windows,
}),
path: require('path')[windows ? 'win32' : 'posix'],
'../../lib/utils/escape-exec-path.js': requireInject('../../lib/utils/escape-arg.js', {
- '../../lib/utils/is-windows.js': windows
+ '../../lib/utils/is-windows.js': windows,
}),
'../../lib/utils/error-handler.js': er => {
ERROR_HANDLER_CALLED = er
},
fs: {
- stat: mockStat
+ stat: mockStat,
},
'../../lib/npm.js': {
dir: windows ? 'c:\\npm\\dir' : '/npm/dir',
flatOptions: {
- shell: 'shell-command'
- }
+ shell: 'shell-command',
+ },
},
'@npmcli/promise-spawn': mockSpawn,
'../../lib/utils/output.js': out => {
output.push(out)
- }
+ },
})
const windowsExplore = getExplore(true)
@@ -63,42 +63,42 @@ t.test('basic interactive', t => {
})
t.test('windows', t => windowsExplore(['pkg'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: 'c:\\npm\\dir\\pkg',
SPAWN_SHELL_EXEC: 'shell-command',
- SPAWN_SHELL_ARGS: []
+ SPAWN_SHELL_ARGS: [],
})
t.strictSame(output, [
- "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n"
+ "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n",
])
}))
t.test('posix', t => posixExplore(['pkg'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: '/npm/dir/pkg',
SPAWN_SHELL_EXEC: 'shell-command',
- SPAWN_SHELL_ARGS: []
+ SPAWN_SHELL_ARGS: [],
})
t.strictSame(output, [
- "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n"
+ "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n",
])
}))
@@ -120,43 +120,43 @@ t.test('interactive tracks exit code', t => {
})
t.test('windows', t => windowsExplore(['pkg'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: 'c:\\npm\\dir\\pkg',
SPAWN_SHELL_EXEC: 'shell-command',
- SPAWN_SHELL_ARGS: []
+ SPAWN_SHELL_ARGS: [],
})
t.strictSame(output, [
- "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n"
+ "\nExploring c:\\npm\\dir\\pkg\nType 'exit' or ^D when finished\n",
])
t.equal(process.exitCode, 99)
}))
t.test('posix', t => posixExplore(['pkg'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: '/npm/dir/pkg',
SPAWN_SHELL_EXEC: 'shell-command',
- SPAWN_SHELL_ARGS: []
+ SPAWN_SHELL_ARGS: [],
})
t.strictSame(output, [
- "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n"
+ "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n",
])
t.equal(process.exitCode, 99)
}))
@@ -166,14 +166,14 @@ t.test('interactive tracks exit code', t => {
SPAWN_ERROR = null
})
SPAWN_ERROR = Object.assign(new Error('glorb'), {
- code: 33
+ code: 33,
})
return posixExplore(['pkg'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame(output, [
- "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n"
+ "\nExploring /npm/dir/pkg\nType 'exit' or ^D when finished\n",
])
t.equal(process.exitCode, 33)
})
@@ -189,14 +189,14 @@ t.test('basic non-interactive', t => {
})
t.test('windows', t => windowsExplore(['pkg', 'ls'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: 'c:\\npm\\dir\\pkg',
@@ -206,25 +206,25 @@ t.test('basic non-interactive', t => {
'/s',
'/c',
'"ls"',
- ]
+ ],
})
t.strictSame(output, [])
}))
t.test('posix', t => posixExplore(['pkg', 'ls'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: '/npm/dir/pkg',
SPAWN_SHELL_EXEC: 'shell-command',
- SPAWN_SHELL_ARGS: ['-c', 'ls']
+ SPAWN_SHELL_ARGS: ['-c', 'ls'],
})
t.strictSame(output, [])
}))
@@ -239,19 +239,19 @@ t.test('usage if no pkg provided', t => {
})
t.plan(1)
posixExplore([], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED: null,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: '/npm/dir/pkg',
SPAWN_SHELL_EXEC: 'shell-command',
- SPAWN_SHELL_ARGS: ['-c', 'ls']
+ SPAWN_SHELL_ARGS: ['-c', 'ls'],
})
}).catch(er => t.equal(er, 'npm explore <pkg> [ -- <command>]'))
})
@@ -261,19 +261,19 @@ t.test('pkg not installed', t => {
t.plan(1)
posixExplore(['pkg', 'ls'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame({
ERROR_HANDLER_CALLED,
STAT_CALLED,
SPAWN_SHELL_EXEC,
- SPAWN_SHELL_ARGS
+ SPAWN_SHELL_ARGS,
}, {
ERROR_HANDLER_CALLED: null,
STAT_CALLED: '/npm/dir/pkg',
SPAWN_SHELL_EXEC: 'shell-command',
- SPAWN_SHELL_ARGS: ['-c', 'ls']
+ SPAWN_SHELL_ARGS: ['-c', 'ls'],
})
t.strictSame(output, [])
}).catch(er => {
diff --git a/deps/npm/test/lib/find-dupes.js b/deps/npm/test/lib/find-dupes.js
index 2f6272b90c..73c8fa2dc2 100644
--- a/deps/npm/test/lib/find-dupes.js
+++ b/deps/npm/test/lib/find-dupes.js
@@ -1,5 +1,4 @@
const { test } = require('tap')
-const findDupes = require('../../lib/find-dupes.js')
const requireInject = require('require-inject')
test('should run dedupe in dryRun mode', (t) => {
@@ -7,11 +6,10 @@ test('should run dedupe in dryRun mode', (t) => {
'../../lib/dedupe.js': function (args, cb) {
t.ok(args.dryRun, 'dryRun is true')
cb()
- }
+ },
})
findDupes(null, () => {
t.ok(true, 'callback is called')
t.end()
})
})
-
diff --git a/deps/npm/test/lib/fund.js b/deps/npm/test/lib/fund.js
index fc6a63aa17..a23fc88ced 100644
--- a/deps/npm/test/lib/fund.js
+++ b/deps/npm/test/lib/fund.js
@@ -6,7 +6,7 @@ const requireInject = require('require-inject')
const version = '1.0.0'
const funding = {
type: 'individual',
- url: 'http://example.com/donate'
+ url: 'http://example.com/donate',
}
const maintainerOwnsAllDeps = {
@@ -16,8 +16,8 @@ const maintainerOwnsAllDeps = {
funding,
dependencies: {
'dep-foo': '*',
- 'dep-bar': '*'
- }
+ 'dep-bar': '*',
+ },
}),
node_modules: {
'dep-foo': {
@@ -26,27 +26,27 @@ const maintainerOwnsAllDeps = {
version,
funding,
dependencies: {
- 'dep-sub-foo': '*'
- }
+ 'dep-sub-foo': '*',
+ },
}),
node_modules: {
'dep-sub-foo': {
'package.json': JSON.stringify({
name: 'dep-sub-foo',
version,
- funding
- })
- }
- }
+ funding,
+ }),
+ },
+ },
},
'dep-bar': {
'package.json': JSON.stringify({
name: 'dep-bar',
version,
- funding
- })
- }
- }
+ funding,
+ }),
+ },
+ },
}
const nestedNoFundingPackages = {
@@ -54,11 +54,11 @@ const nestedNoFundingPackages = {
name: 'nested-no-funding-packages',
version,
dependencies: {
- foo: '*'
+ foo: '*',
},
devDependencies: {
- lorem: '*'
- }
+ lorem: '*',
+ },
}),
node_modules: {
foo: {
@@ -66,38 +66,38 @@ const nestedNoFundingPackages = {
name: 'foo',
version,
dependencies: {
- bar: '*'
- }
+ bar: '*',
+ },
}),
node_modules: {
bar: {
'package.json': JSON.stringify({
name: 'bar',
version,
- funding
+ funding,
}),
node_modules: {
'sub-bar': {
'package.json': JSON.stringify({
name: 'sub-bar',
version,
- funding: 'https://example.com/sponsor'
- })
- }
- }
- }
- }
+ funding: 'https://example.com/sponsor',
+ }),
+ },
+ },
+ },
+ },
},
lorem: {
'package.json': JSON.stringify({
name: 'lorem',
version,
funding: {
- url: 'https://example.com/lorem'
- }
- })
- }
- }
+ url: 'https://example.com/lorem',
+ },
+ }),
+ },
+ },
}
const nestedMultipleFundingPackages = {
@@ -106,14 +106,14 @@ const nestedMultipleFundingPackages = {
version,
funding: [
'https://one.example.com',
- 'https://two.example.com'
+ 'https://two.example.com',
],
dependencies: {
- foo: '*'
+ foo: '*',
},
devDependencies: {
- bar: '*'
- }
+ bar: '*',
+ },
}),
node_modules: {
foo: {
@@ -123,9 +123,9 @@ const nestedMultipleFundingPackages = {
funding: [
'http://example.com',
{ url: 'http://sponsors.example.com/me' },
- 'http://collective.example.com'
- ]
- })
+ 'http://collective.example.com',
+ ],
+ }),
},
bar: {
'package.json': JSON.stringify({
@@ -133,11 +133,11 @@ const nestedMultipleFundingPackages = {
version,
funding: [
'http://collective.example.com',
- { url: 'http://sponsors.example.com/you' }
- ]
- })
- }
- }
+ { url: 'http://sponsors.example.com/you' },
+ ],
+ }),
+ },
+ },
}
const conflictingFundingPackages = {
@@ -145,19 +145,19 @@ const conflictingFundingPackages = {
name: 'conflicting-funding-packages',
version,
dependencies: {
- foo: '1.0.0'
+ foo: '1.0.0',
},
devDependencies: {
- bar: '1.0.0'
- }
+ bar: '1.0.0',
+ },
}),
node_modules: {
foo: {
'package.json': JSON.stringify({
name: 'foo',
version: '1.0.0',
- funding: 'http://example.com/1'
- })
+ funding: 'http://example.com/1',
+ }),
},
bar: {
node_modules: {
@@ -165,19 +165,19 @@ const conflictingFundingPackages = {
'package.json': JSON.stringify({
name: 'foo',
version: '2.0.0',
- funding: 'http://example.com/2'
- })
- }
+ funding: 'http://example.com/2',
+ }),
+ },
},
'package.json': JSON.stringify({
name: 'bar',
version: '1.0.0',
dependencies: {
- foo: '2.0.0'
- }
- })
- }
- }
+ foo: '2.0.0',
+ },
+ }),
+ },
+ },
}
let result = ''
@@ -188,7 +188,7 @@ const _flatOptions = {
global: false,
prefix: undefined,
unicode: false,
- which: undefined
+ which: undefined,
}
const openUrl = (url, msg, cb) => {
if (url === 'http://npmjs.org') {
@@ -198,35 +198,39 @@ const openUrl = (url, msg, cb) => {
if (_flatOptions.json) {
printUrl = JSON.stringify({
title: msg,
- url: url
+ url: url,
})
- } else {
+ } else
printUrl = `${msg}:\n ${url}`
- }
+
cb()
}
const fund = requireInject('../../lib/fund.js', {
'../../lib/npm.js': {
flatOptions: _flatOptions,
- get prefix () { return _flatOptions.prefix }
+ get prefix () {
+ return _flatOptions.prefix
+ },
},
'../../lib/utils/open-url.js': openUrl,
- '../../lib/utils/output.js': msg => { result += msg + '\n' },
+ '../../lib/utils/output.js': msg => {
+ result += msg + '\n'
+ },
pacote: {
manifest: (arg) => arg.name === 'ntl'
? Promise.resolve({
- funding: 'http://example.com/pacote'
+ funding: 'http://example.com/pacote',
})
- : Promise.reject(new Error('ERROR'))
- }
+ : Promise.reject(new Error('ERROR')),
+ },
})
test('fund with no package containing funding', t => {
_flatOptions.prefix = t.testdir({
'package.json': JSON.stringify({
name: 'no-funding-package',
- version: '0.0.0'
- })
+ version: '0.0.0',
+ }),
})
fund([], (err) => {
@@ -264,7 +268,7 @@ test('fund in which same maintainer owns all its deps, using --json option', t =
dependencies: {
'dep-bar': {
version: '1.0.0',
- funding: { type: 'individual', url: 'http://example.com/donate' }
+ funding: { type: 'individual', url: 'http://example.com/donate' },
},
'dep-foo': {
version: '1.0.0',
@@ -272,11 +276,11 @@ test('fund in which same maintainer owns all its deps, using --json option', t =
dependencies: {
'dep-sub-foo': {
version: '1.0.0',
- funding: { type: 'individual', url: 'http://example.com/donate' }
- }
- }
- }
- }
+ funding: { type: 'individual', url: 'http://example.com/donate' },
+ },
+ },
+ },
+ },
},
'should print stack packages together'
)
@@ -317,13 +321,13 @@ test('fund containing multi-level nested deps with no funding, using --json opti
dependencies: {
lorem: {
version: '1.0.0',
- funding: { url: 'https://example.com/lorem' }
+ funding: { url: 'https://example.com/lorem' },
},
bar: {
version: '1.0.0',
- funding: { type: 'individual', url: 'http://example.com/donate' }
- }
- }
+ funding: { type: 'individual', url: 'http://example.com/donate' },
+ },
+ },
},
'should omit dependencies with no funding declared in json output'
)
@@ -348,39 +352,39 @@ test('fund containing multi-level nested deps with no funding, using --json opti
version: '1.0.0',
funding: [
{
- url: 'https://one.example.com'
+ url: 'https://one.example.com',
},
{
- url: 'https://two.example.com'
- }
+ url: 'https://two.example.com',
+ },
],
dependencies: {
bar: {
version: '1.0.0',
funding: [
{
- url: 'http://collective.example.com'
+ url: 'http://collective.example.com',
},
{
- url: 'http://sponsors.example.com/you'
- }
- ]
+ url: 'http://sponsors.example.com/you',
+ },
+ ],
},
foo: {
version: '1.0.0',
funding: [
{
- url: 'http://example.com'
+ url: 'http://example.com',
},
{
- url: 'http://sponsors.example.com/me'
+ url: 'http://sponsors.example.com/me',
},
{
- url: 'http://collective.example.com'
- }
- ]
- }
- }
+ url: 'http://collective.example.com',
+ },
+ ],
+ },
+ },
},
'should list multiple funding entries in json output'
)
@@ -440,8 +444,8 @@ test('fund using string shorthand', t => {
'package.json': JSON.stringify({
name: 'funding-string-shorthand',
version: '0.0.0',
- funding: 'https://example.com/sponsor'
- })
+ funding: 'https://example.com/sponsor',
+ }),
})
fund(['.'], (err) => {
@@ -469,18 +473,18 @@ test('fund using symlink ref', t => {
_flatOptions.prefix = t.testdir({
'package.json': JSON.stringify({
name: 'using-symlink-ref',
- version: '1.0.0'
+ version: '1.0.0',
}),
a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
- funding: 'http://example.com/a'
- })
+ funding: 'http://example.com/a',
+ }),
},
node_modules: {
- a: t.fixture('symlink', '../a')
- }
+ a: t.fixture('symlink', '../a'),
+ },
})
// using symlinked ref
@@ -515,33 +519,33 @@ test('fund using data from actual tree', t => {
_flatOptions.prefix = t.testdir({
'package.json': JSON.stringify({
name: 'using-actual-tree',
- version: '1.0.0'
+ version: '1.0.0',
}),
node_modules: {
a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
- funding: 'http://example.com/a'
- })
+ funding: 'http://example.com/a',
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
- funding: 'http://example.com/b'
+ funding: 'http://example.com/b',
}),
node_modules: {
a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.1',
- funding: 'http://example.com/_AAA'
- })
- }
- }
- }
- }
+ funding: 'http://example.com/_AAA',
+ }),
+ },
+ },
+ },
+ },
})
// using symlinked ref
@@ -595,7 +599,7 @@ test('fund using package argument with no browser, using --json option', t => {
JSON.parse(printUrl),
{
title: 'individual funding available at the following URL',
- url: 'http://example.com/donate'
+ url: 'http://example.com/donate',
},
'should open funding url using json output'
)
@@ -676,8 +680,8 @@ test('fund pkg missing version number', t => {
_flatOptions.prefix = t.testdir({
'package.json': JSON.stringify({
name: 'foo',
- funding: 'http://example.com/foo'
- })
+ funding: 'http://example.com/foo',
+ }),
})
fund([], (err) => {
@@ -693,8 +697,8 @@ test('fund a package throws on openUrl', t => {
'package.json': JSON.stringify({
name: 'foo',
version: '1.0.0',
- funding: 'http://npmjs.org'
- })
+ funding: 'http://npmjs.org',
+ }),
})
fund(['.'], (err) => {
@@ -711,14 +715,14 @@ test('fund a package with type and multiple sources', t => {
funding: [
{
type: 'Foo',
- url: 'http://example.com/foo'
+ url: 'http://example.com/foo',
},
{
type: 'Lorem',
- url: 'http://example.com/foo-lorem'
- }
- ]
- })
+ url: 'http://example.com/foo-lorem',
+ },
+ ],
+ }),
})
fund(['.'], (err) => {
@@ -738,16 +742,16 @@ test('fund colors', t => {
dependencies: {
a: '^1.0.0',
b: '^1.0.0',
- c: '^1.0.0'
- }
+ c: '^1.0.0',
+ },
}),
node_modules: {
a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
- funding: 'http://example.com/a'
- })
+ funding: 'http://example.com/a',
+ }),
},
b: {
'package.json': JSON.stringify({
@@ -756,32 +760,32 @@ test('fund colors', t => {
funding: 'http://example.com/b',
dependencies: {
d: '^1.0.0',
- e: '^1.0.0'
- }
- })
+ e: '^1.0.0',
+ },
+ }),
},
c: {
'package.json': JSON.stringify({
name: 'c',
version: '1.0.0',
- funding: 'http://example.com/b'
- })
+ funding: 'http://example.com/b',
+ }),
},
d: {
'package.json': JSON.stringify({
name: 'd',
version: '1.0.0',
- funding: 'http://example.com/d'
- })
+ funding: 'http://example.com/d',
+ }),
},
e: {
'package.json': JSON.stringify({
name: 'e',
version: '1.0.0',
- funding: 'http://example.com/e'
- })
- }
- }
+ funding: 'http://example.com/e',
+ }),
+ },
+ },
})
_flatOptions.color = true
@@ -802,8 +806,8 @@ test('sub dep with fund info and a parent with no funding info', t => {
version: '1.0.0',
dependencies: {
a: '^1.0.0',
- b: '^1.0.0'
- }
+ b: '^1.0.0',
+ },
}),
node_modules: {
a: {
@@ -811,16 +815,16 @@ test('sub dep with fund info and a parent with no funding info', t => {
name: 'a',
version: '1.0.0',
dependencies: {
- c: '^1.0.0'
- }
- })
+ c: '^1.0.0',
+ },
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
- funding: 'http://example.com/b'
- })
+ funding: 'http://example.com/b',
+ }),
},
c: {
'package.json': JSON.stringify({
@@ -828,11 +832,11 @@ test('sub dep with fund info and a parent with no funding info', t => {
version: '1.0.0',
funding: [
'http://example.com/c',
- 'http://example.com/c-other'
- ]
- })
- }
- }
+ 'http://example.com/c-other',
+ ],
+ }),
+ },
+ },
})
fund([], (err) => {
diff --git a/deps/npm/test/lib/get.js b/deps/npm/test/lib/get.js
index 5f2f29bd92..5260c00bae 100644
--- a/deps/npm/test/lib/get.js
+++ b/deps/npm/test/lib/get.js
@@ -9,9 +9,9 @@ test('should retrieve values from npm.commands.config', (t) => {
t.equal(action, 'get', 'should use config get action')
t.equal(arg, 'foo', 'should use expected key')
t.end()
- }
- }
- }
+ },
+ },
+ },
})
get(['foo'])
diff --git a/deps/npm/test/lib/init.js b/deps/npm/test/lib/init.js
new file mode 100644
index 0000000000..cb15eac8fc
--- /dev/null
+++ b/deps/npm/test/lib/init.js
@@ -0,0 +1,211 @@
+const t = require('tap')
+const requireInject = require('require-inject')
+
+let result = ''
+const npmLog = {
+ disableProgress: () => null,
+ enableProgress: () => null,
+ info: () => null,
+ pause: () => null,
+ resume: () => null,
+ silly: () => null,
+}
+const npm = {
+ config: { set () {} },
+ flatOptions: {},
+ log: npmLog,
+}
+const mocks = {
+ 'init-package-json': (dir, initFile, config, cb) => cb(null, 'data'),
+ '../../lib/npm.js': npm,
+ '../../lib/utils/usage.js': () => 'usage instructions',
+ '../../lib/utils/output.js': (...msg) => {
+ result += msg.join('\n')
+ },
+}
+const init = requireInject('../../lib/init.js', mocks)
+
+t.afterEach(cb => {
+ result = ''
+ npm.config = { get: () => '', set () {} }
+ npm.commands = {}
+ npm.flatOptions = {}
+ npm.log = npmLog
+ cb()
+})
+
+t.test('classic npm init no args', t => {
+ npm.config = {
+ get () {
+ return '~/.npm-init.js'
+ },
+ }
+ init([], err => {
+ t.ifError(err, 'npm init no args')
+ t.matchSnapshot(result, 'should print helper info')
+ t.end()
+ })
+})
+
+t.test('classic npm init -y', t => {
+ t.plan(7)
+ npm.config = {
+ get: () => '~/.npm-init.js',
+ }
+ npm.flatOptions = {
+ yes: true,
+ }
+ npm.log = { ...npm.log }
+ npm.log.silly = (title, msg) => {
+ t.equal(title, 'package data', 'should print title')
+ t.equal(msg, 'data', 'should print pkg data info')
+ }
+ npm.log.resume = () => {
+ t.ok('should resume logs')
+ }
+ npm.log.info = (title, msg) => {
+ t.equal(title, 'init', 'should print title')
+ t.equal(msg, 'written successfully', 'should print done info')
+ }
+ init([], err => {
+ t.ifError(err, 'npm init -y')
+ t.equal(result, '')
+ })
+})
+
+t.test('npm init <arg>', t => {
+ t.plan(4)
+ npm.config = {
+ set (key, val) {
+ t.equal(key, 'package', 'should set package key')
+ t.deepEqual(val, [], 'should set empty array value')
+ },
+ }
+ npm.commands.exec = (arr, cb) => {
+ t.deepEqual(
+ arr,
+ ['create-react-app'],
+ 'should npx with listed packages'
+ )
+ cb()
+ }
+ init(['react-app'], err => {
+ t.ifError(err, 'npm init react-app')
+ })
+})
+
+t.test('npm init @scope/name', t => {
+ t.plan(2)
+ npm.commands.exec = (arr, cb) => {
+ t.deepEqual(
+ arr,
+ ['@npmcli/create-something'],
+ 'should npx with scoped packages'
+ )
+ cb()
+ }
+ init(['@npmcli/something'], err => {
+    t.ifError(err, 'npm init @scope/name')
+ })
+})
+
+t.test('npm init git spec', t => {
+ t.plan(2)
+ npm.commands.exec = (arr, cb) => {
+ t.deepEqual(
+ arr,
+ ['npm/create-something'],
+ 'should npx with git-spec packages'
+ )
+ cb()
+ }
+ init(['npm/something'], err => {
+    t.ifError(err, 'npm init npm/something')
+ })
+})
+
+t.test('npm init @scope', t => {
+ t.plan(2)
+ npm.commands.exec = (arr, cb) => {
+ t.deepEqual(
+ arr,
+ ['@npmcli/create'],
+ 'should npx with @scope/create pkgs'
+ )
+ cb()
+ }
+ init(['@npmcli'], err => {
+    t.ifError(err, 'npm init @scope/create')
+ })
+})
+
+t.test('npm init tgz', t => {
+ init(['something.tgz'], err => {
+ t.match(
+ err,
+ /Error: Unrecognized initializer: something.tgz/,
+ 'should throw error when using an unsupported spec'
+ )
+ t.end()
+ })
+})
+
+t.test('npm init <arg>@next', t => {
+ t.plan(2)
+ npm.commands.exec = (arr, cb) => {
+ t.deepEqual(
+ arr,
+ ['create-something@next'],
+ 'should npx with something@next'
+ )
+ cb()
+ }
+ init(['something@next'], err => {
+    t.ifError(err, 'npm init something@next')
+ })
+})
+
+t.test('npm init exec error', t => {
+ npm.commands.exec = (arr, cb) => {
+ cb(new Error('ERROR'))
+ }
+ init(['something@next'], err => {
+ t.match(
+ err,
+ /ERROR/,
+ 'should exit with exec error'
+ )
+ t.end()
+ })
+})
+
+t.test('npm init cancel', t => {
+ t.plan(3)
+ const init = requireInject('../../lib/init.js', {
+ ...mocks,
+ 'init-package-json': (dir, initFile, config, cb) => cb(
+ new Error('canceled')
+ ),
+ })
+ npm.log = { ...npm.log }
+ npm.log.warn = (title, msg) => {
+ t.equal(title, 'init', 'should have init title')
+ t.equal(msg, 'canceled', 'should log canceled')
+ }
+ init([], err => {
+ t.ifError(err, 'npm init cancel')
+ })
+})
+
+t.test('npm init error', t => {
+ const init = requireInject('../../lib/init.js', {
+ ...mocks,
+ 'init-package-json': (dir, initFile, config, cb) => cb(
+ new Error('Unknown Error')
+ ),
+ })
+ init([], err => {
+ t.match(err, /Unknown Error/, 'should throw error')
+ t.end()
+ })
+})
diff --git a/deps/npm/test/lib/install.js b/deps/npm/test/lib/install.js
index 1650dcb8c0..7e243e7ff3 100644
--- a/deps/npm/test/lib/install.js
+++ b/deps/npm/test/lib/install.js
@@ -14,17 +14,17 @@ test('should install using Arborist', (t) => {
globalDir: 'path/to/node_modules/',
prefix: 'foo',
flatOptions: {
- global: false
+ global: false,
},
config: {
- get: () => true
- }
+ get: () => true,
+ },
},
'@npmcli/run-script': ({ event }) => {
SCRIPTS.push(event)
},
- 'npmlog': {
- warn: () => {}
+ npmlog: {
+ warn: () => {},
},
'@npmcli/arborist': function (args) {
ARB_ARGS = args
@@ -33,15 +33,16 @@ test('should install using Arborist', (t) => {
REIFY_CALLED = true
}
},
- '../../lib/utils/reify-output.js': arb => {
- if (arb !== ARB_OBJ) {
- throw new Error('got wrong object passed to reify-output')
- }
- }
+ '../../lib/utils/reify-finish.js': arb => {
+ if (arb !== ARB_OBJ)
+ throw new Error('got wrong object passed to reify-finish')
+ },
})
t.test('with args', t => {
- install(['fizzbuzz'], () => {
+ install(['fizzbuzz'], er => {
+ if (er)
+ throw er
t.match(ARB_ARGS, { global: false, path: 'foo' })
t.equal(REIFY_CALLED, true, 'called reify')
t.strictSame(SCRIPTS, [], 'no scripts when adding dep')
@@ -50,7 +51,9 @@ test('should install using Arborist', (t) => {
})
t.test('just a local npm install', t => {
- install([], () => {
+ install([], er => {
+ if (er)
+ throw er
t.match(ARB_ARGS, { global: false, path: 'foo' })
t.equal(REIFY_CALLED, true, 'called reify')
t.strictSame(SCRIPTS, [
@@ -60,7 +63,7 @@ test('should install using Arborist', (t) => {
'prepublish',
'preprepare',
'prepare',
- 'postprepare'
+ 'postprepare',
], 'exec scripts when doing local build')
t.end()
})
@@ -71,42 +74,45 @@ test('should install using Arborist', (t) => {
test('should install globally using Arborist', (t) => {
const install = requireInject('../../lib/install.js', {
+ '../../lib/utils/reify-finish.js': async () => {},
'../../lib/npm.js': {
globalDir: 'path/to/node_modules/',
prefix: 'foo',
flatOptions: {
- 'global': 'true',
+ global: 'true',
},
config: {
- get: () => false
- }
+ get: () => false,
+ },
},
'@npmcli/arborist': function () {
this.reify = () => {}
},
})
- install([], () => {
+ install([], er => {
+ if (er)
+ throw er
t.end()
})
})
test('completion to folder', (t) => {
const install = requireInject('../../lib/install.js', {
- 'util': {
- 'promisify': (fn) => fn
+ '../../lib/utils/reify-finish.js': async () => {},
+ util: {
+ promisify: (fn) => fn,
},
- 'fs': {
- 'readdir': (path) => {
- if (path === '/') {
+ fs: {
+ readdir: (path) => {
+ if (path === '/')
return ['arborist']
- } else {
+ else
return ['package.json']
- }
- }
- }
+ },
+ },
})
install.completion({
- partialWord: '/ar'
+ partialWord: '/ar',
}, (er, res) => {
t.equal(er, null)
const expect = process.platform === 'win32' ? '\\arborist' : '/arborist'
@@ -117,17 +123,18 @@ test('completion to folder', (t) => {
test('completion to folder - invalid dir', (t) => {
const install = requireInject('../../lib/install.js', {
- 'util': {
- 'promisify': (fn) => fn
+ '../../lib/utils/reify-finish.js': async () => {},
+ util: {
+ promisify: (fn) => fn,
},
- 'fs': {
- 'readdir': () => {
+ fs: {
+ readdir: () => {
throw new Error('EONT')
- }
- }
+ },
+ },
})
install.completion({
- partialWord: 'path/to/folder'
+ partialWord: 'path/to/folder',
}, (er, res) => {
t.equal(er, null)
t.strictSame(res, [], 'invalid dir: no matching')
@@ -137,17 +144,18 @@ test('completion to folder - invalid dir', (t) => {
test('completion to folder - no matches', (t) => {
const install = requireInject('../../lib/install.js', {
- 'util': {
- 'promisify': (fn) => fn
+ '../../lib/utils/reify-finish.js': async () => {},
+ util: {
+ promisify: (fn) => fn,
},
- 'fs': {
- 'readdir': (path) => {
+ fs: {
+ readdir: (path) => {
return ['foobar']
- }
- }
+ },
+ },
})
install.completion({
- partialWord: '/pa'
+ partialWord: '/pa',
}, (er, res) => {
t.equal(er, null)
t.strictSame(res, [], 'no name match')
@@ -157,21 +165,21 @@ test('completion to folder - no matches', (t) => {
test('completion to folder - match is not a package', (t) => {
const install = requireInject('../../lib/install.js', {
- 'util': {
- 'promisify': (fn) => fn
+ '../../lib/utils/reify-finish.js': async () => {},
+ util: {
+ promisify: (fn) => fn,
},
- 'fs': {
- 'readdir': (path) => {
- if (path === '/') {
+ fs: {
+ readdir: (path) => {
+ if (path === '/')
return ['arborist']
- } else {
+ else
throw new Error('EONT')
- }
- }
- }
+ },
+ },
})
install.completion({
- partialWord: '/ar'
+ partialWord: '/ar',
}, (er, res) => {
t.equal(er, null)
t.strictSame(res, [], 'no name match')
@@ -181,7 +189,7 @@ test('completion to folder - match is not a package', (t) => {
test('completion to url', (t) => {
install.completion({
- partialWord: 'http://path/to/url'
+ partialWord: 'http://path/to/url',
}, (er, res) => {
t.equal(er, null)
t.strictSame(res, [])
@@ -191,7 +199,7 @@ test('completion to url', (t) => {
test('completion', (t) => {
install.completion({
- partialWord: 'toto'
+ partialWord: 'toto',
}, (er, res) => {
t.notOk(er)
t.notOk(res)
diff --git a/deps/npm/test/lib/link.js b/deps/npm/test/lib/link.js
index aafdb8188e..9b7c5df642 100644
--- a/deps/npm/test/lib/link.js
+++ b/deps/npm/test/lib/link.js
@@ -20,8 +20,10 @@ const npm = {
prefix: null,
flatOptions: {},
config: {
- get () { return false }
- }
+ get () {
+ return false
+ },
+ },
}
const printLinks = async (opts) => {
let res = ''
@@ -30,16 +32,15 @@ const printLinks = async (opts) => {
const linkedItems = [...tree.inventory.values()]
.sort((a, b) => a.pkgid.localeCompare(b.pkgid))
for (const item of linkedItems) {
- if (item.target) {
+ if (item.target)
res += `${item.path} -> ${item.target.path}\n`
- }
}
return res
}
const mocks = {
'../../lib/npm.js': npm,
- '../../lib/utils/reify-output.js': () => reifyOutput()
+ '../../lib/utils/reify-output.js': () => reifyOutput(),
}
const link = requireInject('../../lib/link.js', mocks)
@@ -54,18 +55,18 @@ t.test('link to globalDir when in current working dir of pkg and no args', (t) =
a: {
'package.json': JSON.stringify({
name: 'a',
- version: '1.0.0'
- })
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
},
'test-pkg-link': {
'package.json': JSON.stringify({
name: 'test-pkg-link',
- version: '1.0.0'
- })
- }
+ version: '1.0.0',
+ }),
+ },
})
npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules')
npm.prefix = resolve(testdir, 'test-pkg-link')
@@ -75,7 +76,7 @@ t.test('link to globalDir when in current working dir of pkg and no args', (t) =
const links = await printLinks({
path: resolve(npm.globalDir, '..'),
- global: true
+ global: true,
})
t.matchSnapshot(links, 'should create a global link to current pkg')
@@ -97,68 +98,68 @@ t.test('link global linked pkg to local nm when using args', (t) => {
foo: {
'package.json': JSON.stringify({
name: '@myscope/foo',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
bar: {
'package.json': JSON.stringify({
name: '@myscope/bar',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
- linked: t.fixture('symlink', '../../../../scoped-linked')
+ linked: t.fixture('symlink', '../../../../scoped-linked'),
},
a: {
'package.json': JSON.stringify({
name: 'a',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
- 'test-pkg-link': t.fixture('symlink', '../../../test-pkg-link')
- }
- }
+ 'test-pkg-link': t.fixture('symlink', '../../../test-pkg-link'),
+ },
+ },
},
'test-pkg-link': {
'package.json': JSON.stringify({
name: 'test-pkg-link',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
'link-me-too': {
'package.json': JSON.stringify({
name: 'link-me-too',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
'scoped-linked': {
'package.json': JSON.stringify({
name: '@myscope/linked',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
'my-project': {
'package.json': JSON.stringify({
name: 'my-project',
version: '1.0.0',
dependencies: {
- foo: '^1.0.0'
- }
+ foo: '^1.0.0',
+ },
}),
node_modules: {
foo: {
'package.json': JSON.stringify({
name: 'foo',
- version: '1.0.0'
- })
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
})
npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules')
npm.prefix = resolve(testdir, 'my-project')
@@ -171,7 +172,7 @@ t.test('link global linked pkg to local nm when using args', (t) => {
process.chdir(_cwd)
const links = await printLinks({
- path: npm.prefix
+ path: npm.prefix,
})
t.matchSnapshot(links, 'should create a local symlink to global pkg')
@@ -188,7 +189,7 @@ t.test('link global linked pkg to local nm when using args', (t) => {
'@myscope/linked',
'@myscope/bar',
'a',
- 'file:../link-me-too'
+ 'file:../link-me-too',
], (err) => {
t.ifError(err, 'should not error out')
})
@@ -202,23 +203,23 @@ t.test('link pkg already in global space', (t) => {
lib: {
node_modules: {
'@myscope': {
- linked: t.fixture('symlink', '../../../../scoped-linked')
- }
- }
- }
+ linked: t.fixture('symlink', '../../../../scoped-linked'),
+ },
+ },
+ },
},
'scoped-linked': {
'package.json': JSON.stringify({
name: '@myscope/linked',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
'my-project': {
'package.json': JSON.stringify({
name: 'my-project',
- version: '1.0.0'
- })
- }
+ version: '1.0.0',
+ }),
+ },
})
npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules')
npm.prefix = resolve(testdir, 'my-project')
@@ -231,7 +232,7 @@ t.test('link pkg already in global space', (t) => {
process.chdir(_cwd)
const links = await printLinks({
- path: npm.prefix
+ path: npm.prefix,
})
t.matchSnapshot(links, 'should create a local symlink to global pkg')
@@ -256,10 +257,10 @@ t.test('completion', (t) => {
foo: {},
bar: {},
lorem: {},
- ipsum: {}
- }
- }
- }
+ ipsum: {},
+ },
+ },
+ },
})
npm.globalDir = resolve(testdir, 'global-prefix', 'lib', 'node_modules')
@@ -276,7 +277,9 @@ t.test('completion', (t) => {
t.test('--global option', (t) => {
const _config = npm.config
- npm.config = { get () { return true } }
+ npm.config = { get () {
+ return true
+ } }
link([], (err) => {
npm.config = _config
diff --git a/deps/npm/test/lib/ll.js b/deps/npm/test/lib/ll.js
index 989800944f..7d4e2b94f2 100644
--- a/deps/npm/test/lib/ll.js
+++ b/deps/npm/test/lib/ll.js
@@ -7,15 +7,15 @@ const ll = requireInject('../../lib/ll.js', {
config: {
set: (k, v) => {
configs[k] = v
- }
+ },
},
commands: {
ls: (args, cb) => {
lsCalled = true
cb()
- }
- }
- }
+ },
+ },
+ },
})
const ls = require('../../lib/ls.js')
diff --git a/deps/npm/test/lib/load-all-commands.js b/deps/npm/test/lib/load-all-commands.js
index 1669f435e9..fa73b8a78d 100644
--- a/deps/npm/test/lib/load-all-commands.js
+++ b/deps/npm/test/lib/load-all-commands.js
@@ -4,9 +4,8 @@ const t = require('tap')
const { cmdList } = require('../../lib/utils/cmd-list.js')
t.test('load npm', t => npm.load(er => {
- if (er) {
+ if (er)
throw er
- }
}))
t.test('load each command', t => {
diff --git a/deps/npm/test/lib/load-all.js b/deps/npm/test/lib/load-all.js
index 72879c2c44..02736c18cc 100644
--- a/deps/npm/test/lib/load-all.js
+++ b/deps/npm/test/lib/load-all.js
@@ -4,9 +4,9 @@ const { resolve } = require('path')
const full = process.env.npm_lifecycle_event === 'check-coverage'
-if (!full) {
+if (!full)
t.pass('nothing to do here, not checking for full coverage')
-} else {
+else {
// some files do config.get() on load, so have to load npm first
const npm = require('../../lib/npm.js')
t.test('load npm first', t => npm.load(t.end))
diff --git a/deps/npm/test/lib/logout.js b/deps/npm/test/lib/logout.js
index 0d00422dc8..96b1bcc7fe 100644
--- a/deps/npm/test/lib/logout.js
+++ b/deps/npm/test/lib/logout.js
@@ -3,7 +3,7 @@ const { test } = require('tap')
const _flatOptions = {
registry: 'https://registry.npmjs.org/',
- scope: ''
+ scope: '',
}
const config = {}
@@ -19,8 +19,8 @@ const mocks = {
'npm-registry-fetch': npmFetch,
'../../lib/npm.js': {
flatOptions: _flatOptions,
- config
- }
+ config,
+ },
}
const logout = requireInject('../../lib/logout.js', mocks)
@@ -64,8 +64,8 @@ test('token logout', async (t) => {
scope: '',
token: '@foo/',
method: 'DELETE',
- ignoreBody: true
- }
+ ignoreBody: true,
+ },
},
'should call npm-registry-fetch with expected values'
)
@@ -134,8 +134,8 @@ test('token scoped logout', async (t) => {
scope: '@myscope',
token: '@foo/',
method: 'DELETE',
- ignoreBody: true
- }
+ ignoreBody: true,
+ },
},
'should call npm-registry-fetch with expected values'
)
@@ -241,8 +241,8 @@ test('ignore invalid scoped registry config', async (t) => {
'@myscope:registry': '',
token: '@foo/',
method: 'DELETE',
- ignoreBody: true
- }
+ ignoreBody: true,
+ },
},
'should call npm-registry-fetch with expected values'
)
diff --git a/deps/npm/test/lib/ls.js b/deps/npm/test/lib/ls.js
index 6a91e8c352..256ebf3534 100644
--- a/deps/npm/test/lib/ls.js
+++ b/deps/npm/test/lib/ls.js
@@ -7,28 +7,28 @@ t.cleanSnapshot = str => str.split(/\r\n/).join('\n')
const simpleNmFixture = {
node_modules: {
- 'foo': {
+ foo: {
'package.json': JSON.stringify({
name: 'foo',
version: '1.0.0',
dependencies: {
- 'bar': '^1.0.0'
- }
- })
+ bar: '^1.0.0',
+ },
+ }),
},
- 'bar': {
+ bar: {
'package.json': JSON.stringify({
name: 'bar',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
- 'lorem': {
+ lorem: {
'package.json': JSON.stringify({
name: 'lorem',
- version: '1.0.0'
- })
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
}
const diffDepTypesNmFixture = {
@@ -39,9 +39,9 @@ const diffDepTypesNmFixture = {
description: 'A DEV dep kind of dep',
version: '1.0.0',
dependencies: {
- 'foo': '^1.0.0'
- }
- })
+ foo: '^1.0.0',
+ },
+ }),
},
'prod-dep': {
'package.json': JSON.stringify({
@@ -49,35 +49,35 @@ const diffDepTypesNmFixture = {
description: 'A PROD dep kind of dep',
version: '1.0.0',
dependencies: {
- 'bar': '^2.0.0'
- }
+ bar: '^2.0.0',
+ },
}),
node_modules: {
bar: {
'package.json': JSON.stringify({
name: 'bar',
description: 'A dep that bars',
- version: '2.0.0'
- })
- }
- }
+ version: '2.0.0',
+ }),
+ },
+ },
},
'optional-dep': {
'package.json': JSON.stringify({
name: 'optional-dep',
description: 'Maybe a dep?',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
'peer-dep': {
'package.json': JSON.stringify({
name: 'peer-dep',
description: 'Peer-dep description here',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
- ...simpleNmFixture.node_modules
- }
+ ...simpleNmFixture.node_modules,
+ },
}
let prefix
@@ -95,24 +95,32 @@ const _flatOptions = {
link: false,
only: null,
parseable: false,
- get prefix () { return prefix },
- production: false
+ get prefix () {
+ return prefix
+ },
+ production: false,
}
const ls = requireInject('../../lib/ls.js', {
'../../lib/npm.js': {
flatOptions: _flatOptions,
limit: {
- fetch: 3
+ fetch: 3,
+ },
+ get prefix () {
+ return _flatOptions.prefix
+ },
+ get globalDir () {
+ return globalDir
},
- get prefix () { return _flatOptions.prefix },
- get globalDir () { return globalDir },
config: {
get (key) {
return _flatOptions[key]
- }
- }
+ },
+ },
+ },
+ '../../lib/utils/output.js': msg => {
+ result = msg
},
- '../../lib/utils/output.js': msg => { result = msg }
})
const redactCwd = res =>
@@ -136,10 +144,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -150,7 +158,7 @@ t.test('ls', (t) => {
t.test('missing package.json', (t) => {
prefix = t.testdir({
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code')
@@ -169,10 +177,10 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- foo: '^1.0.0'
- }
+ foo: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.equal(err.code, 'ELSPROBLEMS', 'should have error code')
@@ -194,10 +202,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['lorem'], (err) => {
t.ifError(err, 'npm ls')
@@ -216,10 +224,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['.'], (err) => {
t.ifError(err, 'should not throw on missing dep above current level')
@@ -237,10 +245,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['bar'], (err) => {
t.ifError(err, 'npm ls')
@@ -257,18 +265,18 @@ t.test('ls', (t) => {
dependencies: {
foo: '^1.0.0',
lorem: '^1.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
node_modules: {
...simpleNmFixture.node_modules,
ipsum: {
'package.json': JSON.stringify({
name: 'ipsum',
- version: '1.0.0'
- })
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
})
ls(['bar@*', 'lorem@1.0.0'], (err) => {
t.ifError(err, 'npm ls')
@@ -284,10 +292,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['notadep'], (err) => {
t.ifError(err, 'npm ls')
@@ -311,10 +319,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -334,10 +342,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -357,8 +365,8 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
a: '^1.0.0',
- e: '^1.0.0'
- }
+ e: '^1.0.0',
+ },
}),
node_modules: {
a: {
@@ -366,9 +374,9 @@ t.test('ls', (t) => {
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^1.0.0'
- }
- })
+ b: '^1.0.0',
+ },
+ }),
},
b: {
'package.json': JSON.stringify({
@@ -376,29 +384,29 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
c: '^1.0.0',
- d: '*'
- }
- })
+ d: '*',
+ },
+ }),
},
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
d: {
'package.json': JSON.stringify({
name: 'd',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
e: {
'package.json': JSON.stringify({
name: 'e',
- version: '1.0.0'
- })
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -416,10 +424,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^2.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.equal(err.code, 'ELSPROBLEMS', 'should have error code')
@@ -443,10 +451,10 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
foo: '^2.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.equal(err.code, 'ELSPROBLEMS', 'should have error code')
@@ -464,19 +472,19 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps')
@@ -493,19 +501,19 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing only development deps')
@@ -522,29 +530,29 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0',
- 'linked-dep': '^1.0.0'
+ lorem: '^1.0.0',
+ 'linked-dep': '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
'linked-dep': {
'package.json': JSON.stringify({
name: 'linked-dep',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
node_modules: {
'linked-dep': t.fixture('symlink', '../linked-dep'),
- ...diffDepTypesNmFixture.node_modules
- }
+ ...diffDepTypesNmFixture.node_modules,
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps')
@@ -559,15 +567,15 @@ t.test('ls', (t) => {
name: 'print-deduped-symlinks',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0',
- 'b': '^1.0.0'
- }
+ a: '^1.0.0',
+ b: '^1.0.0',
+ },
}),
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
node_modules: {
a: {
@@ -575,12 +583,12 @@ t.test('ls', (t) => {
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^1.0.0'
- }
- })
+ b: '^1.0.0',
+ },
+ }),
},
- 'b': t.fixture('symlink', '../b')
- }
+ b: t.fixture('symlink', '../b'),
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps')
@@ -597,19 +605,19 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing production deps')
@@ -626,19 +634,19 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing only prod deps')
@@ -655,19 +663,19 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions')
@@ -686,19 +694,19 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions')
@@ -711,7 +719,7 @@ t.test('ls', (t) => {
t.test('json read problems', (t) => {
prefix = t.testdir({
- 'package.json': '{broken json'
+ 'package.json': '{broken json',
})
ls([], (err) => {
t.match(err, { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error')
@@ -736,19 +744,19 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^2.0.0' // mismatching version #
- }
+ 'peer-dep': '^2.0.0', // mismatching version #
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree signaling mismatching peer dep in problems')
@@ -763,9 +771,9 @@ t.test('ls', (t) => {
name: 'invalid-deduped-dep',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0',
- 'b': '^2.0.0'
- }
+ a: '^1.0.0',
+ b: '^2.0.0',
+ },
}),
node_modules: {
a: {
@@ -773,17 +781,17 @@ t.test('ls', (t) => {
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^2.0.0'
- }
- })
+ b: '^2.0.0',
+ },
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
- })
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree signaling mismatching peer dep in problems')
@@ -798,9 +806,9 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0',
- 'b': '^1.0.0'
- }
+ a: '^1.0.0',
+ b: '^1.0.0',
+ },
}),
node_modules: {
a: {
@@ -808,11 +816,11 @@ t.test('ls', (t) => {
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^1.0.0'
- }
- })
- }
- }
+ b: '^1.0.0',
+ },
+ }),
+ },
+ },
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code')
@@ -828,9 +836,9 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
peerDependencies: {
- 'peer-dep': '*'
- }
- })
+ 'peer-dep': '*',
+ },
+ }),
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code')
@@ -848,20 +856,20 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
'missing-optional-dep': '^1.0.0',
- 'optional-dep': '^2.0.0' // mismatching version #
+ 'optional-dep': '^2.0.0', // mismatching version #
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code')
@@ -878,29 +886,29 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0'
- }
+ a: '^1.0.0',
+ },
}),
node_modules: {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^1.0.0'
- }
- })
+ b: '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
dependencies: {
- a: '^1.0.0'
- }
- })
- }
- }
+ a: '^1.0.0',
+ },
+ }),
+ },
+ },
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -916,29 +924,29 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0'
- }
+ a: '^1.0.0',
+ },
}),
node_modules: {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^1.0.0'
- }
- })
+ b: '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
dependencies: {
- a: '^1.0.0'
- }
- })
- }
- }
+ a: '^1.0.0',
+ },
+ }),
+ },
+ },
})
ls(['a'], (err) => {
t.ifError(err, 'npm ls')
@@ -956,37 +964,37 @@ t.test('ls', (t) => {
dependencies: {
'@npmcli/a': '^1.0.0',
'@npmcli/b': '^1.0.0',
- '@npmcli/c': '^1.0.0'
- }
+ '@npmcli/c': '^1.0.0',
+ },
}),
node_modules: {
'@npmcli': {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: '@npmcli/a',
version: '1.0.0',
dependencies: {
- '@npmcli/b': '^1.0.0'
- }
- })
+ '@npmcli/b': '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: '@npmcli/b',
- version: '1.1.2'
- })
+ version: '1.1.2',
+ }),
},
- 'c': {
+ c: {
'package.json': JSON.stringify({
name: '@npmcli/c',
version: '1.0.0',
dependencies: {
- '@npmcli/b': '^1.0.0'
- }
- })
- }
- }
- }
+ '@npmcli/b': '^1.0.0',
+ },
+ }),
+ },
+ },
+ },
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1005,37 +1013,37 @@ t.test('ls', (t) => {
dependencies: {
'@npmcli/a': '^1.0.0',
'@npmcli/b': '^1.0.0',
- '@npmcli/c': '^1.0.0'
- }
+ '@npmcli/c': '^1.0.0',
+ },
}),
node_modules: {
'@npmcli': {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: '@npmcli/a',
version: '1.0.0',
dependencies: {
- '@npmcli/b': '^1.0.0'
- }
- })
+ '@npmcli/b': '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: '@npmcli/b',
- version: '1.1.2'
- })
+ version: '1.1.2',
+ }),
},
- 'c': {
+ c: {
'package.json': JSON.stringify({
name: '@npmcli/c',
version: '1.0.0',
dependencies: {
- '@npmcli/b': '^1.0.0'
- }
- })
- }
- }
- }
+ '@npmcli/b': '^1.0.0',
+ },
+ }),
+ },
+ },
+ },
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1055,37 +1063,37 @@ t.test('ls', (t) => {
dependencies: {
'@npmcli/a': '^1.0.0',
'@npmcli/b': '^1.0.0',
- '@npmcli/c': '^1.0.0'
- }
+ '@npmcli/c': '^1.0.0',
+ },
}),
node_modules: {
'@npmcli': {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: '@npmcli/a',
version: '1.0.0',
dependencies: {
- '@npmcli/b': '^1.0.0'
- }
- })
+ '@npmcli/b': '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: '@npmcli/b',
- version: '1.1.2'
- })
+ version: '1.1.2',
+ }),
},
- 'c': {
+ c: {
'package.json': JSON.stringify({
name: '@npmcli/c',
version: '1.0.0',
dependencies: {
- '@npmcli/b': '^1.0.0'
- }
- })
- }
- }
- }
+ '@npmcli/b': '^1.0.0',
+ },
+ }),
+ },
+ },
+ },
})
ls(['@npmcli/b'], (err) => {
t.ifError(err, 'npm ls')
@@ -1103,37 +1111,37 @@ t.test('ls', (t) => {
dependencies: {
'@npmcli/a': '^1.0.0',
'@npmcli/b': '^1.0.0',
- '@npmcli/c': '^1.0.0'
- }
+ '@npmcli/c': '^1.0.0',
+ },
}),
node_modules: {
'@npmcli': {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: '@npmcli/a',
version: '1.0.0',
dependencies: {
- '@npmcli/c': '^1.0.0'
- }
- })
+ '@npmcli/c': '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: '@npmcli/b',
version: '1.1.2',
dependencies: {
- '@npmcli/c': '^1.0.0'
- }
- })
+ '@npmcli/c': '^1.0.0',
+ },
+ }),
},
- 'c': {
+ c: {
'package.json': JSON.stringify({
name: '@npmcli/c',
- version: '1.0.0'
- })
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
})
ls(['@npmcli/c'], (err) => {
t.ifError(err, 'npm ls')
@@ -1148,22 +1156,22 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- a: 'npm:b@1.0.0'
- }
+ a: 'npm:b@1.0.0',
+ },
}),
node_modules: {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
_from: 'a@npm:b',
_resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz',
_requested: {
- type: 'alias'
- }
- })
- }
- }
+ type: 'alias',
+ },
+ }),
+ },
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing aliases')
@@ -1177,11 +1185,11 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'abbrev': 'git+https://github.com/isaacs/abbrev-js.git'
- }
+ abbrev: 'git+https://github.com/isaacs/abbrev-js.git',
+ },
}),
node_modules: {
- 'abbrev': {
+ abbrev: {
'package.json': JSON.stringify({
name: 'abbrev',
version: '1.1.1',
@@ -1194,11 +1202,11 @@ t.test('ls', (t) => {
rawSpec: 'git+https:github.com/isaacs/abbrev-js.git',
saveSpec: 'git+https://github.com/isaacs/abbrev-js.git',
fetchSpec: 'https://github.com/isaacs/abbrev-js.git',
- gitCommittish: null
- }
- })
- }
- }
+ gitCommittish: null,
+ },
+ }),
+ },
+ },
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1213,36 +1221,36 @@ t.test('ls', (t) => {
a: {
'package.json': JSON.stringify({
name: 'a',
- version: '1.0.1'
- })
- }
+ version: '1.0.1',
+ }),
+ },
},
'package-lock.json': JSON.stringify({
- 'name': 'npm-broken-resolved-field-test',
- 'version': '1.0.0',
- 'lockfileVersion': 2,
- 'requires': true,
- 'packages': {
+ name: 'npm-broken-resolved-field-test',
+ version: '1.0.0',
+ lockfileVersion: 2,
+ requires: true,
+ packages: {
'': {
- 'name': 'a',
- 'version': '1.0.1'
- }
+ name: 'a',
+ version: '1.0.1',
+ },
},
- 'dependencies': {
+ dependencies: {
a: {
- 'version': '1.0.1',
- 'resolved': 'foo@bar://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c',
- 'integrity': 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ=='
- }
- }
- }),
- 'package.json': JSON.stringify({
- 'name': 'npm-broken-resolved-field-test',
- 'version': '1.0.0',
- 'dependencies': {
- 'a': '^1.0.1'
- }
- })
+ version: '1.0.1',
+ resolved: 'foo@bar://b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c',
+ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==',
+ },
+ },
+ }),
+ 'package.json': JSON.stringify({
+ name: 'npm-broken-resolved-field-test',
+ version: '1.0.0',
+ dependencies: {
+ a: '^1.0.1',
+ },
+ }),
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1257,8 +1265,8 @@ t.test('ls', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'simple-output': '^2.0.0'
- }
+ 'simple-output': '^2.0.0',
+ },
}),
node_modules: {
'simple-output': {
@@ -1276,17 +1284,17 @@ t.test('ls', (t) => {
escapedName: 'simple-output',
rawSpec: '',
saveSpec: null,
- fetchSpec: 'latest'
+ fetchSpec: 'latest',
},
_requiredBy: [
'#USER',
- '/'
+ '/',
],
_shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc',
- _spec: 'simple-output'
- })
- }
- }
+ _spec: 'simple-output',
+ }),
+ },
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should not be printed in tree output')
@@ -1301,24 +1309,24 @@ t.test('ls', (t) => {
a: {
'package.json': JSON.stringify({
name: 'a',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
+ version: '1.0.0',
}),
node_modules: {
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '1.0.0'
- })
- }
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
+ },
})
// mimics lib/npm.js globalDir getter but pointing to fixtures
@@ -1338,8 +1346,8 @@ t.test('ls', (t) => {
name: 'filter-by-child-of-missing-dep',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0'
- }
+ a: '^1.0.0',
+ },
}),
node_modules: {
b: {
@@ -1347,34 +1355,34 @@ t.test('ls', (t) => {
name: 'b',
version: '1.0.0',
dependencies: {
- c: '^1.0.0'
- }
- })
+ c: '^1.0.0',
+ },
+ }),
},
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
d: {
'package.json': JSON.stringify({
name: 'd',
version: '1.0.0',
dependencies: {
- c: '^2.0.0'
- }
+ c: '^2.0.0',
+ },
}),
node_modules: {
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '2.0.0'
- })
- }
- }
- }
- }
+ version: '2.0.0',
+ }),
+ },
+ },
+ },
+ },
})
ls(['c'], (err) => {
@@ -1391,8 +1399,8 @@ t.test('ls', (t) => {
version: '1.0.0',
workspaces: [
'./a',
- './b'
- ]
+ './b',
+ ],
}),
node_modules: {
a: t.fixture('symlink', '../a'),
@@ -1400,25 +1408,25 @@ t.test('ls', (t) => {
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '1.0.0'
- })
- }
+ version: '1.0.0',
+ }),
+ },
},
a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
dependencies: {
- c: '^1.0.0'
- }
- })
+ c: '^1.0.0',
+ },
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
- })
- }
+ version: '1.0.0',
+ }),
+ },
})
ls([], (err) => {
@@ -1443,44 +1451,44 @@ t.test('ls', (t) => {
version: '1.0.0',
dependencies: {
a: '^1.0.0',
- b: '^1.0.0'
- }
+ b: '^1.0.0',
+ },
}),
node_modules: {
a: {
'package.json': JSON.stringify({
name: 'a',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
dependencies: {
- c: '^1.0.0'
- }
- })
+ c: '^1.0.0',
+ },
+ }),
},
c: {
'package.json': JSON.stringify({
name: 'c',
version: '1.0.0',
dependencies: {
- d: '^1.0.0'
- }
- })
+ d: '^1.0.0',
+ },
+ }),
},
d: {
'package.json': JSON.stringify({
name: 'd',
version: '1.0.0',
dependencies: {
- a: '^1.0.0'
- }
- })
- }
- }
+ a: '^1.0.0',
+ },
+ }),
+ },
+ },
})
t.plan(6)
@@ -1521,10 +1529,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1535,7 +1543,7 @@ t.test('ls --parseable', (t) => {
t.test('missing package.json', (t) => {
prefix = t.testdir({
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code')
@@ -1554,10 +1562,10 @@ t.test('ls --parseable', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- foo: '^1.0.0'
- }
+ foo: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.equal(err.code, 'ELSPROBLEMS', 'should have error code')
@@ -1573,10 +1581,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['lorem'], (err) => {
t.ifError(err, 'npm ls')
@@ -1592,10 +1600,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['bar'], (err) => {
t.ifError(err, 'npm ls')
@@ -1612,18 +1620,18 @@ t.test('ls --parseable', (t) => {
dependencies: {
foo: '^1.0.0',
lorem: '^1.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
node_modules: {
...simpleNmFixture.node_modules,
ipsum: {
'package.json': JSON.stringify({
name: 'ipsum',
- version: '1.0.0'
- })
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
})
ls(['bar@*', 'lorem@1.0.0'], (err) => {
t.ifError(err, 'npm ls')
@@ -1639,10 +1647,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['notadep'], (err) => {
t.ifError(err, 'npm ls')
@@ -1666,10 +1674,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1689,10 +1697,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1712,10 +1720,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -1733,10 +1741,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^2.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems')
@@ -1753,19 +1761,19 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing dev deps')
@@ -1782,19 +1790,19 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing only development deps')
@@ -1811,29 +1819,29 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0',
- 'linked-dep': '^1.0.0'
+ lorem: '^1.0.0',
+ 'linked-dep': '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
'linked-dep': {
'package.json': JSON.stringify({
name: 'linked-dep',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
node_modules: {
'linked-dep': t.fixture('symlink', '../linked-dep'),
- ...diffDepTypesNmFixture.node_modules
- }
+ ...diffDepTypesNmFixture.node_modules,
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing linked deps')
@@ -1850,19 +1858,19 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing production deps')
@@ -1879,19 +1887,19 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing only prod deps')
@@ -1908,19 +1916,19 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree info with descriptions')
@@ -1935,10 +1943,10 @@ t.test('ls --parseable', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- foo: '^1.0.0'
- }
+ foo: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.equal(err.code, 'ELSPROBLEMS', 'should have error code')
@@ -1956,10 +1964,10 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
foo: '^2.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems')
@@ -1977,29 +1985,29 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0',
- 'linked-dep': '^1.0.0'
+ lorem: '^1.0.0',
+ 'linked-dep': '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
'linked-dep': {
'package.json': JSON.stringify({
name: 'linked-dep',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
node_modules: {
'linked-dep': t.fixture('symlink', '../linked-dep'),
- ...diffDepTypesNmFixture.node_modules
- }
+ ...diffDepTypesNmFixture.node_modules,
+ },
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -2019,19 +2027,19 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing top-level deps with descriptions')
@@ -2044,7 +2052,7 @@ t.test('ls --parseable', (t) => {
t.test('json read problems', (t) => {
prefix = t.testdir({
- 'package.json': '{broken json'
+ 'package.json': '{broken json',
})
ls([], (err) => {
t.match(err, { code: 'EJSONPARSE' }, 'should throw EJSONPARSE error')
@@ -2069,19 +2077,19 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^2.0.0' // mismatching version #
- }
+ 'peer-dep': '^2.0.0', // mismatching version #
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output parseable signaling missing peer dep in problems')
@@ -2096,20 +2104,20 @@ t.test('ls --parseable', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
'missing-optional-dep': '^1.0.0',
- 'optional-dep': '^2.0.0' // mismatching version #
+ 'optional-dep': '^2.0.0', // mismatching version #
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code')
@@ -2125,29 +2133,29 @@ t.test('ls --parseable', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0'
- }
+ a: '^1.0.0',
+ },
}),
node_modules: {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^1.0.0'
- }
- })
+ b: '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
dependencies: {
- a: '^1.0.0'
- }
- })
- }
- }
+ a: '^1.0.0',
+ },
+ }),
+ },
+ },
})
ls([], () => {
      t.matchSnapshot(redactCwd(result), 'should print tree output omitting deduped ref')
@@ -2161,22 +2169,22 @@ t.test('ls --parseable', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- a: 'npm:b@1.0.0'
- }
+ a: 'npm:b@1.0.0',
+ },
}),
node_modules: {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
_from: 'a@npm:b',
_resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz',
_requested: {
- type: 'alias'
- }
- })
- }
- }
+ type: 'alias',
+ },
+ }),
+ },
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing aliases')
@@ -2190,11 +2198,11 @@ t.test('ls --parseable', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'abbrev': 'git+https://github.com/isaacs/abbrev-js.git'
- }
+ abbrev: 'git+https://github.com/isaacs/abbrev-js.git',
+ },
}),
node_modules: {
- 'abbrev': {
+ abbrev: {
'package.json': JSON.stringify({
name: 'abbrev',
version: '1.1.1',
@@ -2207,11 +2215,11 @@ t.test('ls --parseable', (t) => {
rawSpec: 'git+https:github.com/isaacs/abbrev-js.git',
saveSpec: 'git+https://github.com/isaacs/abbrev-js.git',
fetchSpec: 'https://github.com/isaacs/abbrev-js.git',
- gitCommittish: null
- }
- })
- }
- }
+ gitCommittish: null,
+ },
+ }),
+ },
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should output tree containing git refs')
@@ -2225,8 +2233,8 @@ t.test('ls --parseable', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'simple-output': '^2.0.0'
- }
+ 'simple-output': '^2.0.0',
+ },
}),
node_modules: {
'simple-output': {
@@ -2244,17 +2252,17 @@ t.test('ls --parseable', (t) => {
escapedName: 'simple-output',
rawSpec: '',
saveSpec: null,
- fetchSpec: 'latest'
+ fetchSpec: 'latest',
},
_requiredBy: [
'#USER',
- '/'
+ '/',
],
_shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc',
- _spec: 'simple-output'
- })
- }
- }
+ _spec: 'simple-output',
+ }),
+ },
+ },
})
ls([], () => {
t.matchSnapshot(redactCwd(result), 'should not be printed in tree output')
@@ -2269,24 +2277,24 @@ t.test('ls --parseable', (t) => {
a: {
'package.json': JSON.stringify({
name: 'a',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
+ version: '1.0.0',
}),
node_modules: {
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '1.0.0'
- })
- }
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
+ },
})
// mimics lib/npm.js globalDir getter but pointing to fixtures
@@ -2314,10 +2322,10 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -2326,19 +2334,19 @@ t.test('ls --json', (t) => {
{
name: 'test-npm-ls',
version: '1.0.0',
- 'dependencies': {
- 'foo': {
- 'version': '1.0.0',
- 'dependencies': {
- 'bar': {
- 'version': '1.0.0'
- }
- }
- },
- 'lorem': {
- 'version': '1.0.0'
- }
- }
+ dependencies: {
+ foo: {
+ version: '1.0.0',
+ dependencies: {
+ bar: {
+ version: '1.0.0',
+ },
+ },
+ },
+ lorem: {
+ version: '1.0.0',
+ },
+ },
},
'should output json representation of dependencies structure'
)
@@ -2348,46 +2356,46 @@ t.test('ls --json', (t) => {
t.test('missing package.json', (t) => {
prefix = t.testdir({
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems')
t.deepEqual(
jsonParse(result),
{
- 'problems': [
+ problems: [
'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar',
'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo',
- 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem'
+ 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem',
],
- 'dependencies': {
- 'bar': {
- 'version': '1.0.0',
- 'extraneous': true,
- 'problems': [
- 'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar'
- ]
- },
- 'foo': {
- 'version': '1.0.0',
- 'extraneous': true,
- 'problems': [
- 'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo'
+ dependencies: {
+ bar: {
+ version: '1.0.0',
+ extraneous: true,
+ problems: [
+ 'extraneous: bar@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/bar',
+ ],
+ },
+ foo: {
+ version: '1.0.0',
+ extraneous: true,
+ problems: [
+ 'extraneous: foo@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/foo',
+ ],
+ dependencies: {
+ bar: {
+ version: '1.0.0',
+ },
+ },
+ },
+ lorem: {
+ version: '1.0.0',
+ extraneous: true,
+ problems: [
+ 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem',
],
- 'dependencies': {
- 'bar': {
- 'version': '1.0.0'
- }
- }
- },
- 'lorem': {
- 'version': '1.0.0',
- 'extraneous': true,
- 'problems': [
- 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-package-json/node_modules/lorem'
- ]
- }
- }
+ },
+ },
},
'should output json missing name/version of top-level package'
)
@@ -2401,10 +2409,10 @@ t.test('ls --json', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- foo: '^1.0.0'
- }
+ foo: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.equal(
@@ -2422,26 +2430,26 @@ t.test('ls --json', (t) => {
{
name: 'test-npm-ls',
version: '1.0.0',
- 'problems': [
- 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem'
+ problems: [
+ 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem',
],
- 'dependencies': {
- 'foo': {
- 'version': '1.0.0',
- 'dependencies': {
- 'bar': {
- 'version': '1.0.0'
- }
- }
- },
- 'lorem': {
- 'version': '1.0.0',
- 'extraneous': true,
- 'problems': [
- 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem'
- ]
- }
- }
+ dependencies: {
+ foo: {
+ version: '1.0.0',
+ dependencies: {
+ bar: {
+ version: '1.0.0',
+ },
+ },
+ },
+ lorem: {
+ version: '1.0.0',
+ extraneous: true,
+ problems: [
+ 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-extraneous-deps/node_modules/lorem',
+ ],
+ },
+ },
},
'should output json containing problems info'
)
@@ -2456,10 +2464,10 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['lorem'], (err) => {
t.ifError(err, 'npm ls')
@@ -2468,11 +2476,11 @@ t.test('ls --json', (t) => {
{
name: 'test-npm-ls',
version: '1.0.0',
- 'dependencies': {
- 'lorem': {
- 'version': '1.0.0'
- }
- }
+ dependencies: {
+ lorem: {
+ version: '1.0.0',
+ },
+ },
},
'should output json contaning only occurences of filtered by package'
)
@@ -2492,10 +2500,10 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['bar'], (err) => {
t.ifError(err, 'npm ls')
@@ -2509,11 +2517,11 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
bar: {
- version: '1.0.0'
- }
- }
- }
- }
+ version: '1.0.0',
+ },
+ },
+ },
+ },
},
'should output json contaning only occurences of filtered by package'
)
@@ -2529,39 +2537,39 @@ t.test('ls --json', (t) => {
dependencies: {
foo: '^1.0.0',
lorem: '^1.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
node_modules: {
...simpleNmFixture.node_modules,
ipsum: {
'package.json': JSON.stringify({
name: 'ipsum',
- version: '1.0.0'
- })
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
})
ls(['bar@*', 'lorem@1.0.0'], (err) => {
t.ifError(err, 'npm ls')
t.deepEqual(
jsonParse(result),
{
- 'version': '1.0.0',
- 'name': 'test-npm-ls',
- 'dependencies': {
- 'foo': {
- 'version': '1.0.0',
- 'dependencies': {
- 'bar': {
- 'version': '1.0.0'
- }
- }
- },
- 'lorem': {
- 'version': '1.0.0'
- }
- }
+ version: '1.0.0',
+ name: 'test-npm-ls',
+ dependencies: {
+ foo: {
+ version: '1.0.0',
+ dependencies: {
+ bar: {
+ version: '1.0.0',
+ },
+ },
+ },
+ lorem: {
+ version: '1.0.0',
+ },
+ },
},
'should output json contaning only occurences of multiple filtered packages and their ancestors'
)
@@ -2576,10 +2584,10 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls(['notadep'], (err) => {
t.ifError(err, 'npm ls')
@@ -2587,7 +2595,7 @@ t.test('ls --json', (t) => {
jsonParse(result),
{
name: 'test-npm-ls',
- version: '1.0.0'
+ version: '1.0.0',
},
'should output json containing no dependencies info'
)
@@ -2610,10 +2618,10 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -2622,14 +2630,14 @@ t.test('ls --json', (t) => {
{
name: 'test-npm-ls',
version: '1.0.0',
- 'dependencies': {
- 'foo': {
- 'version': '1.0.0'
+ dependencies: {
+ foo: {
+ version: '1.0.0',
},
- 'lorem': {
- 'version': '1.0.0'
- }
- }
+ lorem: {
+ version: '1.0.0',
+ },
+ },
},
'should output json containing only top-level dependencies'
)
@@ -2648,10 +2656,10 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -2660,14 +2668,14 @@ t.test('ls --json', (t) => {
{
name: 'test-npm-ls',
version: '1.0.0',
- 'dependencies': {
- 'foo': {
- 'version': '1.0.0'
+ dependencies: {
+ foo: {
+ version: '1.0.0',
},
- 'lorem': {
- 'version': '1.0.0'
- }
- }
+ lorem: {
+ version: '1.0.0',
+ },
+ },
},
'should output json containing only top-level dependencies'
)
@@ -2686,10 +2694,10 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^1.0.0',
- lorem: '^1.0.0'
- }
+ lorem: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.ifError(err, 'npm ls')
@@ -2698,19 +2706,19 @@ t.test('ls --json', (t) => {
{
name: 'test-npm-ls',
version: '1.0.0',
- 'dependencies': {
- 'foo': {
- 'version': '1.0.0',
- 'dependencies': {
- 'bar': {
- 'version': '1.0.0'
- }
- }
- },
- 'lorem': {
- 'version': '1.0.0'
- }
- }
+ dependencies: {
+ foo: {
+ version: '1.0.0',
+ dependencies: {
+ bar: {
+ version: '1.0.0',
+ },
+ },
+ },
+ lorem: {
+ version: '1.0.0',
+ },
+ },
},
'should output json containing top-level deps and their deps only'
)
@@ -2727,51 +2735,51 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
foo: '^2.0.0',
- ipsum: '^1.0.0'
- }
+ ipsum: '^1.0.0',
+ },
}),
- ...simpleNmFixture
+ ...simpleNmFixture,
})
ls([], (err) => {
t.match(err, { code: 'ELSPROBLEMS' }, 'should list dep problems')
t.deepEqual(
jsonParse(result),
{
- 'name': 'test-npm-ls',
- 'version': '1.0.0',
- 'problems': [
+ name: 'test-npm-ls',
+ version: '1.0.0',
+ problems: [
'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo',
'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0',
- 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem'
+ 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem',
],
- 'dependencies': {
- 'foo': {
- 'version': '1.0.0',
- 'invalid': true,
- 'problems': [
- 'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo'
+ dependencies: {
+ foo: {
+ version: '1.0.0',
+ invalid: true,
+ problems: [
+ 'invalid: foo@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/foo',
],
- 'dependencies': {
- 'bar': {
- 'version': '1.0.0'
- }
- }
- },
- 'lorem': {
- 'version': '1.0.0',
- 'extraneous': true,
- 'problems': [
- 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem'
- ]
- },
- 'ipsum': {
- 'required': '^1.0.0',
- 'missing': true,
- 'problems': [
- 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0'
- ]
- }
- }
+ dependencies: {
+ bar: {
+ version: '1.0.0',
+ },
+ },
+ },
+ lorem: {
+ version: '1.0.0',
+ extraneous: true,
+ problems: [
+ 'extraneous: lorem@1.0.0 {CWD}/ls-ls-json-missing-invalid-extraneous/node_modules/lorem',
+ ],
+ },
+ ipsum: {
+ required: '^1.0.0',
+ missing: true,
+ problems: [
+ 'missing: ipsum@^1.0.0, required by test-npm-ls@1.0.0',
+ ],
+ },
+ },
},
'should output json containing top-level deps and their deps only'
)
@@ -2787,19 +2795,19 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.deepEqual(
@@ -2813,11 +2821,11 @@ t.test('ls --json', (t) => {
dependencies: {
foo: {
version: '1.0.0',
- dependencies: { bar: { version: '1.0.0' } }
- }
- }
- }
- }
+ dependencies: { bar: { version: '1.0.0' } },
+ },
+ },
+ },
+ },
},
'should output json containing dev deps'
)
@@ -2834,19 +2842,19 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.deepEqual(
@@ -2860,11 +2868,11 @@ t.test('ls --json', (t) => {
dependencies: {
foo: {
version: '1.0.0',
- dependencies: { bar: { version: '1.0.0' } }
- }
- }
- }
- }
+ dependencies: { bar: { version: '1.0.0' } },
+ },
+ },
+ },
+ },
},
'should output json containing only development deps'
)
@@ -2881,29 +2889,29 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0',
- 'linked-dep': '^1.0.0'
+ lorem: '^1.0.0',
+ 'linked-dep': '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
'linked-dep': {
'package.json': JSON.stringify({
name: 'linked-dep',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
node_modules: {
'linked-dep': t.fixture('symlink', '../linked-dep'),
- ...diffDepTypesNmFixture.node_modules
- }
+ ...diffDepTypesNmFixture.node_modules,
+ },
})
ls([], () => {
t.deepEqual(
@@ -2914,9 +2922,9 @@ t.test('ls --json', (t) => {
dependencies: {
'linked-dep': {
version: '1.0.0',
- resolved: 'file:../linked-dep'
- }
- }
+ resolved: 'file:../linked-dep',
+ },
+ },
},
'should output json containing linked deps'
)
@@ -2933,19 +2941,19 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.deepEqual(
@@ -2956,8 +2964,8 @@ t.test('ls --json', (t) => {
dependencies: {
lorem: { version: '1.0.0' },
'optional-dep': { version: '1.0.0' },
- 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }
- }
+ 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } },
+ },
},
'should output json containing production deps'
)
@@ -2974,19 +2982,19 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.deepEqual(
@@ -2997,8 +3005,8 @@ t.test('ls --json', (t) => {
dependencies: {
lorem: { version: '1.0.0' },
'optional-dep': { version: '1.0.0' },
- 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }
- }
+ 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } },
+ },
},
'should output json containing only prod deps'
)
@@ -3014,121 +3022,121 @@ t.test('ls --json', (t) => {
'dedupe-tests-a': {
'package.json': JSON.stringify({
name: '@isaacs/dedupe-tests-a',
- version: '1.0.1'
+ version: '1.0.1',
}),
node_modules: {
'@isaacs': {
'dedupe-tests-b': {
name: '@isaacs/dedupe-tests-b',
- version: '1.0.0'
- }
- }
- }
+ version: '1.0.0',
+ },
+ },
+ },
},
'dedupe-tests-b': {
'package.json': JSON.stringify({
name: '@isaacs/dedupe-tests-b',
- version: '2.0.0'
- })
- }
- }
+ version: '2.0.0',
+ }),
+ },
+ },
},
'package-lock.json': JSON.stringify({
- 'name': 'dedupe-lockfile',
- 'version': '1.0.0',
- 'lockfileVersion': 2,
- 'requires': true,
- 'packages': {
+ name: 'dedupe-lockfile',
+ version: '1.0.0',
+ lockfileVersion: 2,
+ requires: true,
+ packages: {
'': {
- 'name': 'dedupe-lockfile',
- 'version': '1.0.0',
- 'dependencies': {
+ name: 'dedupe-lockfile',
+ version: '1.0.0',
+ dependencies: {
'@isaacs/dedupe-tests-a': '1.0.1',
- '@isaacs/dedupe-tests-b': '1||2'
- }
+ '@isaacs/dedupe-tests-b': '1||2',
+ },
},
'node_modules/@isaacs/dedupe-tests-a': {
- 'name': '@isaacs/dedupe-tests-a',
- 'version': '1.0.1',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz',
- 'integrity': 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==',
- 'dependencies': {
- '@isaacs/dedupe-tests-b': '1'
- }
+ name: '@isaacs/dedupe-tests-a',
+ version: '1.0.1',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz',
+ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==',
+ dependencies: {
+ '@isaacs/dedupe-tests-b': '1',
+ },
},
'node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b': {
- 'name': '@isaacs/dedupe-tests-b',
- 'version': '1.0.0',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz',
- 'integrity': 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w=='
+ name: '@isaacs/dedupe-tests-b',
+ version: '1.0.0',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz',
+ integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==',
},
'node_modules/@isaacs/dedupe-tests-b': {
- 'name': '@isaacs/dedupe-tests-b',
- 'version': '2.0.0',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz',
- 'integrity': 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA=='
- }
+ name: '@isaacs/dedupe-tests-b',
+ version: '2.0.0',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz',
+ integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==',
+ },
},
- 'dependencies': {
+ dependencies: {
'@isaacs/dedupe-tests-a': {
- 'version': '1.0.1',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz',
- 'integrity': 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==',
- 'requires': {
- '@isaacs/dedupe-tests-b': '1'
+ version: '1.0.1',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz',
+ integrity: 'sha512-8AN9lNCcBt5Xeje7fMEEpp5K3rgcAzIpTtAjYb/YMUYu8SbIVF6wz0WqACDVKvpQOUcSfNHZQNLNmue0QSwXOQ==',
+ requires: {
+ '@isaacs/dedupe-tests-b': '1',
},
- 'dependencies': {
+ dependencies: {
'@isaacs/dedupe-tests-b': {
- 'version': '1.0.0',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz',
- 'integrity': 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w=='
- }
- }
+ version: '1.0.0',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz',
+ integrity: 'sha512-3nmvzIb8QL8OXODzipwoV3U8h9OQD9g9RwOPuSBQqjqSg9JZR1CCFOWNsDUtOfmwY8HFUJV9EAZ124uhqVxq+w==',
+ },
+ },
},
'@isaacs/dedupe-tests-b': {
- 'version': '2.0.0',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz',
- 'integrity': 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA=='
- }
- }
+ version: '2.0.0',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz',
+ integrity: 'sha512-KTYkpRv9EzlmCg4Gsm/jpclWmRYFCXow8GZKJXjK08sIZBlElTZEa5Bw/UQxIvEfcKmWXczSqItD49Kr8Ax4UA==',
+ },
+ },
}),
'package.json': JSON.stringify({
- 'name': 'dedupe-lockfile',
- 'version': '1.0.0',
- 'dependencies': {
+ name: 'dedupe-lockfile',
+ version: '1.0.0',
+ dependencies: {
'@isaacs/dedupe-tests-a': '1.0.1',
- '@isaacs/dedupe-tests-b': '1||2'
- }
- })
+ '@isaacs/dedupe-tests-b': '1||2',
+ },
+ }),
})
ls([], () => {
t.deepEqual(
jsonParse(result),
{
- 'version': '1.0.0',
- 'name': 'dedupe-lockfile',
- 'dependencies': {
+ version: '1.0.0',
+ name: 'dedupe-lockfile',
+ dependencies: {
'@isaacs/dedupe-tests-a': {
- 'version': '1.0.1',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz',
- 'dependencies': {
+ version: '1.0.1',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-a/-/dedupe-tests-a-1.0.1.tgz',
+ dependencies: {
'@isaacs/dedupe-tests-b': {
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz',
- 'extraneous': true,
- 'problems': [
- 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b'
- ]
- }
- }
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-1.0.0.tgz',
+ extraneous: true,
+ problems: [
+ 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b',
+ ],
+ },
+ },
},
'@isaacs/dedupe-tests-b': {
- 'version': '2.0.0',
- 'resolved': 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz'
- }
+ version: '2.0.0',
+ resolved: 'https://registry.npmjs.org/@isaacs/dedupe-tests-b/-/dedupe-tests-b-2.0.0.tgz',
+ },
},
- 'problems': [
- 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b'
- ]
+ problems: [
+ 'extraneous: @isaacs/dedupe-tests-b@ {CWD}/ls-ls-json-from-lockfile/node_modules/@isaacs/dedupe-tests-a/node_modules/@isaacs/dedupe-tests-b',
+ ],
},
'should output json containing only prod deps'
)
@@ -3144,19 +3152,19 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.deepEqual(
@@ -3174,7 +3182,7 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long/node_modules/peer-dep',
- extraneous: false
+ extraneous: false,
},
'dev-dep': {
name: 'dev-dep',
@@ -3193,23 +3201,23 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long/node_modules/bar',
- extraneous: false
- }
+ extraneous: false,
+ },
},
_id: 'foo@1.0.0',
devDependencies: {},
peerDependencies: {},
_dependencies: { bar: '^1.0.0' },
path: '{CWD}/ls-ls-json--long/node_modules/foo',
- extraneous: false
- }
+ extraneous: false,
+ },
},
_id: 'dev-dep@1.0.0',
devDependencies: {},
peerDependencies: {},
_dependencies: { foo: '^1.0.0' },
path: '{CWD}/ls-ls-json--long/node_modules/dev-dep',
- extraneous: false
+ extraneous: false,
},
lorem: {
name: 'lorem',
@@ -3219,7 +3227,7 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long/node_modules/lorem',
- extraneous: false
+ extraneous: false,
},
'optional-dep': {
name: 'optional-dep',
@@ -3230,7 +3238,7 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long/node_modules/optional-dep',
- extraneous: false
+ extraneous: false,
},
'prod-dep': {
name: 'prod-dep',
@@ -3246,16 +3254,16 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long/node_modules/prod-dep/node_modules/bar',
- extraneous: false
- }
+ extraneous: false,
+ },
},
_id: 'prod-dep@1.0.0',
devDependencies: {},
peerDependencies: {},
_dependencies: { bar: '^2.0.0' },
path: '{CWD}/ls-ls-json--long/node_modules/prod-dep',
- extraneous: false
- }
+ extraneous: false,
+ },
},
devDependencies: { 'dev-dep': '^1.0.0' },
optionalDependencies: { 'optional-dep': '^1.0.0' },
@@ -3263,7 +3271,7 @@ t.test('ls --json', (t) => {
_id: 'test-npm-ls@1.0.0',
_dependencies: { 'prod-dep': '^1.0.0', lorem: '^1.0.0', 'optional-dep': '^1.0.0' },
path: '{CWD}/ls-ls-json--long',
- extraneous: false
+ extraneous: false,
},
'should output long json info'
)
@@ -3282,19 +3290,19 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], () => {
t.deepEqual(
@@ -3312,7 +3320,7 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long-depth-0/node_modules/peer-dep',
- extraneous: false
+ extraneous: false,
},
'dev-dep': {
name: 'dev-dep',
@@ -3323,7 +3331,7 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: { foo: '^1.0.0' },
path: '{CWD}/ls-ls-json--long-depth-0/node_modules/dev-dep',
- extraneous: false
+ extraneous: false,
},
lorem: {
name: 'lorem',
@@ -3333,7 +3341,7 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long-depth-0/node_modules/lorem',
- extraneous: false
+ extraneous: false,
},
'optional-dep': {
name: 'optional-dep',
@@ -3344,7 +3352,7 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: {},
path: '{CWD}/ls-ls-json--long-depth-0/node_modules/optional-dep',
- extraneous: false
+ extraneous: false,
},
'prod-dep': {
name: 'prod-dep',
@@ -3355,8 +3363,8 @@ t.test('ls --json', (t) => {
peerDependencies: {},
_dependencies: { bar: '^2.0.0' },
path: '{CWD}/ls-ls-json--long-depth-0/node_modules/prod-dep',
- extraneous: false
- }
+ extraneous: false,
+ },
},
devDependencies: { 'dev-dep': '^1.0.0' },
optionalDependencies: { 'optional-dep': '^1.0.0' },
@@ -3364,7 +3372,7 @@ t.test('ls --json', (t) => {
_id: 'test-npm-ls@1.0.0',
_dependencies: { 'prod-dep': '^1.0.0', lorem: '^1.0.0', 'optional-dep': '^1.0.0' },
path: '{CWD}/ls-ls-json--long-depth-0',
- extraneous: false
+ extraneous: false,
},
'should output json containing top-level deps in long format'
)
@@ -3377,7 +3385,7 @@ t.test('ls --json', (t) => {
t.test('json read problems', (t) => {
prefix = t.testdir({
- 'package.json': '{broken json'
+ 'package.json': '{broken json',
})
ls([], (err) => {
t.match(err.message, 'Failed to parse root package.json', 'should have missin root package.json msg')
@@ -3387,8 +3395,8 @@ t.test('ls --json', (t) => {
{
invalid: true,
problems: [
- 'error in {CWD}/ls-ls-json-json-read-problems: Failed to parse root package.json'
- ]
+ 'error in {CWD}/ls-ls-json-json-read-problems: Failed to parse root package.json',
+ ],
},
'should print empty json result'
)
@@ -3416,19 +3424,19 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
- 'optional-dep': '^1.0.0'
+ 'optional-dep': '^1.0.0',
},
peerDependencies: {
- 'peer-dep': '^2.0.0' // mismatching version #
- }
+ 'peer-dep': '^2.0.0', // mismatching version #
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'Should have ELSPROBLEMS error code')
@@ -3438,29 +3446,29 @@ t.test('ls --json', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
problems: [
- 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep'
+ 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep',
],
dependencies: {
'peer-dep': {
version: '1.0.0',
invalid: true,
problems: [
- 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep'
- ]
+ 'invalid: peer-dep@1.0.0 {CWD}/ls-ls-json-unmet-peer-dep/node_modules/peer-dep',
+ ],
},
'dev-dep': {
version: '1.0.0',
dependencies: {
foo: {
version: '1.0.0',
- dependencies: { bar: { version: '1.0.0' } }
- }
- }
+ dependencies: { bar: { version: '1.0.0' } },
+ },
+ },
},
lorem: { version: '1.0.0' },
'optional-dep': { version: '1.0.0' },
- 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } }
- }
+ 'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } },
+ },
},
'should output json signaling missing peer dep in problems'
)
@@ -3475,20 +3483,20 @@ t.test('ls --json', (t) => {
version: '1.0.0',
dependencies: {
'prod-dep': '^1.0.0',
- 'lorem': '^1.0.0'
+ lorem: '^1.0.0',
},
devDependencies: {
- 'dev-dep': '^1.0.0'
+ 'dev-dep': '^1.0.0',
},
optionalDependencies: {
'missing-optional-dep': '^1.0.0',
- 'optional-dep': '^2.0.0' // mismatching version #
+ 'optional-dep': '^2.0.0', // mismatching version #
},
peerDependencies: {
- 'peer-dep': '^1.0.0'
- }
+ 'peer-dep': '^1.0.0',
+ },
}),
- ...diffDepTypesNmFixture
+ ...diffDepTypesNmFixture,
})
ls([], (err) => {
t.match(err.code, 'ELSPROBLEMS', 'should have ELSPROBLEMS error code')
@@ -3499,32 +3507,32 @@ t.test('ls --json', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
problems: [
- 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep' // mismatching optional deps get flagged in problems
+ 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep', // mismatching optional deps get flagged in problems
],
dependencies: {
'optional-dep': {
version: '1.0.0',
invalid: true,
problems: [
- 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep'
- ]
+ 'invalid: optional-dep@1.0.0 {CWD}/ls-ls-json-unmet-optional-dep/node_modules/optional-dep',
+ ],
},
'peer-dep': {
- version: '1.0.0'
+ version: '1.0.0',
},
'dev-dep': {
version: '1.0.0',
dependencies: {
foo: {
version: '1.0.0',
- dependencies: { bar: { version: '1.0.0' } }
- }
- }
+ dependencies: { bar: { version: '1.0.0' } },
+ },
+ },
},
lorem: { version: '1.0.0' },
'prod-dep': { version: '1.0.0', dependencies: { bar: { version: '2.0.0' } } },
- 'missing-optional-dep': {} // missing optional dep has an empty entry in json output
- }
+ 'missing-optional-dep': {}, // missing optional dep has an empty entry in json output
+ },
},
'should output json with empty entry for missing optional deps'
)
@@ -3538,29 +3546,29 @@ t.test('ls --json', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'a': '^1.0.0'
- }
+ a: '^1.0.0',
+ },
}),
node_modules: {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: 'a',
version: '1.0.0',
dependencies: {
- b: '^1.0.0'
- }
- })
+ b: '^1.0.0',
+ },
+ }),
},
- 'b': {
+ b: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
dependencies: {
- a: '^1.0.0'
- }
- })
- }
- }
+ a: '^1.0.0',
+ },
+ }),
+ },
+ },
})
ls([], () => {
t.deepEqual(
@@ -3575,12 +3583,12 @@ t.test('ls --json', (t) => {
b: {
version: '1.0.0',
dependencies: {
- a: { version: '1.0.0' }
- }
- }
- }
- }
- }
+ a: { version: '1.0.0' },
+ },
+ },
+ },
+ },
+ },
},
'should print json output containing deduped ref'
)
@@ -3594,22 +3602,22 @@ t.test('ls --json', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- a: 'npm:b@1.0.0'
- }
+ a: 'npm:b@1.0.0',
+ },
}),
node_modules: {
- 'a': {
+ a: {
'package.json': JSON.stringify({
name: 'b',
version: '1.0.0',
_from: 'a@npm:b',
_resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz',
_requested: {
- type: 'alias'
- }
- })
- }
- }
+ type: 'alias',
+ },
+ }),
+ },
+ },
})
ls([], () => {
t.deepEqual(
@@ -3620,9 +3628,9 @@ t.test('ls --json', (t) => {
dependencies: {
a: {
version: '1.0.0',
- resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz'
- }
- }
+ resolved: 'https://localhost:8080/abbrev/-/abbrev-1.1.1.tgz',
+ },
+ },
},
'should output json containing aliases'
)
@@ -3636,11 +3644,11 @@ t.test('ls --json', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'abbrev': 'git+https://github.com/isaacs/abbrev-js.git'
- }
+ abbrev: 'git+https://github.com/isaacs/abbrev-js.git',
+ },
}),
node_modules: {
- 'abbrev': {
+ abbrev: {
'package.json': JSON.stringify({
name: 'abbrev',
version: '1.1.1',
@@ -3653,11 +3661,11 @@ t.test('ls --json', (t) => {
rawSpec: 'git+https:github.com/isaacs/abbrev-js.git',
saveSpec: 'git+https://github.com/isaacs/abbrev-js.git',
fetchSpec: 'https://github.com/isaacs/abbrev-js.git',
- gitCommittish: null
- }
- })
- }
- }
+ gitCommittish: null,
+ },
+ }),
+ },
+ },
})
ls([], () => {
t.deepEqual(
@@ -3668,9 +3676,9 @@ t.test('ls --json', (t) => {
dependencies: {
abbrev: {
version: '1.1.1',
- resolved: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c'
- }
- }
+ resolved: 'git+ssh://git@github.com/isaacs/abbrev-js.git#b8f3a2fc0c3bb8ffd8b0d0072cc6b5a3667e963c',
+ },
+ },
},
'should output json containing git refs'
)
@@ -3684,8 +3692,8 @@ t.test('ls --json', (t) => {
name: 'test-npm-ls',
version: '1.0.0',
dependencies: {
- 'simple-output': '^2.0.0'
- }
+ 'simple-output': '^2.0.0',
+ },
}),
node_modules: {
'simple-output': {
@@ -3703,17 +3711,17 @@ t.test('ls --json', (t) => {
escapedName: 'simple-output',
rawSpec: '',
saveSpec: null,
- fetchSpec: 'latest'
+ fetchSpec: 'latest',
},
_requiredBy: [
'#USER',
- '/'
+ '/',
],
_shasum: '3c07708ec9ef3e3c985cf0ddd67df09ab8ec2abc',
- _spec: 'simple-output'
- })
- }
- }
+ _spec: 'simple-output',
+ }),
+ },
+ },
})
ls([], () => {
t.deepEqual(
@@ -3724,9 +3732,9 @@ t.test('ls --json', (t) => {
dependencies: {
'simple-output': {
version: '2.1.1',
- resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz'
- }
- }
+ resolved: 'https://registry.npmjs.org/simple-output/-/simple-output-2.1.1.tgz',
+ },
+ },
},
'should be printed in json output'
)
@@ -3737,15 +3745,15 @@ t.test('ls --json', (t) => {
t.test('node.name fallback if missing root package name', (t) => {
prefix = t.testdir({
'package.json': JSON.stringify({
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
})
ls([], () => {
t.deepEqual(
jsonParse(result),
{
- 'version': '1.0.0',
- 'name': 'ls-ls-json-node-name-fallback-if-missing-root-package-name'
+ version: '1.0.0',
+ name: 'ls-ls-json-node-name-fallback-if-missing-root-package-name',
},
'should use node.name as key in json result obj'
)
@@ -3760,24 +3768,24 @@ t.test('ls --json', (t) => {
a: {
'package.json': JSON.stringify({
name: 'a',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
+ version: '1.0.0',
}),
node_modules: {
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '1.0.0'
- })
- }
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
+ },
})
// mimics lib/npm.js globalDir getter but pointing to fixtures
@@ -3787,19 +3795,19 @@ t.test('ls --json', (t) => {
t.deepEqual(
jsonParse(result),
{
- 'dependencies': {
- 'a': {
- 'version': '1.0.0'
- },
- 'b': {
- 'version': '1.0.0',
- 'dependencies': {
- 'c': {
- 'version': '1.0.0'
- }
- }
- }
- }
+ dependencies: {
+ a: {
+ version: '1.0.0',
+ },
+ b: {
+ version: '1.0.0',
+ dependencies: {
+ c: {
+ version: '1.0.0',
+ },
+ },
+ },
+ },
},
'should print json output for global deps'
)
diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js
index f6a13b90fa..0e0adcf1db 100644
--- a/deps/npm/test/lib/npm.js
+++ b/deps/npm/test/lib/npm.js
@@ -24,14 +24,14 @@ const actualPlatform = process.platform
const beWindows = () => {
Object.defineProperty(process, 'platform', {
value: 'win32',
- configurable: true
+ configurable: true,
})
}
const bePosix = () => {
Object.defineProperty(process, 'platform', {
value: 'posix',
- configurable: true
+ configurable: true,
})
}
@@ -41,9 +41,9 @@ const npmPath = resolve(__dirname, '..', '..')
const Config = require('@npmcli/config')
const { types, defaults, shorthands } = require('../../lib/utils/config.js')
const freshConfig = (opts = {}) => {
- for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e))) {
+ for (const env of Object.keys(process.env).filter(e => /^npm_/.test(e)))
delete process.env[env]
- }
+
process.env.npm_config_cache = CACHE
npm.config = new Config({
@@ -52,14 +52,13 @@ const freshConfig = (opts = {}) => {
shorthands,
npmPath,
log: npmlog,
- ...opts
+ ...opts,
})
}
const logs = []
-for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error']) {
+for (const level of ['silly', 'verbose', 'timing', 'notice', 'warn', 'error'])
npmlog[level] = (...msg) => logs.push([level, ...msg])
-}
const npm = require('../../lib/npm.js')
@@ -73,7 +72,7 @@ t.test('not yet loaded', t => {
config: {
loaded: false,
get: Function,
- set: Function
+ set: Function,
},
version: String,
})
@@ -103,7 +102,9 @@ t.test('npm.load', t => {
t.test('load error', t => {
const { load } = npm.config
const loadError = new Error('load error')
- npm.config.load = async () => { throw loadError }
+ npm.config.load = async () => {
+ throw loadError
+ }
npm.load(er => {
t.equal(er, loadError)
t.equal(npm.loadErr, loadError)
@@ -120,13 +121,13 @@ t.test('npm.load', t => {
t.test('basic loading', t => {
const dir = t.testdir({
- node_modules: {}
+ node_modules: {},
})
let firstCalled = false
const first = (er) => {
- if (er) {
+ if (er)
throw er
- }
+
firstCalled = true
t.equal(npm.loaded, true)
t.equal(npm.config.loaded, true)
@@ -134,7 +135,9 @@ t.test('npm.load', t => {
}
let secondCalled = false
- const second = () => { secondCalled = true }
+ const second = () => {
+ secondCalled = true
+ }
t.equal(npm.loading, false, 'not loading yet')
const p = npm.load(first).then(() => {
@@ -142,16 +145,18 @@ t.test('npm.load', t => {
t.match(npm, {
loaded: true,
loading: false,
- flatOptions: {}
+ flatOptions: {},
})
t.equal(firstCalled, true, 'first callback got called')
t.equal(secondCalled, true, 'second callback got called')
let thirdCalled = false
- const third = () => { thirdCalled = true }
+ const third = () => {
+ thirdCalled = true
+ }
npm.load(third)
t.equal(thirdCalled, true, 'third callbback got called')
t.match(logs, [
- ['timing', 'npm:load', /Completed in [0-9]+ms/]
+ ['timing', 'npm:load', /Completed in [0-9]+ms/],
])
logs.length = 0
@@ -216,22 +221,22 @@ t.test('npm.load', t => {
t.test('forceful loading', t => {
// also, don't get thrown off if argv[0] isn't found for some reason
- const [ argv0 ] = process.argv
+ const [argv0] = process.argv
t.teardown(() => {
process.argv[0] = argv0
})
freshConfig({ argv: [...process.argv, '--force', '--color', 'always'] })
process.argv[0] = 'this exe does not exist or else this test will fail'
return npm.load(er => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(logs.filter(l => l[0] !== 'timing'), [
[
'warn',
'using --force',
- 'Recommended protections disabled.'
- ]
+ 'Recommended protections disabled.',
+ ],
])
logs.length = 0
})
@@ -240,7 +245,7 @@ t.test('npm.load', t => {
t.test('node is a symlink', async t => {
const node = actualPlatform === 'win32' ? 'node.exe' : 'node'
const dir = t.testdir({
- '.npmrc': 'foo = bar'
+ '.npmrc': 'foo = bar',
})
// create manually to set the 'file' option in windows
@@ -279,16 +284,16 @@ t.test('npm.load', t => {
logs.length = 0
await npm.load(er => {
- if (er) {
+ if (er)
throw er
- }
+
t.equal(npm.config.get('scope'), '@foo', 'added the @ sign to scope')
t.equal(npm.config.get('metrics-registry'), 'http://example.com')
t.match(logs.filter(l => l[0] !== 'timing' || !/^config:/.test(l[1])), [
[
'verbose',
'node symlink',
- resolve(dir, node)
+ resolve(dir, node),
],
[
'timing',
@@ -301,9 +306,9 @@ t.test('npm.load', t => {
})
await npm.commands.ll([], (er) => {
- if (er) {
+ if (er)
throw er
- }
+
t.same(consoleLogs, [[require('../../lib/ls.js').usage]], 'print usage')
consoleLogs.length = 0
npm.config.set('usage', false)
@@ -312,9 +317,9 @@ t.test('npm.load', t => {
})
await npm.commands.get(['scope', '\u2010not-a-dash'], (er) => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(logs, [
[
'error',
@@ -358,7 +363,7 @@ t.test('loading as main will load the cli', t => {
})
t.test('set process.title', t => {
- const { execPath, argv: processArgv } = process
+ const { argv: processArgv } = process
const { log } = console
const titleDesc = Object.getOwnPropertyDescriptor(process, 'title')
Object.defineProperty(process, 'title', {
diff --git a/deps/npm/test/lib/outdated.js b/deps/npm/test/lib/outdated.js
index 0cba04d547..7a5bd8f0ef 100644
--- a/deps/npm/test/lib/outdated.js
+++ b/deps/npm/test/lib/outdated.js
@@ -6,58 +6,57 @@ const packument = spec => {
alpha: {
name: 'alpha',
'dist-tags': {
- latest: '1.0.1'
+ latest: '1.0.1',
},
versions: {
'1.0.1': {
version: '1.0.1',
dependencies: {
- gamma: '2.0.0'
- }
- }
- }
+ gamma: '2.0.0',
+ },
+ },
+ },
},
beta: {
name: 'beta',
'dist-tags': {
- latest: '1.0.1'
+ latest: '1.0.1',
},
versions: {
'1.0.1': {
- version: '1.0.1'
- }
- }
+ version: '1.0.1',
+ },
+ },
},
gamma: {
name: 'gamma',
'dist-tags': {
- latest: '2.0.0'
+ latest: '2.0.0',
},
versions: {
'1.0.1': {
- version: '1.0.1'
+ version: '1.0.1',
},
'2.0.0': {
- version: '2.0.0'
- }
- }
+ version: '2.0.0',
+ },
+ },
},
theta: {
name: 'theta',
'dist-tags': {
- latest: '1.0.1'
+ latest: '1.0.1',
},
versions: {
'1.0.1': {
- version: '1.0.1'
- }
- }
- }
+ version: '1.0.1',
+ },
+ },
+ },
}
- if (spec.name === 'eta') {
+ if (spec.name === 'eta')
throw new Error('There is an error with this package.')
- }
if (!mocks[spec.name]) {
const err = new Error()
@@ -87,10 +86,10 @@ const globalDir = t.testdir({
alpha: {
'package.json': JSON.stringify({
name: 'alpha',
- version: '1.0.0'
- }, null, 2)
- }
- }
+ version: '1.0.0',
+ }, null, 2),
+ },
+ },
})
const outdated = (dir, opts) => requireInject(
@@ -99,11 +98,11 @@ const outdated = (dir, opts) => requireInject(
'../../lib/npm.js': {
prefix: dir,
globalDir: `${globalDir}/node_modules`,
- flatOptions: opts
+ flatOptions: opts,
},
pacote: {
- packument
- }
+ packument,
+ },
}
)
@@ -127,14 +126,14 @@ t.test('should display outdated deps', t => {
dependencies: {
alpha: '^1.0.0',
gamma: '^1.0.0',
- theta: '^1.0.0'
+ theta: '^1.0.0',
},
devDependencies: {
- zeta: '^1.0.0'
+ zeta: '^1.0.0',
},
peerDependencies: {
- beta: '^1.0.0'
- }
+ beta: '^1.0.0',
+ },
}, null, 2),
node_modules: {
alpha: {
@@ -142,42 +141,42 @@ t.test('should display outdated deps', t => {
name: 'alpha',
version: '1.0.0',
dependencies: {
- gamma: '2.0.0'
- }
+ gamma: '2.0.0',
+ },
}, null, 2),
node_modules: {
gamma: {
'package.json': JSON.stringify({
name: 'gamma',
- version: '2.0.0'
- }, null, 2)
- }
- }
+ version: '2.0.0',
+ }, null, 2),
+ },
+ },
},
beta: {
'package.json': JSON.stringify({
name: 'beta',
- version: '1.0.0'
- }, null, 2)
+ version: '1.0.0',
+ }, null, 2),
},
gamma: {
'package.json': JSON.stringify({
name: 'gamma',
- version: '1.0.1'
- }, null, 2)
+ version: '1.0.1',
+ }, null, 2),
},
zeta: {
'package.json': JSON.stringify({
name: 'zeta',
- version: '1.0.0'
- }, null, 2)
- }
- }
+ version: '1.0.0',
+ }, null, 2),
+ },
+ },
})
t.test('outdated global', t => {
outdated(null, {
- global: true
+ global: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -187,7 +186,7 @@ t.test('should display outdated deps', t => {
t.test('outdated', t => {
outdated(testDir, {
global: false,
- color: true
+ color: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -198,7 +197,7 @@ t.test('should display outdated deps', t => {
outdated(testDir, {
global: false,
color: true,
- omit: ['dev']
+ omit: ['dev'],
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -209,7 +208,7 @@ t.test('should display outdated deps', t => {
outdated(testDir, {
global: false,
color: true,
- omit: ['dev', 'peer']
+ omit: ['dev', 'peer'],
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -220,7 +219,7 @@ t.test('should display outdated deps', t => {
outdated(testDir, {
global: false,
color: true,
- omit: ['prod']
+ omit: ['prod'],
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -230,7 +229,7 @@ t.test('should display outdated deps', t => {
t.test('outdated --long', t => {
outdated(testDir, {
global: false,
- long: true
+ long: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -240,7 +239,7 @@ t.test('should display outdated deps', t => {
t.test('outdated --json', t => {
outdated(testDir, {
global: false,
- json: true
+ json: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -251,7 +250,7 @@ t.test('should display outdated deps', t => {
outdated(testDir, {
global: false,
json: true,
- long: true
+ long: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -261,7 +260,7 @@ t.test('should display outdated deps', t => {
t.test('outdated --parseable', t => {
outdated(testDir, {
global: false,
- parseable: true
+ parseable: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -272,7 +271,7 @@ t.test('should display outdated deps', t => {
outdated(testDir, {
global: false,
parseable: true,
- long: true
+ long: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -281,7 +280,7 @@ t.test('should display outdated deps', t => {
t.test('outdated --all', t => {
outdated(testDir, {
- all: true
+ all: true,
})([], () => {
t.matchSnapshot(logs)
t.end()
@@ -290,7 +289,7 @@ t.test('should display outdated deps', t => {
t.test('outdated specific dep', t => {
outdated(testDir, {
- global: false
+ global: false,
})(['alpha'], () => {
t.matchSnapshot(logs)
t.end()
@@ -306,21 +305,21 @@ t.test('should return if no outdated deps', t => {
name: 'delta',
version: '1.0.0',
dependencies: {
- alpha: '^1.0.0'
- }
+ alpha: '^1.0.0',
+ },
}, null, 2),
node_modules: {
alpha: {
'package.json': JSON.stringify({
name: 'alpha',
- version: '1.0.1'
- }, null, 2)
- }
- }
+ version: '1.0.1',
+ }, null, 2),
+ },
+ },
})
outdated(testDir, {
- global: false
+ global: false,
})([], () => {
t.equals(logs.length, 0, 'no logs')
t.end()
@@ -333,21 +332,21 @@ t.test('throws if error with a dep', t => {
name: 'delta',
version: '1.0.0',
dependencies: {
- eta: '^1.0.0'
- }
+ eta: '^1.0.0',
+ },
}, null, 2),
node_modules: {
eta: {
'package.json': JSON.stringify({
name: 'eta',
- version: '1.0.1'
- }, null, 2)
- }
- }
+ version: '1.0.1',
+ }, null, 2),
+ },
+ },
})
outdated(testDir, {
- global: false
+ global: false,
})([], (err) => {
t.equals(err.message, 'There is an error with this package.')
t.end()
@@ -360,14 +359,14 @@ t.test('should skip missing non-prod deps', t => {
name: 'delta',
version: '1.0.0',
devDependencies: {
- beta: '^1.0.0'
- }
+ beta: '^1.0.0',
+ },
}, null, 2),
- node_modules: {}
+ node_modules: {},
})
outdated(testDir, {
- global: false
+ global: false,
})([], () => {
t.equals(logs.length, 0, 'no logs')
t.end()
@@ -380,17 +379,17 @@ t.test('should skip invalid pkg ranges', t => {
name: 'delta',
version: '1.0.0',
dependencies: {
- alpha: '>=^2'
- }
+ alpha: '>=^2',
+ },
}, null, 2),
node_modules: {
alpha: {
'package.json': JSON.stringify({
name: 'alpha',
- version: '1.0.0'
- }, null, 2)
- }
- }
+ version: '1.0.0',
+ }, null, 2),
+ },
+ },
})
outdated(testDir, {})([], () => {
@@ -405,17 +404,17 @@ t.test('should skip git specs', t => {
name: 'delta',
version: '1.0.0',
dependencies: {
- alpha: 'github:username/foo'
- }
+ alpha: 'github:username/foo',
+ },
}, null, 2),
node_modules: {
alpha: {
'package.json': JSON.stringify({
name: 'alpha',
- version: '1.0.0'
- }, null, 2)
- }
- }
+ version: '1.0.0',
+ }, null, 2),
+ },
+ },
})
outdated(testDir, {})([], () => {
diff --git a/deps/npm/test/lib/owner.js b/deps/npm/test/lib/owner.js
index dc179e4662..e217533f0d 100644
--- a/deps/npm/test/lib/owner.js
+++ b/deps/npm/test/lib/owner.js
@@ -16,17 +16,19 @@ const mocks = {
'npm-registry-fetch': npmFetch,
pacote,
'../../lib/npm.js': npm,
- '../../lib/utils/output.js': (...msg) => { result += msg.join('\n') },
+ '../../lib/utils/output.js': (...msg) => {
+ result += msg.join('\n')
+ },
'../../lib/utils/otplease.js': async (opts, fn) => fn({ otp: '123456', opts }),
'../../lib/utils/read-local-package.js': async () => readLocalPkgResponse,
- '../../lib/utils/usage.js': () => 'usage instructions'
+ '../../lib/utils/usage.js': () => 'usage instructions',
}
const npmcliMaintainers = [
{ email: 'quitlahok@gmail.com', name: 'nlf' },
{ email: 'ruyadorno@hotmail.com', name: 'ruyadorno' },
{ email: 'darcy@darcyclarke.me', name: 'darcyclarke' },
- { email: 'i@izs.me', name: 'isaacs' }
+ { email: 'i@izs.me', name: 'isaacs' },
]
const owner = requireInject('../../lib/owner.js', mocks)
@@ -59,7 +61,7 @@ t.test('owner ls no args', t => {
opts,
{
...npm.flatOptions,
- fullMetadata: true
+ fullMetadata: true,
},
'should forward expected options to pacote.packument'
)
@@ -132,7 +134,7 @@ t.test('owner ls <pkg>', t => {
opts,
{
...npm.flatOptions,
- fullMetadata: true
+ fullMetadata: true,
},
'should forward expected options to pacote.packument'
)
@@ -178,7 +180,7 @@ t.test('owner add <user> <pkg>', t => {
return {
_id: 'org.couchdb.user:foo',
email: 'foo@github.com',
- name: 'foo'
+ name: 'foo',
}
} else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') {
t.ok('should put changed owner')
@@ -187,12 +189,12 @@ t.test('owner add <user> <pkg>', t => {
method: 'PUT',
body: {
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
},
otp: '123456',
spec: {
- name: '@npmcli/map-workspaces'
- }
+ name: '@npmcli/map-workspaces',
+ },
}, 'should use expected opts')
t.deepEqual(
opts.body.maintainers,
@@ -200,15 +202,14 @@ t.test('owner add <user> <pkg>', t => {
...npmcliMaintainers,
{
name: 'foo',
- email: 'foo@github.com'
- }
+ email: 'foo@github.com',
+ },
],
'should contain expected new owners, adding requested user'
)
return {}
- } else {
+ } else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => {
t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name')
@@ -216,13 +217,13 @@ t.test('owner add <user> <pkg>', t => {
opts,
{
...npm.flatOptions,
- fullMetadata: true
+ fullMetadata: true,
},
'should forward expected options to pacote.packument'
)
return {
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
}
}
t.teardown(() => {
@@ -246,17 +247,16 @@ t.test('owner add <user> cwd package', t => {
return {
_id: 'org.couchdb.user:foo',
email: 'foo@github.com',
- name: 'foo'
+ name: 'foo',
}
- } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') {
+ } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1')
return {}
- } else {
+ else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => ({
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
})
t.teardown(() => {
result = ''
@@ -290,16 +290,15 @@ t.test('owner add <user> <pkg> already an owner', t => {
return {
_id: 'org.couchdb.user:ruyadorno',
email: 'ruyadorno@hotmail.com',
- name: 'ruyadorno'
+ name: 'ruyadorno',
}
- } else {
+ } else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => {
return {
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
}
}
t.teardown(() => {
@@ -319,17 +318,16 @@ t.test('owner add <user> <pkg> fails to retrieve user', t => {
readLocalPkgResponse =
npmFetch.json = async (uri, opts) => {
// retrieve borked user info from couchdb request
- if (uri === '/-/user/org.couchdb.user:foo') {
+ if (uri === '/-/user/org.couchdb.user:foo')
return { ok: false }
- } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') {
+ else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1')
return {}
- } else {
+ else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => ({
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
})
t.teardown(() => {
result = ''
@@ -357,22 +355,21 @@ t.test('owner add <user> <pkg> fails to PUT updates', t => {
return {
_id: 'org.couchdb.user:foo',
email: 'foo@github.com',
- name: 'foo'
+ name: 'foo',
}
} else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') {
return {
error: {
status: '418',
- message: "I'm a teapot"
- }
+ message: "I'm a teapot",
+ },
}
- } else {
+ } else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => ({
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
})
t.teardown(() => {
result = ''
@@ -406,13 +403,12 @@ t.test('owner add <user> <pkg> fails to retrieve user info', t => {
new Error("I'm a teapot"),
{ status: 418 }
)
- } else {
+ } else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => ({
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
})
t.teardown(() => {
result = ''
@@ -438,18 +434,17 @@ t.test('owner add <user> <pkg> no previous maintainers property from server', t
return {
_id: 'org.couchdb.user:foo',
email: 'foo@github.com',
- name: 'foo'
+ name: 'foo',
}
- } else if (uri === '/@npmcli%2fno-owners-pkg/-rev/1-foobaaa1') {
+ } else if (uri === '/@npmcli%2fno-owners-pkg/-rev/1-foobaaa1')
return {}
- } else {
+ else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => {
return {
_rev: '1-foobaaa1',
- maintainers: null
+ maintainers: null,
}
}
t.teardown(() => {
@@ -509,7 +504,7 @@ t.test('owner rm <user> <pkg>', t => {
return {
_id: 'org.couchdb.user:ruyadorno',
email: 'ruyadorno@hotmail.com',
- name: 'ruyadorno'
+ name: 'ruyadorno',
}
} else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') {
t.ok('should put changed owner')
@@ -517,12 +512,12 @@ t.test('owner rm <user> <pkg>', t => {
...npm.flatOptions,
method: 'PUT',
body: {
- _rev: '1-foobaaa1'
+ _rev: '1-foobaaa1',
},
otp: '123456',
spec: {
- name: '@npmcli/map-workspaces'
- }
+ name: '@npmcli/map-workspaces',
+ },
}, 'should use expected opts')
t.deepEqual(
opts.body.maintainers,
@@ -530,9 +525,8 @@ t.test('owner rm <user> <pkg>', t => {
'should contain expected new owners, removing requested user'
)
return {}
- } else {
+ } else
t.fail(`unexpected fetch json call to: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => {
t.equal(spec.name, '@npmcli/map-workspaces', 'should use expect pkg name')
@@ -540,13 +534,13 @@ t.test('owner rm <user> <pkg>', t => {
opts,
{
...npm.flatOptions,
- fullMetadata: true
+ fullMetadata: true,
},
'should forward expected options to pacote.packument'
)
return {
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
}
}
t.teardown(() => {
@@ -575,18 +569,17 @@ t.test('owner rm <user> <pkg> not a current owner', t => {
return {
_id: 'org.couchdb.user:foo',
email: 'foo@github.com',
- name: 'foo'
+ name: 'foo',
}
- } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') {
+ } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1')
return {}
- } else {
+ else
t.fail(`unexpected fetch json call to: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => {
return {
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
}
}
t.teardown(() => {
@@ -610,17 +603,16 @@ t.test('owner rm <user> cwd package', t => {
return {
_id: 'org.couchdb.user:ruyadorno',
email: 'ruyadorno@hotmail.com',
- name: 'ruyadorno'
+ name: 'ruyadorno',
}
- } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1') {
+ } else if (uri === '/@npmcli%2fmap-workspaces/-rev/1-foobaaa1')
return {}
- } else {
+ else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => ({
_rev: '1-foobaaa1',
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
})
t.teardown(() => {
result = ''
@@ -645,18 +637,17 @@ t.test('owner rm <user> only user', t => {
return {
_id: 'org.couchdb.user:ruyadorno',
email: 'ruyadorno@hotmail.com',
- name: 'ruyadorno'
+ name: 'ruyadorno',
}
- } else {
+ } else
t.fail(`unexpected fetch json call to uri: ${uri}`)
- }
}
pacote.packument = async (spec, opts) => ({
_rev: '1-foobaaa1',
maintainers: [{
name: 'ruyadorno',
- email: 'ruyadorno@hotmail.com'
- }]
+ email: 'ruyadorno@hotmail.com',
+ }],
})
t.teardown(() => {
result = ''
@@ -722,7 +713,7 @@ t.test('completion', t => {
testComp(['npm', 'owner'], [
'add',
'rm',
- 'ls'
+ 'ls',
])
testComp(['npm', 'owner', 'add'], [])
testComp(['npm', 'owner', 'ls'], [])
@@ -735,7 +726,7 @@ t.test('completion', t => {
pacote.packument = async spec => {
t.equal(spec.name, readLocalPkgResponse, 'should use package spec')
return {
- maintainers: npmcliMaintainers
+ maintainers: npmcliMaintainers,
}
}
t.teardown(() => {
@@ -751,7 +742,7 @@ t.test('completion', t => {
'nlf',
'ruyadorno',
'darcyclarke',
- 'isaacs'
+ 'isaacs',
],
'should return list of current owners'
)
@@ -772,7 +763,7 @@ t.test('completion', t => {
pacote.packument = async spec => {
t.equal(spec.name, readLocalPkgResponse, 'should use package spec')
return {
- maintainers: []
+ maintainers: [],
}
}
t.teardown(() => {
diff --git a/deps/npm/test/lib/pack.js b/deps/npm/test/lib/pack.js
index 097204ea92..851174d259 100644
--- a/deps/npm/test/lib/pack.js
+++ b/deps/npm/test/lib/pack.js
@@ -4,11 +4,10 @@ const requireInject = require('require-inject')
const OUTPUT = []
const output = (...msg) => OUTPUT.push(msg)
-const libnpmpackActual = require('libnpmpack')
const libnpmpack = async (spec, opts) => {
- if (!opts) {
+ if (!opts)
throw new Error('expected options object')
- }
+
return ''
}
@@ -24,21 +23,21 @@ t.test('should pack current directory with no arguments', (t) => {
flatOptions: {
unicode: false,
json: false,
- dryRun: false
- }
+ dryRun: false,
+ },
},
libnpmpack,
npmlog: {
notice: () => {},
showProgress: () => {},
- clearProgress: () => {}
- }
+ clearProgress: () => {},
+ },
})
return pack([], er => {
- if (er) {
+ if (er)
throw er
- }
+
const filename = `npm-${require('../../package.json').version}.tgz`
t.strictSame(OUTPUT, [[filename]])
})
@@ -48,8 +47,8 @@ t.test('should pack given directory', (t) => {
const testDir = t.testdir({
'package.json': JSON.stringify({
name: 'my-cool-pkg',
- version: '1.0.0'
- }, null, 2)
+ version: '1.0.0',
+ }, null, 2),
})
const pack = requireInject('../../lib/pack.js', {
@@ -58,21 +57,21 @@ t.test('should pack given directory', (t) => {
flatOptions: {
unicode: true,
json: true,
- dryRun: true
- }
+ dryRun: true,
+ },
},
libnpmpack,
npmlog: {
notice: () => {},
- 'showProgress': () => {},
- 'clearProgress': () => {}
- }
+ showProgress: () => {},
+ clearProgress: () => {},
+ },
})
return pack([testDir], er => {
- if (er) {
+ if (er)
throw er
- }
+
const filename = 'my-cool-pkg-1.0.0.tgz'
t.strictSame(OUTPUT, [[filename]])
})
@@ -82,8 +81,8 @@ t.test('should pack given directory for scoped package', (t) => {
const testDir = t.testdir({
'package.json': JSON.stringify({
name: '@cool/my-pkg',
- version: '1.0.0'
- }, null, 2)
+ version: '1.0.0',
+ }, null, 2),
})
const pack = requireInject('../../lib/pack.js', {
@@ -92,21 +91,21 @@ t.test('should pack given directory for scoped package', (t) => {
flatOptions: {
unicode: true,
json: true,
- dryRun: true
- }
+ dryRun: true,
+ },
},
libnpmpack,
npmlog: {
notice: () => {},
- 'showProgress': () => {},
- 'clearProgress': () => {}
- }
+ showProgress: () => {},
+ clearProgress: () => {},
+ },
})
return pack([testDir], er => {
- if (er) {
+ if (er)
throw er
- }
+
const filename = 'cool-my-pkg-1.0.0.tgz'
t.strictSame(OUTPUT, [[filename]])
})
@@ -119,27 +118,27 @@ t.test('should log pack contents', (t) => {
...require('../../lib/utils/tar.js'),
logTar: () => {
t.ok(true, 'logTar is called')
- }
+ },
},
'../../lib/npm.js': {
flatOptions: {
unicode: false,
json: false,
- dryRun: false
- }
+ dryRun: false,
+ },
},
libnpmpack,
npmlog: {
notice: () => {},
- 'showProgress': () => {},
- 'clearProgress': () => {}
- }
+ showProgress: () => {},
+ clearProgress: () => {},
+ },
})
return pack([], er => {
- if (er) {
+ if (er)
throw er
- }
+
const filename = `npm-${require('../../package.json').version}.tgz`
t.strictSame(OUTPUT, [[filename]])
})
diff --git a/deps/npm/test/lib/ping.js b/deps/npm/test/lib/ping.js
index d27bf4d603..a185919ddd 100644
--- a/deps/npm/test/lib/ping.js
+++ b/deps/npm/test/lib/ping.js
@@ -22,8 +22,8 @@ test('pings', (t) => {
t.equal(type, 'PONG', 'should log a PONG')
t.match(spec, /\d+ms/, 'should log the elapsed milliseconds')
}
- }
- }
+ },
+ },
})
ping([], (err) => {
@@ -51,7 +51,7 @@ test('pings and logs details', (t) => {
if (noticeCalls === 1) {
t.equal(type, 'PING', 'should log a PING')
t.equal(spec, flatOptions.registry, 'should log the registry url')
- } else if (noticeCalls == 2) {
+ } else if (noticeCalls === 2) {
t.equal(type, 'PONG', 'should log a PONG')
t.match(spec, /\d+ms/, 'should log the elapsed milliseconds')
} else {
@@ -59,8 +59,8 @@ test('pings and logs details', (t) => {
const parsed = JSON.parse(spec)
t.match(parsed, details, 'should log JSON stringified details')
}
- }
- }
+ },
+ },
})
ping([], (err) => {
@@ -98,8 +98,8 @@ test('pings and returns json', (t) => {
t.equal(type, 'PONG', 'should log a PONG')
t.match(spec, /\d+ms/, 'should log the elapsed milliseconds')
}
- }
- }
+ },
+ },
})
ping([], (err) => {
diff --git a/deps/npm/test/lib/prefix.js b/deps/npm/test/lib/prefix.js
index a6e4d731ab..83e2d63680 100644
--- a/deps/npm/test/lib/prefix.js
+++ b/deps/npm/test/lib/prefix.js
@@ -9,7 +9,7 @@ test('prefix', (t) => {
'../../lib/npm.js': { prefix: dir },
'../../lib/utils/output.js': (output) => {
t.equal(output, dir, 'prints the correct directory')
- }
+ },
})
prefix([], (err) => {
diff --git a/deps/npm/test/lib/prune.js b/deps/npm/test/lib/prune.js
index 27c16355fa..074f4eac6e 100644
--- a/deps/npm/test/lib/prune.js
+++ b/deps/npm/test/lib/prune.js
@@ -1,5 +1,4 @@
const { test } = require('tap')
-const prune = require('../../lib/prune.js')
const requireInject = require('require-inject')
test('should prune using Arborist', (t) => {
@@ -7,8 +6,8 @@ test('should prune using Arborist', (t) => {
'../../lib/npm.js': {
prefix: 'foo',
flatOptions: {
- 'foo': 'bar'
- }
+ foo: 'bar',
+ },
},
'@npmcli/arborist': function (args) {
t.ok(args, 'gets options object')
@@ -17,13 +16,14 @@ test('should prune using Arborist', (t) => {
t.ok(true, 'prune is called')
}
},
- '../../lib/utils/reify-output.js': (arb) => {
+ '../../lib/utils/reify-finish.js': (arb) => {
t.ok(arb, 'gets arborist tree')
- }
+ },
})
- prune(null, () => {
+ prune(null, er => {
+ if (er)
+ throw er
t.ok(true, 'callback is called')
t.end()
})
})
-
diff --git a/deps/npm/test/lib/repo.js b/deps/npm/test/lib/repo.js
index fcc95f3438..c4b1b46e73 100644
--- a/deps/npm/test/lib/repo.js
+++ b/deps/npm/test/lib/repo.js
@@ -5,105 +5,105 @@ const pacote = {
manifest: async (spec, options) => {
return spec === 'norepo' ? {
name: 'norepo',
- version: '1.2.3'
+ version: '1.2.3',
}
- : spec === 'repoobbj-nourl' ? {
- name: 'repoobj-nourl',
- repository: { no: 'url' }
- }
+ : spec === 'repoobbj-nourl' ? {
+ name: 'repoobj-nourl',
+ repository: { no: 'url' },
+ }
- : spec === 'hostedgit' ? {
- repository: 'git://github.com/foo/hostedgit'
- }
- : spec === 'hostedgitat' ? {
- repository: 'git@github.com:foo/hostedgitat'
- }
- : spec === 'hostedssh' ? {
- repository: 'ssh://git@github.com/foo/hostedssh'
- }
- : spec === 'hostedgitssh' ? {
- repository: 'git+ssh://git@github.com/foo/hostedgitssh'
- }
- : spec === 'hostedgithttp' ? {
- repository: 'git+http://github.com/foo/hostedgithttp'
- }
- : spec === 'hostedgithttps' ? {
- repository: 'git+https://github.com/foo/hostedgithttps'
- }
+ : spec === 'hostedgit' ? {
+ repository: 'git://github.com/foo/hostedgit',
+ }
+ : spec === 'hostedgitat' ? {
+ repository: 'git@github.com:foo/hostedgitat',
+ }
+ : spec === 'hostedssh' ? {
+ repository: 'ssh://git@github.com/foo/hostedssh',
+ }
+ : spec === 'hostedgitssh' ? {
+ repository: 'git+ssh://git@github.com/foo/hostedgitssh',
+ }
+ : spec === 'hostedgithttp' ? {
+ repository: 'git+http://github.com/foo/hostedgithttp',
+ }
+ : spec === 'hostedgithttps' ? {
+ repository: 'git+https://github.com/foo/hostedgithttps',
+ }
- : spec === 'hostedgitobj' ? {
- repository: { url: 'git://github.com/foo/hostedgitobj' }
- }
- : spec === 'hostedgitatobj' ? {
- repository: { url: 'git@github.com:foo/hostedgitatobj' }
- }
- : spec === 'hostedsshobj' ? {
- repository: { url: 'ssh://git@github.com/foo/hostedsshobj' }
- }
- : spec === 'hostedgitsshobj' ? {
- repository: { url: 'git+ssh://git@github.com/foo/hostedgitsshobj' }
- }
- : spec === 'hostedgithttpobj' ? {
- repository: { url: 'git+http://github.com/foo/hostedgithttpobj' }
- }
- : spec === 'hostedgithttpsobj' ? {
- repository: { url: 'git+https://github.com/foo/hostedgithttpsobj' }
- }
+ : spec === 'hostedgitobj' ? {
+ repository: { url: 'git://github.com/foo/hostedgitobj' },
+ }
+ : spec === 'hostedgitatobj' ? {
+ repository: { url: 'git@github.com:foo/hostedgitatobj' },
+ }
+ : spec === 'hostedsshobj' ? {
+ repository: { url: 'ssh://git@github.com/foo/hostedsshobj' },
+ }
+ : spec === 'hostedgitsshobj' ? {
+ repository: { url: 'git+ssh://git@github.com/foo/hostedgitsshobj' },
+ }
+ : spec === 'hostedgithttpobj' ? {
+ repository: { url: 'git+http://github.com/foo/hostedgithttpobj' },
+ }
+ : spec === 'hostedgithttpsobj' ? {
+ repository: { url: 'git+https://github.com/foo/hostedgithttpsobj' },
+ }
- : spec === 'unhostedgit' ? {
- repository: 'git://gothib.com/foo/unhostedgit'
- }
- : spec === 'unhostedgitat' ? {
- repository: 'git@gothib.com:foo/unhostedgitat'
- }
- : spec === 'unhostedssh' ? {
- repository: 'ssh://git@gothib.com/foo/unhostedssh'
- }
- : spec === 'unhostedgitssh' ? {
- repository: 'git+ssh://git@gothib.com/foo/unhostedgitssh'
- }
- : spec === 'unhostedgithttp' ? {
- repository: 'git+http://gothib.com/foo/unhostedgithttp'
- }
- : spec === 'unhostedgithttps' ? {
- repository: 'git+https://gothib.com/foo/unhostedgithttps'
- }
+ : spec === 'unhostedgit' ? {
+ repository: 'git://gothib.com/foo/unhostedgit',
+ }
+ : spec === 'unhostedgitat' ? {
+ repository: 'git@gothib.com:foo/unhostedgitat',
+ }
+ : spec === 'unhostedssh' ? {
+ repository: 'ssh://git@gothib.com/foo/unhostedssh',
+ }
+ : spec === 'unhostedgitssh' ? {
+ repository: 'git+ssh://git@gothib.com/foo/unhostedgitssh',
+ }
+ : spec === 'unhostedgithttp' ? {
+ repository: 'git+http://gothib.com/foo/unhostedgithttp',
+ }
+ : spec === 'unhostedgithttps' ? {
+ repository: 'git+https://gothib.com/foo/unhostedgithttps',
+ }
- : spec === 'unhostedgitobj' ? {
- repository: { url: 'git://gothib.com/foo/unhostedgitobj' }
- }
- : spec === 'unhostedgitatobj' ? {
- repository: { url: 'git@gothib.com:foo/unhostedgitatobj' }
- }
- : spec === 'unhostedsshobj' ? {
- repository: { url: 'ssh://git@gothib.com/foo/unhostedsshobj' }
- }
- : spec === 'unhostedgitsshobj' ? {
- repository: { url: 'git+ssh://git@gothib.com/foo/unhostedgitsshobj' }
- }
- : spec === 'unhostedgithttpobj' ? {
- repository: { url: 'git+http://gothib.com/foo/unhostedgithttpobj' }
- }
- : spec === 'unhostedgithttpsobj' ? {
- repository: { url: 'git+https://gothib.com/foo/unhostedgithttpsobj' }
- }
+ : spec === 'unhostedgitobj' ? {
+ repository: { url: 'git://gothib.com/foo/unhostedgitobj' },
+ }
+ : spec === 'unhostedgitatobj' ? {
+ repository: { url: 'git@gothib.com:foo/unhostedgitatobj' },
+ }
+ : spec === 'unhostedsshobj' ? {
+ repository: { url: 'ssh://git@gothib.com/foo/unhostedsshobj' },
+ }
+ : spec === 'unhostedgitsshobj' ? {
+ repository: { url: 'git+ssh://git@gothib.com/foo/unhostedgitsshobj' },
+ }
+ : spec === 'unhostedgithttpobj' ? {
+ repository: { url: 'git+http://gothib.com/foo/unhostedgithttpobj' },
+ }
+ : spec === 'unhostedgithttpsobj' ? {
+ repository: { url: 'git+https://gothib.com/foo/unhostedgithttpsobj' },
+ }
- : spec === 'directory' ? {
- repository: {
- type: 'git',
- url: 'git+https://github.com/foo/test-repo-with-directory.git',
- directory: 'some/directory'
+ : spec === 'directory' ? {
+ repository: {
+ type: 'git',
+ url: 'git+https://github.com/foo/test-repo-with-directory.git',
+ directory: 'some/directory',
+ },
}
- }
- : spec === '.' ? {
- name: 'thispkg',
- version: '1.2.3',
- repository: 'https://example.com/thispkg.git'
- }
- : null
- }
+ : spec === '.' ? {
+ name: 'thispkg',
+ version: '1.2.3',
+ repository: 'https://example.com/thispkg.git',
+ }
+ : null
+ },
}
// keep a tally of which urls got opened
@@ -116,7 +116,7 @@ const openUrl = (url, errMsg, cb) => {
const repo = requireInject('../../lib/repo.js', {
pacote,
- '../../lib/utils/open-url.js': openUrl
+ '../../lib/utils/open-url.js': openUrl,
})
t.test('completion', t => {
@@ -152,7 +152,7 @@ t.test('open repo urls', t => {
unhostedgithttpobj: 'http://gothib.com/foo/unhostedgithttpobj',
unhostedgithttpsobj: 'https://gothib.com/foo/unhostedgithttpsobj',
directory: 'https://github.com/foo/test-repo-with-directory/tree/master/some/directory',
- '.': 'https://example.com/thispkg'
+ '.': 'https://example.com/thispkg',
}
const keys = Object.keys(expect)
t.plan(keys.length)
@@ -174,7 +174,7 @@ t.test('fail if cannot figure out repo url', t => {
'norepo',
'repoobbj-nourl',
'unhostedgitat',
- 'unhostedgitatobj'
+ 'unhostedgitatobj',
]
t.plan(cases.length)
diff --git a/deps/npm/test/lib/root.js b/deps/npm/test/lib/root.js
index 210e9b0291..8c23152b3e 100644
--- a/deps/npm/test/lib/root.js
+++ b/deps/npm/test/lib/root.js
@@ -9,7 +9,7 @@ test('root', (t) => {
'../../lib/npm.js': { dir },
'../../lib/utils/output.js': (output) => {
t.equal(output, dir, 'prints the correct directory')
- }
+ },
})
root([], (err) => {
diff --git a/deps/npm/test/lib/run-script.js b/deps/npm/test/lib/run-script.js
index 7ddb6ff6f6..bad8a63c0d 100644
--- a/deps/npm/test/lib/run-script.js
+++ b/deps/npm/test/lib/run-script.js
@@ -1,24 +1,23 @@
const t = require('tap')
const requireInject = require('require-inject')
-let RUN_FAIL = null
const RUN_SCRIPTS = []
const npm = {
localPrefix: __dirname,
flatOptions: {
scriptShell: undefined,
json: false,
- parseable: false
+ parseable: false,
},
config: {
settings: {
- 'if-present': false
+ 'if-present': false,
},
get: k => npm.config.settings[k],
set: (k, v) => {
npm.config.settings[k] = v
- }
- }
+ },
+ },
}
const output = []
@@ -33,7 +32,7 @@ const getRS = windows => requireInject('../../lib/run-script.js', {
npmlog,
'../../lib/npm.js': npm,
'../../lib/utils/is-windows-shell.js': windows,
- '../../lib/utils/output.js': (...msg) => output.push(msg)
+ '../../lib/utils/output.js': (...msg) => output.push(msg),
})
const runScript = getRS(false)
@@ -44,41 +43,41 @@ t.test('completion', t => {
const dir = t.testdir()
npm.localPrefix = dir
t.test('already have a script name', t => {
- runScript.completion({conf:{argv:{remain: ['npm','run','x']}}}, (er, results) => {
- if (er) {
+ runScript.completion({conf: {argv: {remain: ['npm', 'run', 'x']}}}, (er, results) => {
+ if (er)
throw er
- }
+
t.equal(results, undefined)
t.end()
})
})
t.test('no package.json', t => {
- runScript.completion({conf:{argv:{remain: ['npm','run']}}}, (er, results) => {
- if (er) {
+ runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}, (er, results) => {
+ if (er)
throw er
- }
+
t.strictSame(results, [])
t.end()
})
})
t.test('has package.json, no scripts', t => {
writeFileSync(`${dir}/package.json`, JSON.stringify({}))
- runScript.completion({conf:{argv:{remain: ['npm', 'run']}}}, (er, results) => {
- if (er) {
+ runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}, (er, results) => {
+ if (er)
throw er
- }
+
t.strictSame(results, [])
t.end()
})
})
t.test('has package.json, with scripts', t => {
writeFileSync(`${dir}/package.json`, JSON.stringify({
- scripts: { hello: 'echo hello', world: 'echo world' }
+ scripts: { hello: 'echo hello', world: 'echo world' },
}))
- runScript.completion({conf:{argv:{remain: ['npm', 'run']}}}, (er, results) => {
- if (er) {
+ runScript.completion({conf: {argv: {remain: ['npm', 'run']}}}, (er, results) => {
+ if (er)
throw er
- }
+
t.strictSame(results, ['hello', 'world'])
t.end()
})
@@ -99,9 +98,9 @@ t.test('default env, start, and restart scripts', async t => {
})
await runScript(['start'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(RUN_SCRIPTS, [
{
path: npm.localPrefix,
@@ -110,16 +109,16 @@ t.test('default env, start, and restart scripts', async t => {
stdio: 'inherit',
stdioString: true,
pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {}},
- event: 'start'
- }
+ event: 'start',
+ },
])
})
RUN_SCRIPTS.length = 0
await runScript(['env'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(RUN_SCRIPTS, [
{
path: npm.localPrefix,
@@ -127,19 +126,22 @@ t.test('default env, start, and restart scripts', async t => {
scriptShell: undefined,
stdio: 'inherit',
stdioString: true,
- pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {
- env: 'env'
- } },
- event: 'env'
- }
+ pkg: { name: 'x',
+ version: '1.2.3',
+ _id: 'x@1.2.3',
+ scripts: {
+ env: 'env',
+ } },
+ event: 'env',
+ },
])
})
RUN_SCRIPTS.length = 0
await runScriptWin(['env'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(RUN_SCRIPTS, [
{
path: npm.localPrefix,
@@ -147,19 +149,22 @@ t.test('default env, start, and restart scripts', async t => {
scriptShell: undefined,
stdio: 'inherit',
stdioString: true,
- pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {
- env: 'SET'
- } },
- event: 'env'
- }
+ pkg: { name: 'x',
+ version: '1.2.3',
+ _id: 'x@1.2.3',
+ scripts: {
+ env: 'SET',
+ } },
+ event: 'env',
+ },
])
})
RUN_SCRIPTS.length = 0
await runScript(['restart'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(RUN_SCRIPTS, [
{
path: npm.localPrefix,
@@ -167,11 +172,14 @@ t.test('default env, start, and restart scripts', async t => {
scriptShell: undefined,
stdio: 'inherit',
stdioString: true,
- pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {
- restart: 'npm stop --if-present && npm start'
- } },
- event: 'restart'
- }
+ pkg: { name: 'x',
+ version: '1.2.3',
+ _id: 'x@1.2.3',
+ scripts: {
+ restart: 'npm stop --if-present && npm start',
+ } },
+ event: 'restart',
+ },
])
})
RUN_SCRIPTS.length = 0
@@ -180,29 +188,29 @@ t.test('default env, start, and restart scripts', async t => {
t.test('try to run missing script', t => {
npm.localPrefix = t.testdir({
'package.json': JSON.stringify({
- scripts: { hello: 'world' }
- })
+ scripts: { hello: 'world' },
+ }),
})
t.test('no suggestions', async t => {
await runScript(['notevenclose'], er => {
t.match(er, {
- message: 'missing script: notevenclose'
+ message: 'missing script: notevenclose',
})
})
})
t.test('suggestions', async t => {
await runScript(['helo'], er => {
t.match(er, {
- message: 'missing script: helo\n\nDid you mean this?\n hello'
+ message: 'missing script: helo\n\nDid you mean this?\n hello',
})
})
})
t.test('with --if-present', async t => {
npm.config.set('if-present', true)
await runScript(['goodbye'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame(RUN_SCRIPTS, [], 'did not try to run anything')
})
})
@@ -216,15 +224,15 @@ t.test('run pre/post hooks', async t => {
version: '1.2.3',
scripts: {
preenv: 'echo before the env',
- postenv: 'echo after the env'
- }
- })
+ postenv: 'echo after the env',
+ },
+ }),
})
await runScript(['env'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(RUN_SCRIPTS, [
{ event: 'preenv' },
{
@@ -233,12 +241,15 @@ t.test('run pre/post hooks', async t => {
scriptShell: undefined,
stdio: 'inherit',
stdioString: true,
- pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {
- env: 'env'
- } },
- event: 'env'
+ pkg: { name: 'x',
+ version: '1.2.3',
+ _id: 'x@1.2.3',
+ scripts: {
+ env: 'env',
+ } },
+ event: 'env',
},
- { event: 'postenv' }
+ { event: 'postenv' },
])
})
RUN_SCRIPTS.length = 0
@@ -253,15 +264,15 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => {
version: '1.2.3',
scripts: {
preenv: 'echo before the env',
- postenv: 'echo after the env'
- }
- })
+ postenv: 'echo after the env',
+ },
+ }),
})
await runScript(['env'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.deepEqual(RUN_SCRIPTS, [
{
path: npm.localPrefix,
@@ -269,14 +280,17 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => {
scriptShell: undefined,
stdio: 'inherit',
stdioString: true,
- pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {
- preenv: 'echo before the env',
- postenv: 'echo after the env',
- env: 'env'
- } },
+ pkg: { name: 'x',
+ version: '1.2.3',
+ _id: 'x@1.2.3',
+ scripts: {
+ preenv: 'echo before the env',
+ postenv: 'echo after the env',
+ env: 'env',
+ } },
banner: true,
- event: 'env'
- }
+ event: 'env',
+ },
])
delete npm.flatOptions.ignoreScripts
@@ -286,7 +300,9 @@ t.test('skip pre/post hooks when using ignoreScripts', async t => {
t.test('run silent', async t => {
npmlog.level = 'silent'
- t.teardown(() => { npmlog.level = 'warn' })
+ t.teardown(() => {
+ npmlog.level = 'warn'
+ })
npm.localPrefix = t.testdir({
'package.json': JSON.stringify({
@@ -294,19 +310,19 @@ t.test('run silent', async t => {
version: '1.2.3',
scripts: {
preenv: 'echo before the env',
- postenv: 'echo after the env'
- }
- })
+ postenv: 'echo after the env',
+ },
+ }),
})
await runScript(['env'], er => {
- if (er) {
+ if (er)
throw er
- }
+
t.match(RUN_SCRIPTS, [
{
event: 'preenv',
- stdio: 'inherit'
+ stdio: 'inherit',
},
{
path: npm.localPrefix,
@@ -314,16 +330,19 @@ t.test('run silent', async t => {
scriptShell: undefined,
stdio: 'inherit',
stdioString: true,
- pkg: { name: 'x', version: '1.2.3', _id: 'x@1.2.3', scripts: {
- env: 'env'
- } },
+ pkg: { name: 'x',
+ version: '1.2.3',
+ _id: 'x@1.2.3',
+ scripts: {
+ env: 'env',
+ } },
event: 'env',
- banner: false
+ banner: false,
},
{
event: 'postenv',
- stdio: 'inherit'
- }
+ stdio: 'inherit',
+ },
])
})
RUN_SCRIPTS.length = 0
@@ -335,46 +354,43 @@ t.test('list scripts', async t => {
start: 'node server.js',
stop: 'node kill-server.js',
preenv: 'echo before the env',
- postenv: 'echo after the env'
+ postenv: 'echo after the env',
}
npm.localPrefix = t.testdir({
'package.json': JSON.stringify({
name: 'x',
version: '1.2.3',
- scripts
- })
+ scripts,
+ }),
})
await runScript([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(output, [
- [ 'Lifecycle scripts included in x:' ],
- [ ' test\n exit 2' ],
- [ ' start\n node server.js' ],
- [ ' stop\n node kill-server.js' ],
- [ '\navailable via `npm run-script`:' ],
- [ ' preenv\n echo before the env' ],
- [ ' postenv\n echo after the env' ]
+ ['Lifecycle scripts included in x:'],
+ [' test\n exit 2'],
+ [' start\n node server.js'],
+ [' stop\n node kill-server.js'],
+ ['\navailable via `npm run-script`:'],
+ [' preenv\n echo before the env'],
+ [' postenv\n echo after the env'],
], 'basic report')
output.length = 0
npmlog.level = 'silent'
await runScript([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(output, [])
npmlog.level = 'warn'
npm.flatOptions.json = true
await runScript([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(output, [[JSON.stringify(scripts, 0, 2)]], 'json report')
output.length = 0
@@ -382,16 +398,15 @@ t.test('list scripts', async t => {
npm.flatOptions.parseable = true
await runScript([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(output, [
- [ 'test:exit 2' ],
- [ 'start:node server.js' ],
- [ 'stop:node kill-server.js' ],
- [ 'preenv:echo before the env' ],
- [ 'postenv:echo after the env' ]
+ ['test:exit 2'],
+ ['start:node server.js'],
+ ['stop:node kill-server.js'],
+ ['preenv:echo before the env'],
+ ['postenv:echo after the env'],
])
output.length = 0
npm.flatOptions.parseable = false
@@ -401,14 +416,13 @@ t.test('list scripts when no scripts', async t => {
npm.localPrefix = t.testdir({
'package.json': JSON.stringify({
name: 'x',
- version: '1.2.3'
- })
+ version: '1.2.3',
+ }),
})
await runScript([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(output, [], 'nothing to report')
output.length = 0
@@ -419,18 +433,17 @@ t.test('list scripts, only commands', async t => {
'package.json': JSON.stringify({
name: 'x',
version: '1.2.3',
- scripts: { preversion: 'echo doing the version dance' }
- })
+ scripts: { preversion: 'echo doing the version dance' },
+ }),
})
await runScript([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(output, [
- ["Lifecycle scripts included in x:"],
- [" preversion\n echo doing the version dance"],
+ ['Lifecycle scripts included in x:'],
+ [' preversion\n echo doing the version dance'],
])
output.length = 0
})
@@ -440,18 +453,17 @@ t.test('list scripts, only non-commands', async t => {
'package.json': JSON.stringify({
name: 'x',
version: '1.2.3',
- scripts: { glorp: 'echo doing the glerp glop' }
- })
+ scripts: { glorp: 'echo doing the glerp glop' },
+ }),
})
await runScript([], er => {
- if (er) {
+ if (er)
throw er
- }
})
t.strictSame(output, [
- ["Scripts available in x via `npm run-script`:"],
- [" glorp\n echo doing the glerp glop"]
+ ['Scripts available in x via `npm run-script`:'],
+ [' glorp\n echo doing the glerp glop'],
])
output.length = 0
})
diff --git a/deps/npm/test/lib/test.js b/deps/npm/test/lib/test.js
index 8b6d066265..9a44e4760a 100644
--- a/deps/npm/test/lib/test.js
+++ b/deps/npm/test/lib/test.js
@@ -6,11 +6,11 @@ const npmock = {
run: (args, cb) => {
RUN_ARGS = args
cb()
- }
- }
+ },
+ },
}
const test = requireInject('../../lib/test.js', {
- '../../lib/npm.js': npmock
+ '../../lib/npm.js': npmock,
})
t.test('run a test', t => {
@@ -22,7 +22,7 @@ t.test('run a test', t => {
})
const lcErr = Object.assign(new Error('should not see this'), {
- code: 'ELIFECYCLE'
+ code: 'ELIFECYCLE',
})
const otherErr = new Error('should see this')
diff --git a/deps/npm/test/lib/token.js b/deps/npm/test/lib/token.js
index dc5a8ad05e..f988810722 100644
--- a/deps/npm/test/lib/token.js
+++ b/deps/npm/test/lib/token.js
@@ -6,7 +6,7 @@ const mocks = {
profile: {},
output: () => {},
log: {},
- readUserInfo: {}
+ readUserInfo: {},
}
const tokenMock = requireInject('../../lib/token.js', {
@@ -17,28 +17,26 @@ const tokenMock = requireInject('../../lib/token.js', {
},
'../../lib/utils/read-user-info.js': mocks.readUserInfo,
'npm-profile': mocks.profile,
- 'npmlog': mocks.log
+ npmlog: mocks.log,
})
const tokenWithMocks = (mockRequests) => {
for (const mod in mockRequests) {
- if (typeof mockRequests[mod] === 'function') {
+ if (typeof mockRequests[mod] === 'function')
mocks[mod] = mockRequests[mod]
- } else {
- for (const key in mockRequests[mod]) {
+ else {
+ for (const key in mockRequests[mod])
mocks[mod][key] = mockRequests[mod][key]
- }
}
}
const reset = () => {
for (const mod in mockRequests) {
- if (typeof mockRequests[mod] === 'function') {
+ if (typeof mockRequests[mod] === 'function')
mocks[mod] = () => {}
- } else {
- for (const key in mockRequests[mod]) {
+ else {
+ for (const key in mockRequests[mod])
delete mocks[mod][key]
- }
}
}
}
@@ -51,9 +49,8 @@ test('completion', (t) => {
const testComp = (argv, expect) => {
tokenMock.completion({ conf: { argv: { remain: argv } } }, (err, res) => {
- if (err) {
+ if (err)
throw err
- }
t.strictSame(res, expect, argv.join(' '))
})
@@ -62,14 +59,14 @@ test('completion', (t) => {
testComp(['npm', 'token'], [
'list',
'revoke',
- 'create'
+ 'create',
])
testComp(['npm', 'token', 'list'], [])
testComp(['npm', 'token', 'revoke'], [])
testComp(['npm', 'token', 'create'], [])
- tokenMock.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar' ] } } }, (err) => {
+ tokenMock.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }, (err) => {
t.match(err, { message: 'foobar not recognized' })
})
})
@@ -77,14 +74,14 @@ test('completion', (t) => {
test('token foobar', (t) => {
t.plan(2)
- const [token, reset] = tokenWithMocks({
+ const [, reset] = tokenWithMocks({
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'shows a gauge')
- }
- }
- }
+ },
+ },
+ },
})
t.tearDown(reset)
@@ -104,14 +101,14 @@ test('token list', (t) => {
cidr_whitelist: null,
readonly: false,
created: now,
- updated: now
+ updated: now,
}, {
key: 'abcd1256',
token: 'hgfe8765',
cidr_whitelist: ['192.168.1.1/32'],
readonly: true,
created: now,
- updated: now
+ updated: now,
}]
const [token, reset] = tokenWithMocks({
@@ -121,25 +118,25 @@ test('token list', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
profile: {
listTokens: (conf) => {
t.same(conf.auth, { token: 'thisisnotarealtoken', otp: '123456' })
return tokens
- }
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token')
- }
+ },
},
info: (type, msg) => {
t.equal(type, 'token')
t.equal(msg, 'getting list')
- }
+ },
},
output: (spec) => {
const lines = spec.split(/\r?\n/)
@@ -152,7 +149,7 @@ test('token list', (t) => {
t.match(lines[5], ` ${now.slice(0, 10)} `, 'includes the trimmed creation timestamp')
t.match(lines[5], ' yes ', 'includes the "no" string for readonly state')
t.match(lines[5], ` ${tokens[1].cidr_whitelist.join(',')} `, 'includes the cidr whitelist')
- }
+ },
})
t.tearDown(reset)
@@ -172,7 +169,7 @@ test('token list json output', (t) => {
cidr_whitelist: null,
readonly: false,
created: now,
- updated: now
+ updated: now,
}]
const [token, reset] = tokenWithMocks({
@@ -182,31 +179,31 @@ test('token list json output', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { username: 'foo', password: 'bar' }
- }
- }
+ },
+ },
},
profile: {
listTokens: (conf) => {
t.same(conf.auth, { basic: { username: 'foo', password: 'bar' } }, 'passes the correct auth')
return tokens
- }
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token')
- }
+ },
},
info: (type, msg) => {
t.equal(type, 'token')
t.equal(msg, 'getting list')
- }
+ },
},
output: (spec) => {
t.type(spec, 'string', 'is called with a string')
const parsed = JSON.parse(spec)
t.match(parsed, tokens, 'prints the json parsed tokens')
- }
+ },
})
t.tearDown(reset)
@@ -226,14 +223,14 @@ test('token list parseable output', (t) => {
cidr_whitelist: null,
readonly: false,
created: now,
- updated: now
+ updated: now,
}, {
key: 'efgh5678ijkl9101',
token: 'hgfe8765',
cidr_whitelist: ['192.168.1.1/32'],
readonly: true,
created: now,
- updated: now
+ updated: now,
}]
let callCount = 0
@@ -245,37 +242,36 @@ test('token list parseable output', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { auth: Buffer.from('foo:bar').toString('base64') }
- }
- }
+ },
+ },
},
profile: {
listTokens: (conf) => {
t.same(conf.auth, { basic: { username: 'foo', password: 'bar' } }, 'passes the correct auth')
return tokens
- }
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token')
- }
+ },
},
info: (type, msg) => {
t.equal(type, 'token')
t.equal(msg, 'getting list')
- }
+ },
},
output: (spec) => {
++callCount
t.type(spec, 'string', 'is called with a string')
- if (callCount === 1) {
+ if (callCount === 1)
t.equal(spec, ['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'), 'prints header')
- } else if (callCount === 2) {
+ else if (callCount === 2)
t.equal(spec, [tokens[0].key, tokens[0].token, tokens[0].created, tokens[0].readonly, ''].join('\t'), 'prints token info')
- } else {
+ else
t.equal(spec, [tokens[1].key, tokens[1].token, tokens[1].created, tokens[1].readonly, tokens[1].cidr_whitelist.join(',')].join('\t'), 'prints token info')
- }
- }
+ },
})
t.tearDown(reset)
@@ -295,14 +291,14 @@ test('token revoke', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return {}
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
newItem: (action, len) => {
t.equal(action, 'removing tokens')
@@ -311,24 +307,24 @@ test('token revoke', (t) => {
info: (name, progress) => {
t.equal(name, 'token')
t.equal(progress, 'getting existing list')
- }
+ },
}
- }
+ },
},
profile: {
listTokens: (conf) => {
t.same(conf.auth, {}, 'passes the correct empty auth')
return Promise.resolve([
- { key: 'abcd1234' }
+ { key: 'abcd1234' },
])
},
removeToken: (key) => {
t.equal(key, 'abcd1234', 'deletes the correct token')
- }
+ },
},
output: (spec) => {
t.equal(spec, 'Removed 1 token')
- }
+ },
})
t.tearDown(reset)
@@ -348,14 +344,14 @@ test('token revoke multiple tokens', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
newItem: (action, len) => {
t.equal(action, 'removing tokens')
@@ -364,23 +360,23 @@ test('token revoke multiple tokens', (t) => {
info: (name, progress) => {
t.equal(name, 'token')
t.equal(progress, 'getting existing list')
- }
+ },
}
- }
+ },
},
profile: {
listTokens: () => Promise.resolve([
{ key: 'abcd1234' },
- { key: 'efgh5678' }
+ { key: 'efgh5678' },
]),
removeToken: (key) => {
// this will run twice
t.ok(['abcd1234', 'efgh5678'].includes(key), 'deletes the correct token')
- }
+ },
},
output: (spec) => {
t.equal(spec, 'Removed 2 tokens')
- }
+ },
})
t.tearDown(reset)
@@ -400,14 +396,14 @@ test('token revoke json output', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
newItem: (action, len) => {
t.equal(action, 'removing tokens')
@@ -416,23 +412,23 @@ test('token revoke json output', (t) => {
info: (name, progress) => {
t.equal(name, 'token')
t.equal(progress, 'getting existing list')
- }
+ },
}
- }
+ },
},
profile: {
listTokens: () => Promise.resolve([
- { key: 'abcd1234' }
+ { key: 'abcd1234' },
]),
removeToken: (key) => {
t.equal(key, 'abcd1234', 'deletes the correct token')
- }
+ },
},
output: (spec) => {
t.type(spec, 'string', 'is given a string')
const parsed = JSON.parse(spec)
t.same(parsed, ['abcd1234'], 'logs the token as json')
- }
+ },
})
t.tearDown(reset)
@@ -452,14 +448,14 @@ test('token revoke parseable output', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
newItem: (action, len) => {
t.equal(action, 'removing tokens')
@@ -468,21 +464,21 @@ test('token revoke parseable output', (t) => {
info: (name, progress) => {
t.equal(name, 'token')
t.equal(progress, 'getting existing list')
- }
+ },
}
- }
+ },
},
profile: {
listTokens: () => Promise.resolve([
- { key: 'abcd1234' }
+ { key: 'abcd1234' },
]),
removeToken: (key) => {
t.equal(key, 'abcd1234', 'deletes the correct token')
- }
+ },
},
output: (spec) => {
t.equal(spec, 'abcd1234', 'logs the token as a string')
- }
+ },
})
t.tearDown(reset)
@@ -502,14 +498,14 @@ test('token revoke by token', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
newItem: (action, len) => {
t.equal(action, 'removing tokens')
@@ -518,21 +514,21 @@ test('token revoke by token', (t) => {
info: (name, progress) => {
t.equal(name, 'token')
t.equal(progress, 'getting existing list')
- }
+ },
}
- }
+ },
},
profile: {
listTokens: () => Promise.resolve([
- { key: 'abcd1234', token: 'efgh5678' }
+ { key: 'abcd1234', token: 'efgh5678' },
]),
removeToken: (key) => {
t.equal(key, 'efgh5678', 'passes through user input')
- }
+ },
},
output: (spec) => {
t.equal(spec, 'Removed 1 token')
- }
+ },
})
t.tearDown(reset)
@@ -550,9 +546,9 @@ test('token revoke requires an id', (t) => {
gauge: {
show: (name) => {
t.equal(name, 'token')
- }
- }
- }
+ },
+ },
+ },
})
t.tearDown(reset)
@@ -572,14 +568,14 @@ test('token revoke ambiguous id errors', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
newItem: (action, len) => {
t.equal(action, 'removing tokens')
@@ -588,16 +584,16 @@ test('token revoke ambiguous id errors', (t) => {
info: (name, progress) => {
t.equal(name, 'token')
t.equal(progress, 'getting existing list')
- }
+ },
}
- }
+ },
},
profile: {
listTokens: () => Promise.resolve([
{ key: 'abcd1234' },
- { key: 'abcd5678' }
- ])
- }
+ { key: 'abcd5678' },
+ ]),
+ },
})
t.tearDown(reset)
@@ -617,14 +613,14 @@ test('token revoke unknown id errors', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
newItem: (action, len) => {
t.equal(action, 'removing tokens')
@@ -633,15 +629,15 @@ test('token revoke unknown id errors', (t) => {
info: (name, progress) => {
t.equal(name, 'token')
t.equal(progress, 'getting existing list')
- }
+ },
}
- }
+ },
},
profile: {
listTokens: () => Promise.resolve([
- { key: 'abcd1234' }
- ])
- }
+ { key: 'abcd1234' },
+ ]),
+ },
})
t.tearDown(reset)
@@ -664,22 +660,22 @@ test('token create', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
info: (name, message) => {
t.equal(name, 'token')
t.equal(message, 'creating')
- }
+ },
},
readUserInfo: {
- password: () => Promise.resolve(password)
+ password: () => Promise.resolve(password),
},
profile: {
createToken: (pw, readonly, cidr) => {
@@ -692,9 +688,9 @@ test('token create', (t) => {
created: now,
updated: now,
readonly: false,
- cidr_whitelist: []
+ cidr_whitelist: [],
}
- }
+ },
},
output: (spec) => {
const lines = spec.split(/\r?\n/)
@@ -705,7 +701,7 @@ test('token create', (t) => {
t.match(lines[5], 'readonly')
t.match(lines[5], 'false', 'prints the readonly flag')
t.match(lines[7], 'cidr_whitelist')
- }
+ },
})
t.tearDown(reset)
@@ -728,22 +724,22 @@ test('token create json output', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
info: (name, message) => {
t.equal(name, 'token')
t.equal(message, 'creating')
- }
+ },
},
readUserInfo: {
- password: () => Promise.resolve(password)
+ password: () => Promise.resolve(password),
},
profile: {
createToken: (pw, readonly, cidr) => {
@@ -756,15 +752,15 @@ test('token create json output', (t) => {
created: now,
updated: now,
readonly: false,
- cidr_whitelist: []
+ cidr_whitelist: [],
}
- }
+ },
},
output: (spec) => {
t.type(spec, 'string', 'outputs a string')
const parsed = JSON.parse(spec)
t.same(parsed, { token: 'efgh5678', created: now, readonly: false, cidr_whitelist: [] }, 'outputs the correct object')
- }
+ },
})
t.tearDown(reset)
@@ -788,22 +784,22 @@ test('token create parseable output', (t) => {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
+ },
},
info: (name, message) => {
t.equal(name, 'token')
t.equal(message, 'creating')
- }
+ },
},
readUserInfo: {
- password: () => Promise.resolve(password)
+ password: () => Promise.resolve(password),
},
profile: {
createToken: (pw, readonly, cidr) => {
@@ -816,22 +812,21 @@ test('token create parseable output', (t) => {
created: now,
updated: now,
readonly: false,
- cidr_whitelist: []
+ cidr_whitelist: [],
}
- }
+ },
},
output: (spec) => {
++callCount
- if (callCount === 1) {
+ if (callCount === 1)
t.match(spec, 'token\tefgh5678', 'prints the token')
- } else if (callCount === 2) {
+ else if (callCount === 2)
t.match(spec, `created\t${now}`, 'prints the created timestamp')
- } else if (callCount === 3) {
+ else if (callCount === 3)
t.match(spec, 'readonly\tfalse', 'prints the readonly flag')
- } else {
+ else
t.match(spec, 'cidr_whitelist\t', 'prints the cidr whitelist')
- }
- }
+ },
})
t.tearDown(reset)
@@ -844,28 +839,28 @@ test('token create parseable output', (t) => {
test('token create ipv6 cidr', (t) => {
t.plan(4)
- const now = new Date().toISOString()
const password = 'thisisnotreallyapassword'
const [token, reset] = tokenWithMocks({
npm: {
- flatOptions: { registry: 'https://registry.npmjs.org', cidr: '::1/128' }, config: {
+ flatOptions: { registry: 'https://registry.npmjs.org', cidr: '::1/128' },
+ config: {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
- }
+ },
+ },
},
readUserInfo: {
- password: () => Promise.resolve(password)
- }
+ password: () => Promise.resolve(password),
+ },
})
t.tearDown(reset)
@@ -879,28 +874,28 @@ test('token create ipv6 cidr', (t) => {
test('token create invalid cidr', (t) => {
t.plan(4)
- const now = new Date().toISOString()
const password = 'thisisnotreallyapassword'
const [token, reset] = tokenWithMocks({
npm: {
- flatOptions: { registry: 'https://registry.npmjs.org', cidr: 'apple/cider' }, config: {
+ flatOptions: { registry: 'https://registry.npmjs.org', cidr: 'apple/cider' },
+ config: {
getCredentialsByURI: (uri) => {
t.equal(uri, 'https://registry.npmjs.org', 'requests correct registry')
return { token: 'thisisnotarealtoken' }
- }
- }
+ },
+ },
},
log: {
gauge: {
show: (name) => {
t.equal(name, 'token', 'starts a gauge')
- }
- }
+ },
+ },
},
readUserInfo: {
- password: () => Promise.resolve(password)
- }
+ password: () => Promise.resolve(password),
+ },
})
t.tearDown(reset)
diff --git a/deps/npm/test/lib/utils/audit-error.js b/deps/npm/test/lib/utils/audit-error.js
index f183a16e8d..cc5f4c006e 100644
--- a/deps/npm/test/lib/utils/audit-error.js
+++ b/deps/npm/test/lib/utils/audit-error.js
@@ -6,14 +6,14 @@ const npm = {
command: null,
flatOptions: {},
log: {
- warn: (...msg) => LOGS.push(msg)
- }
+ warn: (...msg) => LOGS.push(msg),
+ },
}
const OUTPUT = []
const output = (...msg) => OUTPUT.push(msg)
const auditError = requireInject('../../../lib/utils/audit-error.js', {
'../../../lib/npm.js': npm,
- '../../../lib/utils/output.js': output
+ '../../../lib/utils/output.js': output,
})
t.afterEach(cb => {
@@ -40,10 +40,10 @@ t.test('error, not audit command', t => {
method: 'POST',
uri: 'https://example.com/not/a/registry',
headers: {
- head: ['ers']
+ head: ['ers'],
},
- statusCode: '420'
- }
+ statusCode: '420',
+ },
}), true, 'had error')
t.strictSame(OUTPUT, [], 'no output')
t.strictSame(LOGS, [], 'no warnings')
@@ -60,14 +60,14 @@ t.test('error, audit command, not json', t => {
method: 'POST',
uri: 'https://example.com/not/a/registry',
headers: {
- head: ['ers']
+ head: ['ers'],
},
- statusCode: '420'
- }
+ statusCode: '420',
+ },
}))
- t.strictSame(OUTPUT, [ [ 'body' ] ], 'some output')
- t.strictSame(LOGS, [ [ 'audit', 'message' ] ], 'some warnings')
+ t.strictSame(OUTPUT, [['body']], 'some output')
+ t.strictSame(LOGS, [['audit', 'message']], 'some warnings')
t.end()
})
@@ -81,10 +81,10 @@ t.test('error, audit command, json', t => {
method: 'POST',
uri: 'https://example.com/not/a/registry',
headers: {
- head: ['ers']
+ head: ['ers'],
},
- statusCode: '420'
- }
+ statusCode: '420',
+ },
}))
t.strictSame(OUTPUT, [
@@ -102,9 +102,9 @@ t.test('error, audit command, json', t => {
' "body": {\n' +
' "response": "body"\n' +
' }\n' +
- '}'
- ]
+ '}',
+ ],
], 'some output')
- t.strictSame(LOGS, [ [ 'audit', 'message' ] ], 'some warnings')
+ t.strictSame(LOGS, [['audit', 'message']], 'some warnings')
t.end()
})
diff --git a/deps/npm/test/lib/utils/cleanup-log-files.js b/deps/npm/test/lib/utils/cleanup-log-files.js
index ee2c11e62a..7af0633fe7 100644
--- a/deps/npm/test/lib/utils/cleanup-log-files.js
+++ b/deps/npm/test/lib/utils/cleanup-log-files.js
@@ -6,7 +6,7 @@ const rimraf = require('rimraf')
const mocks = { glob, rimraf }
const cleanup = requireInject('../../../lib/utils/cleanup-log-files.js', {
glob: (...args) => mocks.glob(...args),
- rimraf: (...args) => mocks.rimraf(...args)
+ rimraf: (...args) => mocks.rimraf(...args),
})
const { basename } = require('path')
@@ -19,15 +19,15 @@ t.test('clean up those files', t => {
'2-debug.log': 'hello',
'3-debug.log': 'hello',
'4-debug.log': 'hello',
- '5-debug.log': 'hello'
- }
+ '5-debug.log': 'hello',
+ },
})
const warn = (...warning) => t.fail('failed cleanup', { warning })
return cleanup(cache, 3, warn).then(() => {
t.strictSame(fs.readdirSync(cache + '/_logs').sort(), [
'3-debug.log',
'4-debug.log',
- '5-debug.log'
+ '5-debug.log',
])
})
})
@@ -36,14 +36,14 @@ t.test('nothing to clean up', t => {
const cache = t.testdir({
_logs: {
'4-debug.log': 'hello',
- '5-debug.log': 'hello'
- }
+ '5-debug.log': 'hello',
+ },
})
const warn = (...warning) => t.fail('failed cleanup', { warning })
return cleanup(cache, 3, warn).then(() => {
t.strictSame(fs.readdirSync(cache + '/_logs').sort(), [
'4-debug.log',
- '5-debug.log'
+ '5-debug.log',
])
})
})
@@ -66,15 +66,15 @@ t.test('rimraf fail', t => {
'2-debug.log': 'hello',
'3-debug.log': 'hello',
'4-debug.log': 'hello',
- '5-debug.log': 'hello'
- }
+ '5-debug.log': 'hello',
+ },
})
const warnings = []
const warn = (...warning) => warnings.push(basename(warning[2]))
return cleanup(cache, 3, warn).then(() => {
t.strictSame(warnings.sort((a, b) => a.localeCompare(b)), [
'1-debug.log',
- '2-debug.log'
+ '2-debug.log',
])
})
})
diff --git a/deps/npm/test/lib/utils/completion/installed-deep.js b/deps/npm/test/lib/utils/completion/installed-deep.js
index 74c15e8f77..a2a3756104 100644
--- a/deps/npm/test/lib/utils/completion/installed-deep.js
+++ b/deps/npm/test/lib/utils/completion/installed-deep.js
@@ -7,19 +7,25 @@ let globalDir = 'MISSING_GLOBAL_DIR'
const _flatOptions = {
depth: Infinity,
global: false,
- get prefix () { return prefix }
+ get prefix () {
+ return prefix
+ },
}
const installedDeep = requireInject('../../../../lib/utils/completion/installed-deep.js', {
'../../../../lib/npm.js': {
flatOptions: _flatOptions,
- get prefix () { return _flatOptions.prefix },
- get globalDir () { return globalDir },
+ get prefix () {
+ return _flatOptions.prefix
+ },
+ get globalDir () {
+ return globalDir
+ },
config: {
get (key) {
return _flatOptions[key]
- }
- }
- }
+ },
+ },
+ },
})
const fixture = {
@@ -29,14 +35,14 @@ const fixture = {
dependencies: {
a: '^1.0.0',
b: '^1.0.0',
- c: '^1.0.0'
+ c: '^1.0.0',
},
devDependencies: {
- d: '^1.0.0'
+ d: '^1.0.0',
},
peerDependencies: {
- e: '^1.0.0'
- }
+ e: '^1.0.0',
+ },
}),
node_modules: {
a: {
@@ -44,33 +50,33 @@ const fixture = {
name: 'a',
version: '1.0.0',
dependencies: {
- f: '^1.0.0'
- }
- })
+ f: '^1.0.0',
+ },
+ }),
},
b: {
'package.json': JSON.stringify({
name: 'b',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
c: {
'package.json': JSON.stringify({
name: 'c',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
d: {
'package.json': JSON.stringify({
name: 'd',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
e: {
'package.json': JSON.stringify({
name: 'e',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
f: {
'package.json': JSON.stringify({
@@ -78,8 +84,8 @@ const fixture = {
version: '1.0.0',
dependencies: {
g: '^1.0.0',
- e: '^2.0.0'
- }
+ e: '^2.0.0',
+ },
}),
node_modules: {
e: {
@@ -87,27 +93,27 @@ const fixture = {
name: 'e',
version: '2.0.0',
dependencies: {
- bb: '^1.0.0'
- }
+ bb: '^1.0.0',
+ },
}),
node_modules: {
bb: {
'package.json': JSON.stringify({
name: 'bb',
- version: '1.0.0'
- })
- }
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
+ },
},
g: {
'package.json': JSON.stringify({
name: 'g',
- version: '1.0.0'
- })
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
}
const globalFixture = {
@@ -115,33 +121,33 @@ const globalFixture = {
foo: {
'package.json': JSON.stringify({
name: 'foo',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
},
bar: {
'package.json': JSON.stringify({
name: 'bar',
version: '1.0.0',
dependencies: {
- 'a-bar': '^1.0.0'
- }
+ 'a-bar': '^1.0.0',
+ },
}),
node_modules: {
'a-bar': {
'package.json': JSON.stringify({
name: 'a-bar',
- version: '1.0.0'
- })
- }
- }
- }
- }
+ version: '1.0.0',
+ }),
+ },
+ },
+ },
+ },
}
test('get list of package names', (t) => {
const fix = t.testdir({
local: fixture,
- global: globalFixture
+ global: globalFixture,
})
prefix = resolve(fix, 'local')
@@ -152,12 +158,12 @@ test('get list of package names', (t) => {
t.deepEqual(
res,
[
- [ 'bar', '-g' ],
- [ 'foo', '-g' ],
- [ 'a-bar', '-g' ],
+ ['bar', '-g'],
+ ['foo', '-g'],
+ ['a-bar', '-g'],
'a', 'b', 'c',
'd', 'e', 'f',
- 'g', 'bb'
+ 'g', 'bb',
],
'should return list of package names and global flag'
)
@@ -168,7 +174,7 @@ test('get list of package names', (t) => {
test('get list of package names as global', (t) => {
const fix = t.testdir({
local: fixture,
- global: globalFixture
+ global: globalFixture,
})
prefix = resolve(fix, 'local')
@@ -183,7 +189,7 @@ test('get list of package names as global', (t) => {
[
'bar',
'foo',
- 'a-bar'
+ 'a-bar',
],
'should return list of global packages with no extra flags'
)
@@ -195,7 +201,7 @@ test('get list of package names as global', (t) => {
test('limit depth', (t) => {
const fix = t.testdir({
local: fixture,
- global: globalFixture
+ global: globalFixture,
})
prefix = resolve(fix, 'local')
@@ -208,12 +214,12 @@ test('limit depth', (t) => {
t.deepEqual(
res,
[
- [ 'bar', '-g' ],
- [ 'foo', '-g' ],
+ ['bar', '-g'],
+ ['foo', '-g'],
'a', 'b',
'c', 'd',
'e', 'f',
- 'g'
+ 'g',
],
'should print only packages up to the specified depth'
)
@@ -225,7 +231,7 @@ test('limit depth', (t) => {
test('limit depth as global', (t) => {
const fix = t.testdir({
local: fixture,
- global: globalFixture
+ global: globalFixture,
})
prefix = resolve(fix, 'local')
@@ -240,7 +246,7 @@ test('limit depth as global', (t) => {
res,
[
'bar',
- 'foo'
+ 'foo',
],
'should reorder so that packages above that level depth goes last'
)
diff --git a/deps/npm/test/lib/utils/completion/installed-shallow.js b/deps/npm/test/lib/utils/completion/installed-shallow.js
index eb628a8ce8..1d6369bc78 100644
--- a/deps/npm/test/lib/utils/completion/installed-shallow.js
+++ b/deps/npm/test/lib/utils/completion/installed-shallow.js
@@ -6,7 +6,7 @@ const { resolve } = require('path')
const p = '../../../../lib/utils/completion/installed-shallow.js'
const installed = requireInject(p, {
- '../../../../lib/npm.js': npm
+ '../../../../lib/npm.js': npm,
})
t.test('global not set, include globals with -g', t => {
@@ -15,32 +15,32 @@ t.test('global not set, include globals with -g', t => {
node_modules: {
x: {},
'@scope': {
- y: {}
- }
- }
+ y: {},
+ },
+ },
},
local: {
node_modules: {
a: {},
'@scope': {
- b: {}
- }
- }
- }
+ b: {},
+ },
+ },
+ },
})
npm.globalDir = resolve(dir, 'global/node_modules')
npm.localDir = resolve(dir, 'local/node_modules')
flatOptions.global = false
const opt = { conf: { argv: { remain: [] } } }
installed(opt, (er, res) => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame(res.sort(), [
'@scope/y -g',
'x -g',
'a',
- '@scope/b'
+ '@scope/b',
].sort())
t.end()
})
@@ -52,18 +52,18 @@ t.test('global set, include globals and not locals', t => {
node_modules: {
x: {},
'@scope': {
- y: {}
- }
- }
+ y: {},
+ },
+ },
},
local: {
node_modules: {
a: {},
'@scope': {
- b: {}
- }
- }
- }
+ b: {},
+ },
+ },
+ },
})
npm.globalDir = resolve(dir, 'global/node_modules')
npm.localDir = resolve(dir, 'local/node_modules')
@@ -72,7 +72,7 @@ t.test('global set, include globals and not locals', t => {
installed(opt, (er, res) => {
t.strictSame(res.sort(), [
'@scope/y',
- 'x'
+ 'x',
].sort())
t.end()
})
@@ -84,27 +84,27 @@ t.test('more than 3 items in argv, skip it', t => {
node_modules: {
x: {},
'@scope': {
- y: {}
- }
- }
+ y: {},
+ },
+ },
},
local: {
node_modules: {
a: {},
'@scope': {
- b: {}
- }
- }
- }
+ b: {},
+ },
+ },
+ },
})
npm.globalDir = resolve(dir, 'global/node_modules')
npm.localDir = resolve(dir, 'local/node_modules')
flatOptions.global = false
const opt = { conf: { argv: { remain: [1, 2, 3, 4, 5, 6] } } }
installed(opt, (er, res) => {
- if (er) {
+ if (er)
throw er
- }
+
t.strictSame(res, null)
t.end()
})
diff --git a/deps/npm/test/lib/utils/completion/none.js b/deps/npm/test/lib/utils/completion/none.js
index 27f713b81e..70488be07e 100644
--- a/deps/npm/test/lib/utils/completion/none.js
+++ b/deps/npm/test/lib/utils/completion/none.js
@@ -1,6 +1,6 @@
const t = require('tap')
const none = require('../../../../lib/utils/completion/none.js')
-none({any:'thing'}, (er, res) => {
+none({any: 'thing'}, (er, res) => {
t.equal(er, null)
t.strictSame(res, [])
})
diff --git a/deps/npm/test/lib/utils/config.js b/deps/npm/test/lib/utils/config.js
index e8133eecb2..38fbe6753e 100644
--- a/deps/npm/test/lib/utils/config.js
+++ b/deps/npm/test/lib/utils/config.js
@@ -4,25 +4,25 @@ Object.defineProperty(process, 'umask', {
value: () => 0o26,
writable: true,
configurable: true,
- enumerable: true
+ enumerable: true,
})
// have to fake the node version, or else it'll only pass on this one
Object.defineProperty(process, 'version', {
- value: 'v14.8.0'
+ value: 'v14.8.0',
})
t.formatSnapshot = obj => {
- if (typeof obj !== 'object' || !obj || !obj.types) {
+ if (typeof obj !== 'object' || !obj || !obj.types)
return obj
- }
+
return {
...obj,
defaults: {
...obj.defaults,
- cache: '{CACHE DIR} ' + path.basename(obj.defaults.cache)
+ cache: '{CACHE DIR} ' + path.basename(obj.defaults.cache),
},
- types: formatTypes(obj.types)
+ types: formatTypes(obj.types),
}
}
@@ -38,19 +38,18 @@ const formatTypes = (types) => Object.entries(types).map(([key, value]) => {
}, {})
const formatTypeValue = (value) => {
- if (Array.isArray(value)) {
+ if (Array.isArray(value))
return value.map(formatTypeValue)
- } else if (value === url) {
+ else if (value === url)
return '{URL MODULE}'
- } else if (value === path) {
+ else if (value === path)
return '{PATH MODULE}'
- } else if (value === semver) {
+ else if (value === semver)
return '{SEMVER MODULE}'
- } else if (typeof value === 'function') {
+ else if (typeof value === 'function')
return `{${value.name} TYPE}`
- } else {
+ else
return value
- }
}
process.env.ComSpec = 'cmd.exe'
@@ -65,8 +64,8 @@ const networkInterfacesThrow = () => {
throw new Error('no network interfaces for some reason')
}
const networkInterfaces = () => ({
- 'eth420': [{ address: '127.0.0.1' }],
- 'eth69': [{ address: 'no place like home' }]
+ eth420: [{ address: '127.0.0.1' }],
+ eth69: [{ address: 'no place like home' }],
})
const tmpdir = () => '/tmp'
const os = { networkInterfaces, tmpdir }
@@ -77,7 +76,7 @@ t.test('working network interfaces, not windows', t => {
os,
'@npmcli/ci-detect': () => false,
'../../../lib/utils/is-windows.js': false,
- '../../../package.json': pkg
+ '../../../package.json': pkg,
})
t.matchSnapshot(config)
t.end()
@@ -88,7 +87,7 @@ t.test('no working network interfaces, on windows', t => {
os: { tmpdir, networkInterfaces: networkInterfacesThrow },
'@npmcli/ci-detect': () => false,
'../../../lib/utils/is-windows.js': true,
- '../../../package.json': pkg
+ '../../../package.json': pkg,
})
t.matchSnapshot(config)
t.end()
@@ -99,21 +98,21 @@ t.test('no process.umask() method', t => {
value: null,
writable: true,
configurable: true,
- enumerable: true
+ enumerable: true,
})
t.teardown(() => {
Object.defineProperty(process, 'umask', {
value: () => 0o26,
writable: true,
configurable: true,
- enumerable: true
+ enumerable: true,
})
})
const config = requireInject('../../../lib/utils/config.js', {
os: { tmpdir, networkInterfaces: networkInterfacesThrow },
'@npmcli/ci-detect': () => false,
'../../../lib/utils/is-windows.js': true,
- '../../../package.json': pkg
+ '../../../package.json': pkg,
})
t.equal(config.defaults.umask, 0o22)
t.matchSnapshot(config)
@@ -125,7 +124,7 @@ t.test('no comspec on windows', t => {
const config = requireInject('../../../lib/utils/config.js', {
os: { tmpdir, networkInterfaces: networkInterfacesThrow },
'@npmcli/ci-detect': () => false,
- '../../../lib/utils/is-windows.js': true
+ '../../../lib/utils/is-windows.js': true,
})
t.equal(config.defaults.shell, 'cmd')
t.end()
@@ -136,7 +135,7 @@ t.test('no shell on posix', t => {
const config = requireInject('../../../lib/utils/config.js', {
os,
'@npmcli/ci-detect': () => false,
- '../../../lib/utils/is-windows.js': false
+ '../../../lib/utils/is-windows.js': false,
})
t.equal(config.defaults.shell, 'sh')
t.end()
@@ -147,7 +146,7 @@ t.test('no EDITOR env, use VISUAL', t => {
const config = requireInject('../../../lib/utils/config.js', {
os,
'@npmcli/ci-detect': () => false,
- '../../../lib/utils/is-windows.js': false
+ '../../../lib/utils/is-windows.js': false,
})
t.equal(config.defaults.editor, 'mate')
t.end()
@@ -158,7 +157,7 @@ t.test('no VISUAL, use system default, not windows', t => {
const config = requireInject('../../../lib/utils/config.js', {
os,
'@npmcli/ci-detect': () => false,
- '../../../lib/utils/is-windows.js': false
+ '../../../lib/utils/is-windows.js': false,
})
t.equal(config.defaults.editor, 'vi')
t.end()
@@ -169,7 +168,7 @@ t.test('no VISUAL, use system default, not windows', t => {
const config = requireInject('../../../lib/utils/config.js', {
os,
'@npmcli/ci-detect': () => false,
- '../../../lib/utils/is-windows.js': true
+ '../../../lib/utils/is-windows.js': true,
})
t.equal(config.defaults.editor, 'notepad.exe')
t.end()
diff --git a/deps/npm/test/lib/utils/error-handler.js b/deps/npm/test/lib/utils/error-handler.js
index 9e32dfc5ff..2dc116a4d3 100644
--- a/deps/npm/test/lib/utils/error-handler.js
+++ b/deps/npm/test/lib/utils/error-handler.js
@@ -26,23 +26,23 @@ t.cleanSnapshot = (str) => redactCwd(str)
// internal modules mocks
const cacheFile = {
append: () => null,
- write: () => null
+ write: () => null,
}
const config = {
values: {
cache: 'cachefolder',
- timing: true
+ timing: true,
},
loaded: true,
updateNotification: null,
get (key) {
return this.values[key]
- }
+ },
}
const npm = {
version: '1.0.0',
- config
+ config,
}
const npmlog = {
@@ -52,26 +52,34 @@ const npmlog = {
id: this.record.length,
level,
message: args.reduce((res, i) => `${res} ${i.message ? i.message : i}`, ''),
- prefix: level !== 'verbose' ? 'foo' : ''
+ prefix: level !== 'verbose' ? 'foo' : '',
})
},
- error (...args) { this.log('error', ...args) },
- info (...args) { this.log('info', ...args) },
+ error (...args) {
+ this.log('error', ...args)
+ },
+ info (...args) {
+ this.log('info', ...args)
+ },
level: 'silly',
levels: {
silly: 0,
verbose: 1,
info: 2,
error: 3,
- silent: 4
+ silent: 4,
+ },
+ notice (...args) {
+ this.log('notice', ...args)
},
- notice (...args) { this.log('notice', ...args) },
record: [],
- verbose (...args) { this.log('verbose', ...args) }
+ verbose (...args) {
+ this.log('verbose', ...args)
+ },
}
const metrics = {
- stop: () => null
+ stop: () => null,
}
// overrides OS type/release for cross platform snapshots
@@ -96,8 +104,10 @@ process = Object.assign(
exit () {},
exitCode: 0,
version: 'v1.0.0',
- stdout: { write (_, cb) { cb() } },
- stderr: { write () {} }
+ stdout: { write (_, cb) {
+ cb()
+ } },
+ stderr: { write () {} },
}
)
// needs to put process back in its place
@@ -112,10 +122,10 @@ const mocks = {
'../../../lib/utils/error-message.js': (err) => ({
...err,
summary: [['ERR', err.message]],
- detail: [['ERR', err.message]]
+ detail: [['ERR', err.message]],
}),
'../../../lib/utils/metrics.js': metrics,
- '../../../lib/utils/cache-file.js': cacheFile
+ '../../../lib/utils/cache-file.js': cacheFile,
}
requireInject.installGlobally('../../../lib/utils/error-handler.js', mocks)
@@ -226,8 +236,8 @@ t.test('console.log output using --json', (t) => {
error: {
code: 'EBADTHING', // should default error code to E[A-Z]+
summary: 'Error: EBADTHING Something happened',
- detail: 'Error: EBADTHING Something happened'
- }
+ detail: 'Error: EBADTHING Something happened',
+ },
},
'should output expected json output'
)
@@ -246,7 +256,7 @@ t.test('throw a non-error obj', (t) => {
const weirdError = {
code: 'ESOMETHING',
- message: 'foo bar'
+ message: 'foo bar',
}
const _logError = npmlog.error
@@ -379,7 +389,7 @@ t.test('uses code from errno', (t) => {
errorHandler(Object.assign(
new Error('Error with errno'),
{
- errno: 127
+ errno: 127,
}
))
@@ -408,7 +418,7 @@ t.test('uses exitCode as code if using a number', (t) => {
errorHandler(Object.assign(
new Error('Error with code type number'),
{
- code: 404
+ code: 404,
}
))
@@ -464,7 +474,7 @@ t.test('defaults to log error msg if stack is missing', (t) => {
new Error('Error with no stack'),
{
code: 'ENOSTACK',
- errno: 127
+ errno: 127,
}
)
delete noStackErr.stack
diff --git a/deps/npm/test/lib/utils/error-message.js b/deps/npm/test/lib/utils/error-message.js
index 2647a8e199..86db7c94ba 100644
--- a/deps/npm/test/lib/utils/error-message.js
+++ b/deps/npm/test/lib/utils/error-message.js
@@ -7,13 +7,13 @@ process.getgid = () => 420
Object.defineProperty(process, 'arch', {
value: 'x64',
- configurable: true
+ configurable: true,
})
const beWindows = () => {
Object.defineProperty(process, 'platform', {
value: 'win32',
- configurable: true
+ configurable: true,
})
delete require.cache[require.resolve('../../../lib/utils/is-windows.js')]
}
@@ -21,7 +21,7 @@ const beWindows = () => {
const bePosix = () => {
Object.defineProperty(process, 'platform', {
value: 'posix',
- configurable: true
+ configurable: true,
})
delete require.cache[require.resolve('../../../lib/utils/is-windows.js')]
}
@@ -33,22 +33,21 @@ npm.config = {
loaded: false,
localPrefix: '/some/prefix/dir',
get: key => {
- if (key === 'cache') {
+ if (key === 'cache')
return CACHE
- } else if (key === 'node-version') {
+ else if (key === 'node-version')
return '99.99.99'
- } else if (key === 'global') {
+ else if (key === 'global')
return false
- } else {
+ else
throw new Error('unexpected config lookup: ' + key)
- }
- }
+ },
}
npm.version = '123.69.420-npm'
Object.defineProperty(process, 'version', {
value: '123.69.420-node',
- configurable: true
+ configurable: true,
})
const npmlog = require('npmlog')
@@ -64,8 +63,8 @@ const errorMessage = requireInject('../../../lib/utils/error-message.js', {
report: (...args) => {
EXPLAIN_CALLED.push(args)
return 'explanation'
- }
- }
+ },
+ },
})
t.test('just simple messages', t => {
@@ -92,7 +91,7 @@ t.test('just simple messages', t => {
'EINVALIDTYPE',
'ETOOMANYARGS',
'ETARGET',
- 'E403'
+ 'E403',
]
t.plan(codes.length)
codes.forEach(code => {
@@ -100,13 +99,12 @@ t.test('just simple messages', t => {
const pkgid = 'some@package'
const file = '/some/file'
const stack = 'dummy stack trace'
- const required = { node: '1.2.3', npm: '4.2.0' }
const er = Object.assign(new Error('foo'), {
code,
path,
pkgid,
file,
- stack
+ stack,
})
t.matchSnapshot(errorMessage(er))
})
@@ -132,18 +130,19 @@ t.test('replace message/stack sensistive info', t => {
t.test('bad engine with config loaded', t => {
npm.config.loaded = true
- t.teardown(() => { npm.config.loaded = false })
+ t.teardown(() => {
+ npm.config.loaded = false
+ })
const path = '/some/path'
const pkgid = 'some@package'
const file = '/some/file'
const stack = 'dummy stack trace'
- const required = { node: '1.2.3', npm: '4.2.0' }
const er = Object.assign(new Error('foo'), {
code: 'EBADENGINE',
path,
pkgid,
file,
- stack
+ stack,
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -152,14 +151,12 @@ t.test('bad engine with config loaded', t => {
t.test('enoent without a file', t => {
const path = '/some/path'
const pkgid = 'some@package'
- const file = '/some/file'
const stack = 'dummy stack trace'
- const required = { node: '1.2.3', npm: '4.2.0' }
const er = Object.assign(new Error('foo'), {
code: 'ENOENT',
path,
pkgid,
- stack
+ stack,
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -171,13 +168,12 @@ t.test('enolock without a command', t => {
const pkgid = 'some@package'
const file = '/some/file'
const stack = 'dummy stack trace'
- const required = { node: '1.2.3', npm: '4.2.0' }
const er = Object.assign(new Error('foo'), {
code: 'ENOLOCK',
path,
pkgid,
file,
- stack
+ stack,
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -191,18 +187,18 @@ t.test('default message', t => {
signal: 'SIGYOLO',
args: ['a', 'r', 'g', 's'],
stdout: 'stdout',
- stderr: 'stderr'
+ stderr: 'stderr',
})))
t.end()
})
t.test('eacces/eperm', t => {
const runTest = (windows, loaded, cachePath, cacheDest) => t => {
- if (windows) {
+ if (windows)
beWindows()
- } else {
+ else
bePosix()
- }
+
npm.config.loaded = loaded
const path = `${cachePath ? CACHE : '/not/cache/dir'}/path`
const dest = `${cacheDest ? CACHE : '/not/cache/dir'}/dest`
@@ -210,7 +206,7 @@ t.test('eacces/eperm', t => {
code: 'EACCES',
path,
dest,
- stack: 'dummy stack trace'
+ stack: 'dummy stack trace',
})
verboseLogs.length = 0
t.matchSnapshot(errorMessage(er))
@@ -272,36 +268,36 @@ t.test('json parse', t => {
}
}
}
-`
+`,
})
const { prefix } = npm
const { argv } = process
t.teardown(() => {
Object.defineProperty(npm, 'prefix', {
value: prefix,
- configurable: true
+ configurable: true,
})
process.argv = argv
})
Object.defineProperty(npm, 'prefix', { value: dir, configurable: true })
process.argv = ['arg', 'v']
- const ok = t.matchSnapshot(errorMessage(Object.assign(new Error('conflicted'), {
+ t.matchSnapshot(errorMessage(Object.assign(new Error('conflicted'), {
code: 'EJSONPARSE',
- file: resolve(dir, 'package.json')
+ file: resolve(dir, 'package.json'),
})))
t.end()
})
t.test('just regular bad json in package.json', t => {
const dir = t.testdir({
- 'package.json': 'not even slightly json'
+ 'package.json': 'not even slightly json',
})
const { prefix } = npm
const { argv } = process
t.teardown(() => {
Object.defineProperty(npm, 'prefix', {
value: prefix,
- configurable: true
+ configurable: true,
})
process.argv = argv
})
@@ -309,14 +305,14 @@ t.test('json parse', t => {
process.argv = ['arg', 'v']
t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), {
code: 'EJSONPARSE',
- file: resolve(dir, 'package.json')
+ file: resolve(dir, 'package.json'),
})))
t.end()
})
t.test('json somewhere else', t => {
const dir = t.testdir({
- 'blerg.json': 'not even slightly json'
+ 'blerg.json': 'not even slightly json',
})
const { argv } = process
t.teardown(() => {
@@ -325,7 +321,7 @@ t.test('json parse', t => {
process.argv = ['arg', 'v']
t.matchSnapshot(errorMessage(Object.assign(new Error('not json'), {
code: 'EJSONPARSE',
- file: `${dir}/blerg.json`
+ file: `${dir}/blerg.json`,
})))
t.end()
})
@@ -336,7 +332,7 @@ t.test('json parse', t => {
t.test('eotp/e401', t => {
t.test('401, no auth headers', t => {
t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), {
- code: 'E401'
+ code: 'E401',
})))
t.end()
})
@@ -350,7 +346,7 @@ t.test('eotp/e401', t => {
t.test('one-time pass challenge code', t => {
t.matchSnapshot(errorMessage(Object.assign(new Error('nope'), {
- code: 'EOTP'
+ code: 'EOTP',
})))
t.end()
})
@@ -358,7 +354,7 @@ t.test('eotp/e401', t => {
t.test('one-time pass challenge message', t => {
const message = 'one-time pass'
t.matchSnapshot(errorMessage(Object.assign(new Error(message), {
- code: 'E401'
+ code: 'E401',
})))
t.end()
})
@@ -368,16 +364,16 @@ t.test('eotp/e401', t => {
'Bearer realm=do, charset="UTF-8", challenge="yourself"',
'Basic realm=by, charset="UTF-8", challenge="your friends"',
'PickACardAnyCard realm=friday, charset="UTF-8"',
- 'WashYourHands, charset="UTF-8"'
+ 'WashYourHands, charset="UTF-8"',
]
t.plan(auths.length)
for (const auth of auths) {
t.test(auth, t => {
const er = Object.assign(new Error('challenge!'), {
headers: {
- 'www-authenticate': [ auth ]
+ 'www-authenticate': [auth],
},
- code: 'E401'
+ code: 'E401',
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -397,7 +393,7 @@ t.test('404', t => {
t.test('you should publish it', t => {
const er = Object.assign(new Error('404 not found'), {
pkgid: 'yolo',
- code: 'E404'
+ code: 'E404',
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -405,7 +401,7 @@ t.test('404', t => {
t.test('name with warning', t => {
const er = Object.assign(new Error('404 not found'), {
pkgid: new Array(215).fill('x').join(''),
- code: 'E404'
+ code: 'E404',
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -413,7 +409,7 @@ t.test('404', t => {
t.test('name with error', t => {
const er = Object.assign(new Error('404 not found'), {
pkgid: 'node_modules',
- code: 'E404'
+ code: 'E404',
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -427,13 +423,13 @@ t.test('bad platform', t => {
pkgid: 'lodash@1.0.0',
current: {
os: 'posix',
- cpu: 'x64'
+ cpu: 'x64',
},
required: {
os: '!yours',
- cpu: 'x420'
+ cpu: 'x420',
},
- code: 'EBADPLATFORM'
+ code: 'EBADPLATFORM',
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -443,13 +439,13 @@ t.test('bad platform', t => {
pkgid: 'lodash@1.0.0',
current: {
os: 'posix',
- cpu: 'x64'
+ cpu: 'x64',
},
required: {
os: ['!yours', 'mine'],
- cpu: ['x420', 'x69']
+ cpu: ['x420', 'x69'],
},
- code: 'EBADPLATFORM'
+ code: 'EBADPLATFORM',
})
t.matchSnapshot(errorMessage(er))
t.end()
@@ -460,7 +456,7 @@ t.test('bad platform', t => {
t.test('explain ERESOLVE errors', t => {
const er = Object.assign(new Error('could not resolve'), {
- code: 'ERESOLVE'
+ code: 'ERESOLVE',
})
t.matchSnapshot(errorMessage(er))
t.strictSame(EXPLAIN_CALLED, [[er]])
diff --git a/deps/npm/test/lib/utils/escape-arg.js b/deps/npm/test/lib/utils/escape-arg.js
index 413fa47838..b80a63f0b8 100644
--- a/deps/npm/test/lib/utils/escape-arg.js
+++ b/deps/npm/test/lib/utils/escape-arg.js
@@ -2,7 +2,7 @@ const requireInject = require('require-inject')
const t = require('tap')
const getEscape = win => requireInject('../../../lib/utils/escape-arg.js', {
'../../../lib/utils/is-windows.js': win,
- path: require('path')[win ? 'win32' : 'posix']
+ path: require('path')[win ? 'win32' : 'posix'],
})
const winEscape = getEscape(true)
diff --git a/deps/npm/test/lib/utils/escape-exec-path.js b/deps/npm/test/lib/utils/escape-exec-path.js
index 28fe75c2a9..f16c576ec5 100644
--- a/deps/npm/test/lib/utils/escape-exec-path.js
+++ b/deps/npm/test/lib/utils/escape-exec-path.js
@@ -2,7 +2,7 @@ const requireInject = require('require-inject')
const t = require('tap')
const getEscape = win => requireInject('../../../lib/utils/escape-exec-path.js', {
'../../../lib/utils/is-windows.js': win,
- path: require('path')[win ? 'win32' : 'posix']
+ path: require('path')[win ? 'win32' : 'posix'],
})
const winEscape = getEscape(true)
diff --git a/deps/npm/test/lib/utils/explain-dep.js b/deps/npm/test/lib/utils/explain-dep.js
index 9a205e3c39..28f14477ab 100644
--- a/deps/npm/test/lib/utils/explain-dep.js
+++ b/deps/npm/test/lib/utils/explain-dep.js
@@ -143,7 +143,7 @@ cases.manyDeps = {
{
type: 'prod',
name: 'manydep',
- spec:'>1.0.0-beta <1.0.1',
+ spec: '>1.0.0-beta <1.0.1',
from: {
location: '/path/to/project',
},
diff --git a/deps/npm/test/lib/utils/explain-eresolve.js b/deps/npm/test/lib/utils/explain-eresolve.js
index def13153d2..8dae1b92cd 100644
--- a/deps/npm/test/lib/utils/explain-eresolve.js
+++ b/deps/npm/test/lib/utils/explain-eresolve.js
@@ -2,7 +2,7 @@ const t = require('tap')
const requireInject = require('require-inject')
const npm = {}
const { explain, report } = requireInject('../../../lib/utils/explain-eresolve.js', {
- '../../../lib/npm.js': npm
+ '../../../lib/npm.js': npm,
})
const { statSync, readFileSync, unlinkSync } = require('fs')
// strip out timestamps from reports
diff --git a/deps/npm/test/lib/utils/file-exists.js b/deps/npm/test/lib/utils/file-exists.js
index f247f564e0..473a4b050e 100644
--- a/deps/npm/test/lib/utils/file-exists.js
+++ b/deps/npm/test/lib/utils/file-exists.js
@@ -3,7 +3,7 @@ const fileExists = require('../../../lib/utils/file-exists.js')
test('returns true when arg is a file', async (t) => {
const path = t.testdir({
- foo: 'just some file'
+ foo: 'just some file',
})
const result = await fileExists(`${path}/foo`)
@@ -13,7 +13,7 @@ test('returns true when arg is a file', async (t) => {
test('returns false when arg is not a file', async (t) => {
const path = t.testdir({
- foo: {}
+ foo: {},
})
const result = await fileExists(`${path}/foo`)
diff --git a/deps/npm/test/lib/utils/flat-options.js b/deps/npm/test/lib/utils/flat-options.js
index 7601c78d27..82c00fc7e5 100644
--- a/deps/npm/test/lib/utils/flat-options.js
+++ b/deps/npm/test/lib/utils/flat-options.js
@@ -12,7 +12,7 @@ class Mocknpm {
this.modes = {
exec: 0o777,
file: 0o666,
- umask: 0o22
+ umask: 0o22,
}
this.color = true
this.projectScope = '@npmcli'
@@ -110,14 +110,16 @@ class MockConfig {
'user-agent': 'user-agent',
'@scope:registry': '@scope:registry',
'//nerf.dart:_authToken': '//nerf.dart:_authToken',
- 'proxy': 'proxy',
- 'noproxy': 'noproxy',
- ...opts
+ proxy: 'proxy',
+ noproxy: 'noproxy',
+ ...opts,
}]
}
+
get (key) {
return this.list[0][key]
}
+
set (key, val) {
this.list[0][key] = val
}
@@ -127,7 +129,7 @@ const flatOptions = require('../../../lib/utils/flat-options.js')
t.match(logs, [[
'verbose',
'npm-session',
- /^[0-9a-f]{16}$/
+ /^[0-9a-f]{16}$/,
]], 'logged npm session verbosely')
logs.length = 0
@@ -139,7 +141,7 @@ t.test('basic', t => {
npmBin: '/path/to/npm/bin.js',
log: {},
npmSession: '12345',
- cache: generatedFlat.cache.replace(/\\/g, '/')
+ cache: generatedFlat.cache.replace(/\\/g, '/'),
}
t.matchSnapshot(clean, 'flat options')
t.equal(generatedFlat.npmCommand, null, 'command not set yet')
@@ -158,7 +160,7 @@ t.test('basic', t => {
t.test('get preferOffline from cache-min', t => {
const npm = new Mocknpm({
'cache-min': 9999999,
- 'prefer-offline': undefined
+ 'prefer-offline': undefined,
})
const opts = flatOptions(npm)
t.equal(opts.preferOffline, true, 'got preferOffline from cache min')
@@ -172,7 +174,7 @@ t.test('get preferOffline from cache-min', t => {
t.test('get preferOnline from cache-max', t => {
const npm = new Mocknpm({
'cache-max': -1,
- 'prefer-online': undefined
+ 'prefer-online': undefined,
})
const opts = flatOptions(npm)
t.equal(opts.preferOnline, true, 'got preferOnline from cache min')
@@ -194,7 +196,7 @@ t.test('tag emits warning', t => {
t.test('omit/include options', t => {
t.test('omit explicitly', t => {
const npm = new Mocknpm({
- omit: ['dev', 'optional', 'peer']
+ omit: ['dev', 'optional', 'peer'],
})
t.strictSame(flatOptions(npm).omit, ['dev', 'optional', 'peer'])
t.end()
@@ -203,7 +205,7 @@ t.test('omit/include options', t => {
t.test('omit and include some', t => {
const npm = new Mocknpm({
omit: ['dev', 'optional', 'peer'],
- include: ['peer']
+ include: ['peer'],
})
t.strictSame(flatOptions(npm).omit, ['dev', 'optional'])
t.end()
@@ -213,7 +215,7 @@ t.test('omit/include options', t => {
const npm = new Mocknpm({
omit: ['dev', 'optional', 'peer'],
include: [],
- dev: true
+ dev: true,
})
t.strictSame(flatOptions(npm).omit, ['optional', 'peer'])
t.end()
@@ -223,7 +225,7 @@ t.test('omit/include options', t => {
const npm = new Mocknpm({
omit: [],
include: [],
- production: true
+ production: true,
})
t.strictSame(flatOptions(npm).omit, ['dev'])
t.end()
@@ -236,7 +238,7 @@ t.test('omit/include options', t => {
const npm = new Mocknpm({
omit: [],
include: [],
- only: c
+ only: c,
})
t.strictSame(flatOptions(npm).omit, ['dev'])
t.end()
@@ -246,7 +248,7 @@ t.test('omit/include options', t => {
t.test('also dev', t => {
const npm = new Mocknpm({
omit: ['dev', 'optional', 'peer'],
- also: 'dev'
+ also: 'dev',
})
t.strictSame(flatOptions(npm).omit, ['optional', 'peer'])
t.end()
@@ -256,7 +258,7 @@ t.test('omit/include options', t => {
const npm = new Mocknpm({
optional: false,
omit: null,
- include: null
+ include: null,
})
t.strictSame(flatOptions(npm).omit, ['optional'])
t.end()
@@ -276,9 +278,9 @@ t.test('various default values and falsey fallbacks', t => {
'script-shell': false,
registry: 'http://example.com',
'metrics-registry': null,
- 'searchlimit': 0,
+ searchlimit: 0,
'save-exact': false,
- 'save-prefix': '>='
+ 'save-prefix': '>=',
})
const opts = flatOptions(npm)
t.equal(opts.scriptShell, undefined, 'scriptShell is undefined if falsey')
@@ -298,7 +300,7 @@ t.test('legacy _auth token', t => {
t.strictSame(
flatOptions(npm)._auth,
'asdfasdf',
- 'should set legacy _auth token',
+ 'should set legacy _auth token'
)
t.end()
})
@@ -308,7 +310,7 @@ t.test('save-type', t => {
'save-optional': false,
'save-peer': false,
'save-dev': false,
- 'save-prod': false
+ 'save-prod': false,
}
const cases = [
['peerOptional', {
@@ -316,23 +318,23 @@ t.test('save-type', t => {
'save-peer': true,
}],
['optional', {
- 'save-optional': true
+ 'save-optional': true,
}],
['dev', {
- 'save-dev': true
+ 'save-dev': true,
}],
['peer', {
- 'save-peer': true
+ 'save-peer': true,
}],
['prod', {
- 'save-prod': true
+ 'save-prod': true,
}],
- [null, {}]
+ [null, {}],
]
for (const [expect, options] of cases) {
const opts = flatOptions(new Mocknpm({
...base,
- ...options
+ ...options,
}))
t.equal(opts.saveType, expect, JSON.stringify(options))
}
diff --git a/deps/npm/test/lib/utils/get-identity.js b/deps/npm/test/lib/utils/get-identity.js
index c72f48b2e8..8a4de88352 100644
--- a/deps/npm/test/lib/utils/get-identity.js
+++ b/deps/npm/test/lib/utils/get-identity.js
@@ -4,7 +4,7 @@ const requireInject = require('require-inject')
test('throws ENOREGISTRY when no registry option is provided', async (t) => {
t.plan(2)
const getIdentity = requireInject('../../../lib/utils/get-identity.js', {
- '../../../lib/npm.js': {}
+ '../../../lib/npm.js': {},
})
try {
@@ -23,9 +23,9 @@ test('returns username from uri when provided', async (t) => {
config: {
getCredentialsByURI: () => {
return { username: 'foo' }
- }
- }
- }
+ },
+ },
+ },
})
const identity = await getIdentity({ registry: 'https://registry.npmjs.org' })
@@ -37,22 +37,22 @@ test('calls registry whoami when token is provided', async (t) => {
const options = {
registry: 'https://registry.npmjs.org',
- token: 'thisisnotreallyatoken'
+ token: 'thisisnotreallyatoken',
}
const getIdentity = requireInject('../../../lib/utils/get-identity.js', {
'../../../lib/npm.js': {
config: {
- getCredentialsByURI: () => options
- }
+ getCredentialsByURI: () => options,
+ },
},
'npm-registry-fetch': {
json: (path, opts) => {
t.equal(path, '/-/whoami', 'calls whoami')
t.same(opts, options, 'passes through provided options')
return { username: 'foo' }
- }
- }
+ },
+ },
})
const identity = await getIdentity(options)
@@ -64,22 +64,22 @@ test('throws ENEEDAUTH when response does not include a username', async (t) =>
const options = {
registry: 'https://registry.npmjs.org',
- token: 'thisisnotreallyatoken'
+ token: 'thisisnotreallyatoken',
}
const getIdentity = requireInject('../../../lib/utils/get-identity.js', {
'../../../lib/npm.js': {
config: {
- getCredentialsByURI: () => options
- }
+ getCredentialsByURI: () => options,
+ },
},
'npm-registry-fetch': {
json: (path, opts) => {
t.equal(path, '/-/whoami', 'calls whoami')
t.same(opts, options, 'passes through provided options')
return {}
- }
- }
+ },
+ },
})
try {
@@ -94,9 +94,9 @@ test('throws ENEEDAUTH when neither username nor token is configured', async (t)
const getIdentity = requireInject('../../../lib/utils/get-identity.js', {
'../../../lib/npm.js': {
config: {
- getCredentialsByURI: () => ({})
- }
- }
+ getCredentialsByURI: () => ({}),
+ },
+ },
})
try {
diff --git a/deps/npm/test/lib/utils/get-project-scope.js b/deps/npm/test/lib/utils/get-project-scope.js
index 15ab2bdeff..9737b06433 100644
--- a/deps/npm/test/lib/utils/get-project-scope.js
+++ b/deps/npm/test/lib/utils/get-project-scope.js
@@ -3,7 +3,7 @@ const t = require('tap')
t.test('package.json with scope', t => {
const dir = t.testdir({
- 'package.json': JSON.stringify({ name: '@foo/bar' })
+ 'package.json': JSON.stringify({ name: '@foo/bar' }),
})
t.equal(getProjectScope(dir), '@foo')
t.end()
@@ -11,7 +11,7 @@ t.test('package.json with scope', t => {
t.test('package.json with slash, but no @', t => {
const dir = t.testdir({
- 'package.json': JSON.stringify({ name: 'foo/bar' })
+ 'package.json': JSON.stringify({ name: 'foo/bar' }),
})
t.equal(getProjectScope(dir), '')
t.end()
@@ -19,7 +19,7 @@ t.test('package.json with slash, but no @', t => {
t.test('package.json without scope', t => {
const dir = t.testdir({
- 'package.json': JSON.stringify({ name: 'foo' })
+ 'package.json': JSON.stringify({ name: 'foo' }),
})
t.equal(getProjectScope(dir), '')
t.end()
@@ -27,7 +27,7 @@ t.test('package.json without scope', t => {
t.test('package.json without name', t => {
const dir = t.testdir({
- 'package.json': JSON.stringify({})
+ 'package.json': JSON.stringify({}),
})
t.equal(getProjectScope(dir), '')
t.end()
@@ -35,7 +35,7 @@ t.test('package.json without name', t => {
t.test('package.json not JSON', t => {
const dir = t.testdir({
- 'package.json': 'hello'
+ 'package.json': 'hello',
})
t.equal(getProjectScope(dir), '')
t.end()
diff --git a/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js b/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js
index f87cb84eed..516d3d5867 100644
--- a/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js
+++ b/deps/npm/test/lib/utils/hosted-git-info-from-manifest.js
@@ -9,13 +9,13 @@ t.equal(hostedFromMani({ repository: 'not hosted anywhere' }), null)
t.equal(hostedFromMani({ repository: { url: 'not hosted anywhere' } }), null)
t.match(hostedFromMani({
- repository: 'git+https://github.com/isaacs/abbrev-js'
+ repository: 'git+https://github.com/isaacs/abbrev-js',
}), hostedGitInfo.fromUrl('git+https://github.com/isaacs/abbrev-js'))
t.match(hostedFromMani({
- repository: { url: 'git+https://github.com/isaacs/abbrev-js' }
+ repository: { url: 'git+https://github.com/isaacs/abbrev-js' },
}), hostedGitInfo.fromUrl('https://github.com/isaacs/abbrev-js'))
t.match(hostedFromMani({
- repository: { url: 'git+ssh://git@github.com/isaacs/abbrev-js' }
+ repository: { url: 'git+ssh://git@github.com/isaacs/abbrev-js' },
}), hostedGitInfo.fromUrl('ssh://git@github.com/isaacs/abbrev-js'))
diff --git a/deps/npm/test/lib/utils/is-windows-bash.js b/deps/npm/test/lib/utils/is-windows-bash.js
index 730dfe301b..94fde0ace1 100644
--- a/deps/npm/test/lib/utils/is-windows-bash.js
+++ b/deps/npm/test/lib/utils/is-windows-bash.js
@@ -8,13 +8,13 @@ const isWindowsBash = () => {
Object.defineProperty(process, 'platform', {
value: 'posix',
- configurable: true
+ configurable: true,
})
t.equal(isWindowsBash(), false, 'false when not windows')
Object.defineProperty(process, 'platform', {
value: 'win32',
- configurable: true
+ configurable: true,
})
process.env.MSYSTEM = 'not ming'
process.env.TERM = 'dumb'
diff --git a/deps/npm/test/lib/utils/is-windows-shell.js b/deps/npm/test/lib/utils/is-windows-shell.js
index e2164c222b..95519925c9 100644
--- a/deps/npm/test/lib/utils/is-windows-shell.js
+++ b/deps/npm/test/lib/utils/is-windows-shell.js
@@ -1,6 +1,6 @@
const t = require('tap')
Object.defineProperty(process, 'platform', {
- value: 'win32'
+ value: 'win32',
})
const isWindows = require('../../../lib/utils/is-windows.js')
const isWindowsBash = require('../../../lib/utils/is-windows-bash.js')
diff --git a/deps/npm/test/lib/utils/is-windows.js b/deps/npm/test/lib/utils/is-windows.js
index 9100071699..f8f2999c99 100644
--- a/deps/npm/test/lib/utils/is-windows.js
+++ b/deps/npm/test/lib/utils/is-windows.js
@@ -2,7 +2,7 @@ const t = require('tap')
const actuallyWindows = process.platform === 'win32'
t.equal(actuallyWindows, require('../../../lib/utils/is-windows.js'))
Object.defineProperty(process, 'platform', {
- value: actuallyWindows ? 'posix' : 'win32'
+ value: actuallyWindows ? 'posix' : 'win32',
})
delete require.cache[require.resolve('../../../lib/utils/is-windows.js')]
t.equal(!actuallyWindows, require('../../../lib/utils/is-windows.js'))
diff --git a/deps/npm/test/lib/utils/lifecycle-cmd.js b/deps/npm/test/lib/utils/lifecycle-cmd.js
index 7338229546..0eb342cee5 100644
--- a/deps/npm/test/lib/utils/lifecycle-cmd.js
+++ b/deps/npm/test/lib/utils/lifecycle-cmd.js
@@ -3,9 +3,9 @@ const requireInject = require('require-inject')
const lifecycleCmd = requireInject('../../../lib/utils/lifecycle-cmd.js', {
'../../../lib/npm.js': {
commands: {
- run: (args, cb) => cb(null, 'called npm.commands.run')
- }
- }
+ run: (args, cb) => cb(null, 'called npm.commands.run'),
+ },
+ },
})
t.test('create a lifecycle command', t => {
diff --git a/deps/npm/test/lib/utils/path.js b/deps/npm/test/lib/utils/path.js
index facee06459..74fb93462f 100644
--- a/deps/npm/test/lib/utils/path.js
+++ b/deps/npm/test/lib/utils/path.js
@@ -3,7 +3,7 @@ const requireInject = require('require-inject')
const mod = '../../../lib/utils/path.js'
const delim = require('../../../lib/utils/is-windows.js') ? ';' : ':'
Object.defineProperty(process, 'env', {
- value: {}
+ value: {},
})
process.env.path = ['foo', 'bar', 'baz'].join(delim)
t.strictSame(requireInject(mod), ['foo', 'bar', 'baz'])
diff --git a/deps/npm/test/lib/utils/perf.js b/deps/npm/test/lib/utils/perf.js
index 9b38a3da81..840dcb6e32 100644
--- a/deps/npm/test/lib/utils/perf.js
+++ b/deps/npm/test/lib/utils/perf.js
@@ -20,15 +20,15 @@ t.test('time some stuff', t => {
process.emit('timeEnd', 'foo')
process.emit('timeEnd', 'baz')
t.match(logs, [
- [ 'timing', 'foo', /Completed in [0-9]+ms/ ],
- [ 'timing', 'bar', /Completed in [0-9]+ms/ ],
- [ 'timing', 'foo', /Completed in [0-9]+ms/ ],
+ ['timing', 'foo', /Completed in [0-9]+ms/],
+ ['timing', 'bar', /Completed in [0-9]+ms/],
+ ['timing', 'foo', /Completed in [0-9]+ms/],
[
'silly',
'timing',
"Tried to end timer that doesn't exist:",
- 'baz'
- ]
+ 'baz',
+ ],
])
t.match(timings, { foo: Number, bar: Number })
t.equal(timings.foo > timings.bar, true, 'foo should be > bar')
diff --git a/deps/npm/test/lib/utils/ping.js b/deps/npm/test/lib/utils/ping.js
index d2b269556e..6e0451538f 100644
--- a/deps/npm/test/lib/utils/ping.js
+++ b/deps/npm/test/lib/utils/ping.js
@@ -11,7 +11,7 @@ test('pings', async (t) => {
t.equal(url, '/-/ping?write=true', 'calls the correct url')
t.equal(opts, options, 'passes through options')
return { json: () => Promise.resolve(response) }
- }
+ },
})
const res = await ping(options)
@@ -28,7 +28,7 @@ test('catches errors and returns empty json', async (t) => {
t.equal(url, '/-/ping?write=true', 'calls the correct url')
t.equal(opts, options, 'passes through options')
return { json: () => Promise.reject(response) }
- }
+ },
})
const res = await ping(options)
diff --git a/deps/npm/test/lib/utils/proc-log-listener.js b/deps/npm/test/lib/utils/proc-log-listener.js
index 0a6119d1a1..2c10095037 100644
--- a/deps/npm/test/lib/utils/proc-log-listener.js
+++ b/deps/npm/test/lib/utils/proc-log-listener.js
@@ -5,11 +5,11 @@ const { inspect } = require('util')
const logs = []
const npmlog = {
warn: (...args) => logs.push(['warn', ...args]),
- verbose: (...args) => logs.push(['verbose', ...args])
+ verbose: (...args) => logs.push(['verbose', ...args]),
}
requireInject('../../../lib/utils/proc-log-listener.js', {
- npmlog
+ npmlog,
})()
process.emit('log', 'warn', 'hello', 'i am a warning')
@@ -17,22 +17,26 @@ t.strictSame(logs, [['warn', 'hello', 'i am a warning']])
logs.length = 0
const nopeError = new Error('nope')
-npmlog.warn = () => { throw nopeError }
+npmlog.warn = () => {
+ throw nopeError
+}
process.emit('log', 'warn', 'fail')
t.strictSame(logs, [[
'verbose',
`attempt to log ${inspect(['warn', 'fail'])} crashed`,
- nopeError
+ nopeError,
]])
logs.length = 0
-npmlog.verbose = () => { throw nopeError }
+npmlog.verbose = () => {
+ throw nopeError
+}
const consoleErrors = []
console.error = (...args) => consoleErrors.push(args)
process.emit('log', 'warn', 'fail2')
t.strictSame(logs, [])
t.strictSame(consoleErrors, [[
`attempt to log ${inspect(['warn', 'fail2'])} crashed`,
- nopeError
+ nopeError,
]])
diff --git a/deps/npm/test/lib/utils/read-local-package.js b/deps/npm/test/lib/utils/read-local-package.js
index 8854cf4e5f..33a408eb53 100644
--- a/deps/npm/test/lib/utils/read-local-package.js
+++ b/deps/npm/test/lib/utils/read-local-package.js
@@ -5,21 +5,23 @@ let prefix
const _flatOptions = {
json: false,
global: false,
- get prefix () { return prefix }
+ get prefix () {
+ return prefix
+ },
}
const readLocalPackageName = requireInject('../../../lib/utils/read-local-package.js', {
'../../../lib/npm.js': {
- flatOptions: _flatOptions
- }
+ flatOptions: _flatOptions,
+ },
})
test('read local package.json', async (t) => {
prefix = t.testdir({
'package.json': JSON.stringify({
name: 'my-local-package',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
})
const packageName = await readLocalPackageName()
t.equal(
@@ -33,8 +35,8 @@ test('read local scoped-package.json', async (t) => {
prefix = t.testdir({
'package.json': JSON.stringify({
name: '@my-scope/my-local-package',
- version: '1.0.0'
- })
+ version: '1.0.0',
+ }),
})
const packageName = await readLocalPackageName()
t.equal(
diff --git a/deps/npm/test/lib/utils/reify-finish.js b/deps/npm/test/lib/utils/reify-finish.js
new file mode 100644
index 0000000000..d6c7d2e7b2
--- /dev/null
+++ b/deps/npm/test/lib/utils/reify-finish.js
@@ -0,0 +1,80 @@
+const t = require('tap')
+const requireInject = require('require-inject')
+
+const npm = {
+ config: {
+ data: {
+ get: () => builtinConfMock,
+ },
+ },
+}
+
+const builtinConfMock = {
+ loadError: new Error('no builtin config'),
+ raw: { hasBuiltinConfig: true, x: 'y', nested: { foo: 'bar' }},
+}
+
+const reifyOutput = () => {}
+
+let expectWrite = false
+const realFs = require('fs')
+const fs = {
+ ...realFs,
+ promises: {
+ ...realFs.promises,
+ writeFile: async (path, data) => {
+ if (!expectWrite)
+ throw new Error('did not expect to write builtin config file')
+ return realFs.promises.writeFile(path, data)
+ },
+ },
+}
+
+const reifyFinish = requireInject('../../../lib/utils/reify-finish.js', {
+ fs,
+ '../../../lib/npm.js': npm,
+ '../../../lib/utils/reify-output.js': reifyOutput,
+})
+
+t.test('should not write if not global', async t => {
+ expectWrite = false
+ await reifyFinish({
+ options: { global: false },
+ actualTree: {},
+ })
+})
+
+t.test('should not write if no global npm module', async t => {
+ expectWrite = false
+ await reifyFinish({
+ options: { global: true },
+ actualTree: {
+ inventory: new Map(),
+ },
+ })
+})
+
+t.test('should not write if builtin conf had load error', async t => {
+ expectWrite = false
+ await reifyFinish({
+ options: { global: true },
+ actualTree: {
+ inventory: new Map([['node_modules/npm', {}]]),
+ },
+ })
+})
+
+t.test('should write if everything above passes', async t => {
+ expectWrite = true
+ delete builtinConfMock.loadError
+ const path = t.testdir()
+ await reifyFinish({
+ options: { global: true },
+ actualTree: {
+ inventory: new Map([['node_modules/npm', {path}]]),
+ },
+ })
+ // windowwwwwwssss!!!!!
+ const data = fs.readFileSync(`${path}/npmrc`, 'utf8').replace(/\r\n/g, '\n')
+ t.matchSnapshot(data, 'written config')
+})
diff --git a/deps/npm/test/lib/utils/reify-output.js b/deps/npm/test/lib/utils/reify-output.js
index 55f77f1d9d..b905c9ab0f 100644
--- a/deps/npm/test/lib/utils/reify-output.js
+++ b/deps/npm/test/lib/utils/reify-output.js
@@ -9,18 +9,18 @@ log.level = 'warn'
t.cleanSnapshot = str => str.replace(/in [0-9]+m?s/g, 'in {TIME}')
const settings = {
- fund: true
+ fund: true,
}
const npmock = {
started: Date.now(),
- flatOptions: settings
+ flatOptions: settings,
}
const getReifyOutput = tester =>
requireInject(
'../../../lib/utils/reify-output.js',
{
'../../../lib/npm.js': npmock,
- '../../../lib/utils/output.js': tester
+ '../../../lib/utils/output.js': tester,
}
)
@@ -36,11 +36,11 @@ t.test('missing info', (t) => {
reifyOutput({
actualTree: {
- children: []
+ children: [],
},
diff: {
- children: []
- }
+ children: [],
+ },
})
})
@@ -56,12 +56,11 @@ t.test('even more missing info', t => {
reifyOutput({
actualTree: {
- children: []
- }
+ children: [],
+ },
})
})
-
t.test('single package', (t) => {
t.plan(1)
const reifyOutput = getReifyOutput(
@@ -81,14 +80,14 @@ t.test('single package', (t) => {
// the command is not 'audit'
auditReport: {
error: {
- message: 'no audit for youuuuu'
- }
+ message: 'no audit for youuuuu',
+ },
},
actualTree: {
name: 'foo',
package: {
name: 'foo',
- version: '1.0.0'
+ version: '1.0.0',
},
edgesOut: new Map([
['bar', {
@@ -97,26 +96,27 @@ t.test('single package', (t) => {
package: {
name: 'bar',
version: '1.0.0',
- funding: { type: 'foo', url: 'http://example.com' }
- }
- }
- }]
- ])
+ funding: { type: 'foo', url: 'http://example.com' },
+ },
+ },
+ }],
+ ]),
},
diff: {
- children: []
- }
+ children: [],
+ },
})
})
t.test('no message when funding config is false', (t) => {
- t.teardown(() => { settings.fund = true })
+ t.teardown(() => {
+ settings.fund = true
+ })
settings.fund = false
const reifyOutput = getReifyOutput(
out => {
- if (out.endsWith('looking for funding')) {
+ if (out.endsWith('looking for funding'))
t.fail('should not print funding info', { actual: out })
- }
}
)
@@ -125,7 +125,7 @@ t.test('no message when funding config is false', (t) => {
name: 'foo',
package: {
name: 'foo',
- version: '1.0.0'
+ version: '1.0.0',
},
edgesOut: new Map([
['bar', {
@@ -134,15 +134,15 @@ t.test('no message when funding config is false', (t) => {
package: {
name: 'bar',
version: '1.0.0',
- funding: { type: 'foo', url: 'http://example.com' }
- }
- }
- }]
- ])
+ funding: { type: 'foo', url: 'http://example.com' },
+ },
+ },
+ }],
+ ]),
},
diff: {
- children: []
- }
+ children: [],
+ },
})
t.end()
@@ -167,7 +167,7 @@ t.test('print appropriate message for many packages', (t) => {
name: 'foo',
package: {
name: 'foo',
- version: '1.0.0'
+ version: '1.0.0',
},
edgesOut: new Map([
['bar', {
@@ -176,9 +176,9 @@ t.test('print appropriate message for many packages', (t) => {
package: {
name: 'bar',
version: '1.0.0',
- funding: { type: 'foo', url: 'http://example.com' }
- }
- }
+ funding: { type: 'foo', url: 'http://example.com' },
+ },
+ },
}],
['lorem', {
to: {
@@ -186,9 +186,9 @@ t.test('print appropriate message for many packages', (t) => {
package: {
name: 'lorem',
version: '1.0.0',
- funding: { type: 'foo', url: 'http://example.com' }
- }
- }
+ funding: { type: 'foo', url: 'http://example.com' },
+ },
+ },
}],
['ipsum', {
to: {
@@ -196,15 +196,15 @@ t.test('print appropriate message for many packages', (t) => {
package: {
name: 'ipsum',
version: '1.0.0',
- funding: { type: 'foo', url: 'http://example.com' }
- }
- }
- }]
- ])
+ funding: { type: 'foo', url: 'http://example.com' },
+ },
+ },
+ }],
+ ]),
},
diff: {
- children: []
- }
+ children: [],
+ },
})
})
@@ -217,19 +217,21 @@ t.test('no output when silent', t => {
reifyOutput({
actualTree: { inventory: { size: 999 }, children: [] },
auditReport: {
- toJSON: () => mock.auditReport,
+ toJSON: () => {
+ throw new Error('this should not get called')
+ },
vulnerabilities: {},
metadata: {
vulnerabilities: {
- total: 99
- }
- }
+ total: 99,
+ },
+ },
},
diff: {
children: [
- { action: 'ADD', ideal: { location: 'loc' } }
- ]
- }
+ { action: 'ADD', ideal: { location: 'loc' } },
+ ],
+ },
})
t.end()
})
@@ -251,22 +253,22 @@ t.test('packages changed message', t => {
vulnerabilities: {},
metadata: {
vulnerabilities: {
- total: 0
- }
- }
+ total: 0,
+ },
+ },
} : null,
diff: {
children: [
- { action: 'some random unexpected junk' }
- ]
- }
+ { action: 'some random unexpected junk' },
+ ],
+ },
}
- for (let i = 0; i < added; i++) {
+ for (let i = 0; i < added; i++)
mock.diff.children.push({ action: 'ADD', ideal: { location: 'loc' } })
- }
- for (let i = 0; i < removed; i++) {
+
+ for (let i = 0; i < removed; i++)
mock.diff.children.push({ action: 'REMOVE', actual: { location: 'loc' } })
- }
+
for (let i = 0; i < changed; i++) {
const actual = { location: 'loc' }
const ideal = { location: 'loc' }
@@ -279,7 +281,7 @@ t.test('packages changed message', t => {
removed,
changed,
audited,
- json
+ json,
}))
}
@@ -288,9 +290,8 @@ t.test('packages changed message', t => {
for (const removed of [0, 1, 2]) {
for (const changed of [0, 1, 2]) {
for (const audited of [0, 1, 2]) {
- for (const json of [true, false]) {
+ for (const json of [true, false])
cases.push([added, removed, changed, audited, json, 'install'])
- }
}
}
}
@@ -301,9 +302,8 @@ t.test('packages changed message', t => {
cases.push([0, 0, 0, 2, false, 'audit'])
t.plan(cases.length)
- for (const [added, removed, changed, audited, json, command] of cases) {
+ for (const [added, removed, changed, audited, json, command] of cases)
testCase(t, added, removed, changed, audited, json, command)
- }
t.end()
})
@@ -319,14 +319,14 @@ t.test('added packages should be looked up within returned tree', t => {
actualTree: {
name: 'foo',
inventory: {
- has: () => true
- }
+ has: () => true,
+ },
},
diff: {
children: [
- { action: 'ADD', ideal: { name: 'baz' } }
- ]
- }
+ { action: 'ADD', ideal: { name: 'baz' } },
+ ],
+ },
})
})
@@ -340,14 +340,14 @@ t.test('added packages should be looked up within returned tree', t => {
actualTree: {
name: 'foo',
inventory: {
- has: () => false
- }
+ has: () => false,
+ },
},
diff: {
children: [
- { action: 'ADD', ideal: { name: 'baz' } }
- ]
- }
+ { action: 'ADD', ideal: { name: 'baz' } },
+ ],
+ },
})
})
t.end()
diff --git a/deps/npm/test/lib/utils/setup-log.js b/deps/npm/test/lib/utils/setup-log.js
index 2d5d794f13..4398200abe 100644
--- a/deps/npm/test/lib/utils/setup-log.js
+++ b/deps/npm/test/lib/utils/setup-log.js
@@ -1,15 +1,18 @@
const t = require('tap')
const requireInject = require('require-inject')
-const settings = {}
+const settings = {
+ level: 'warn',
+}
t.afterEach(cb => {
- Object.keys(settings).forEach(k => { delete settings[k] })
+ Object.keys(settings).forEach(k => {
+ delete settings[k]
+ })
cb()
})
const WARN_CALLED = []
const npmlog = {
- level: 'warn',
warn: (...args) => {
WARN_CALLED.push(args)
},
@@ -22,17 +25,39 @@ const npmlog = {
notice: 3500,
warn: 4000,
error: 5000,
- silent: Infinity
+ silent: Infinity,
},
settings,
- enableColor: () => { settings.color = true },
- disableColor: () => { settings.color = false },
- enableUnicode: () => { settings.unicode = true },
- disableUnicode: () => { settings.unicode = false },
- enableProgress: () => { settings.progress = true },
- disableProgress: () => { settings.progress = false },
- set heading (h) { settings.heading = h },
- set level (l) { settings.level = l }
+ enableColor: () => {
+ settings.color = true
+ },
+ disableColor: () => {
+ settings.color = false
+ },
+ enableUnicode: () => {
+ settings.unicode = true
+ },
+ disableUnicode: () => {
+ settings.unicode = false
+ },
+ enableProgress: () => {
+ settings.progress = true
+ },
+ disableProgress: () => {
+ settings.progress = false
+ },
+ get heading () {
+ return settings.heading
+ },
+ set heading (h) {
+ settings.heading = h
+ },
+ get level () {
+ return settings.level
+ },
+ set level (l) {
+ settings.level = l
+ },
}
const EXPLAIN_CALLED = []
@@ -41,9 +66,9 @@ const setupLog = requireInject('../../../lib/utils/setup-log.js', {
explain: (...args) => {
EXPLAIN_CALLED.push(args)
return 'explanation'
- }
+ },
},
- npmlog
+ npmlog,
})
const config = obj => ({
@@ -52,7 +77,7 @@ const config = obj => ({
},
set (k, v) {
obj[k] = v
- }
+ },
})
t.test('setup with color=always and unicode', t => {
@@ -65,7 +90,7 @@ t.test('setup with color=always and unicode', t => {
loglevel: 'warn',
color: 'always',
unicode: true,
- progress: false
+ progress: false,
})), true)
npmlog.warn('ERESOLVE', 'hello', { some: { other: 'object' } })
@@ -73,7 +98,7 @@ t.test('setup with color=always and unicode', t => {
'log.warn(ERESOLVE) patched to call explainEresolve()')
t.strictSame(WARN_CALLED, [
['ERESOLVE', 'hello'],
- ['', 'explanation']
+ ['', 'explanation'],
], 'warn the explanation')
EXPLAIN_CALLED.length = 0
WARN_CALLED.length = 0
@@ -86,7 +111,7 @@ t.test('setup with color=always and unicode', t => {
color: true,
unicode: true,
progress: false,
- heading: 'npm'
+ heading: 'npm',
})
t.end()
@@ -106,7 +131,7 @@ t.test('setup with color=true, no unicode, and non-TTY terminal', t => {
loglevel: 'warn',
color: false,
progress: false,
- heading: 'asdf'
+ heading: 'asdf',
})), false)
t.strictSame(settings, {
@@ -114,7 +139,7 @@ t.test('setup with color=true, no unicode, and non-TTY terminal', t => {
color: false,
unicode: false,
progress: false,
- heading: 'asdf'
+ heading: 'asdf',
})
t.end()
@@ -137,7 +162,7 @@ t.test('setup with color=true, no unicode, and dumb TTY terminal', t => {
loglevel: 'warn',
color: true,
progress: false,
- heading: 'asdf'
+ heading: 'asdf',
})), true)
t.strictSame(settings, {
@@ -145,7 +170,7 @@ t.test('setup with color=true, no unicode, and dumb TTY terminal', t => {
color: true,
unicode: false,
progress: false,
- heading: 'asdf'
+ heading: 'asdf',
})
t.end()
@@ -168,7 +193,7 @@ t.test('setup with color=true, no unicode, and non-dumb TTY terminal', t => {
loglevel: 'warn',
color: true,
progress: true,
- heading: 'asdf'
+ heading: 'asdf',
})), true)
t.strictSame(settings, {
@@ -176,7 +201,7 @@ t.test('setup with color=true, no unicode, and non-dumb TTY terminal', t => {
color: true,
unicode: false,
progress: true,
- heading: 'asdf'
+ heading: 'asdf',
})
t.end()
@@ -199,7 +224,7 @@ t.test('setup with non-TTY stdout, TTY stderr', t => {
loglevel: 'warn',
color: true,
progress: true,
- heading: 'asdf'
+ heading: 'asdf',
})), false)
t.strictSame(settings, {
@@ -207,7 +232,7 @@ t.test('setup with non-TTY stdout, TTY stderr', t => {
color: true,
unicode: false,
progress: true,
- heading: 'asdf'
+ heading: 'asdf',
})
t.end()
@@ -229,7 +254,7 @@ t.test('setup with TTY stdout, non-TTY stderr', t => {
loglevel: 'warn',
color: true,
progress: true,
- heading: 'asdf'
+ heading: 'asdf',
})), true)
t.strictSame(settings, {
@@ -237,7 +262,7 @@ t.test('setup with TTY stdout, non-TTY stderr', t => {
color: false,
unicode: false,
progress: false,
- heading: 'asdf'
+ heading: 'asdf',
})
t.end()
@@ -246,7 +271,7 @@ t.test('setup with TTY stdout, non-TTY stderr', t => {
t.test('set loglevel to timing', t => {
setupLog(config({
timing: true,
- loglevel: 'notice'
+ loglevel: 'notice',
}))
t.equal(settings.level, 'timing')
t.end()
@@ -266,7 +291,7 @@ t.test('silent has no logging', t => {
process.env.TERM = 'totes not dum'
setupLog(config({
- loglevel: 'silent'
+ loglevel: 'silent',
}))
t.equal(settings.progress, false, 'progress disabled when silent')
t.end()
diff --git a/deps/npm/test/lib/utils/tar.js b/deps/npm/test/lib/utils/tar.js
index 827bc9262d..b780a73e5e 100644
--- a/deps/npm/test/lib/utils/tar.js
+++ b/deps/npm/test/lib/utils/tar.js
@@ -11,9 +11,9 @@ const printLogs = (tarball, unicode) => {
log: {
notice: (...args) => {
args.map(el => logs.push(el))
- }
+ },
},
- unicode
+ unicode,
})
return logs.join('\n')
}
@@ -24,19 +24,19 @@ test('should log tarball contents', async (t) => {
name: 'my-cool-pkg',
version: '1.0.0',
bundleDependencies: [
- 'bundle-dep'
- ]
+ 'bundle-dep',
+ ],
}, null, 2),
- 'node_modules': {
- 'bundle-dep': 'toto'
- }
+ node_modules: {
+ 'bundle-dep': 'toto',
+ },
})
const tarball = await pack(testDir)
const tarballContents = await getContents({
_id: '1',
name: 'my-cool-pkg',
- version: '1.0.0'
+ version: '1.0.0',
}, tarball)
t.matchSnapshot(printLogs(tarballContents, false))
@@ -44,36 +44,36 @@ test('should log tarball contents', async (t) => {
test('should log tarball contents with unicode', async (t) => {
const { logTar } = requireInject('../../../lib/utils/tar.js', {
- 'npmlog': {
- 'notice': (str) => {
+ npmlog: {
+ notice: (str) => {
t.ok(true, 'defaults to npmlog')
return str
- }
- }
+ },
+ },
})
-
- logTar({
- files: [],
+
+ logTar({
+ files: [],
bundled: [],
- integrity: ''
+ integrity: '',
}, { unicode: true })
t.end()
})
test('should default to npmlog', async (t) => {
const { logTar } = requireInject('../../../lib/utils/tar.js', {
- 'npmlog': {
- 'notice': (str) => {
+ npmlog: {
+ notice: (str) => {
t.ok(true, 'defaults to npmlog')
return str
- }
- }
+ },
+ },
})
logTar({
files: [],
bundled: [],
- integrity: ''
+ integrity: '',
})
t.end()
})
@@ -82,19 +82,19 @@ test('should getContents of a tarball', async (t) => {
const testDir = t.testdir({
'package.json': JSON.stringify({
name: 'my-cool-pkg',
- version: '1.0.0'
- }, null, 2)
+ version: '1.0.0',
+ }, null, 2),
})
const tarball = await pack(testDir)
const tarballContents = await getContents({
name: 'my-cool-pkg',
- version: '1.0.0'
+ version: '1.0.0',
}, tarball)
const integrity = await ssri.fromData(tarball, {
- algorithms: ['sha1', 'sha512']
+ algorithms: ['sha1', 'sha512'],
})
t.strictSame(tarballContents, {
@@ -106,10 +106,9 @@ test('should getContents of a tarball', async (t) => {
shasum: 'c0bfd67a5142104e429afda09119eedd6a30d2fc',
integrity: ssri.parse(integrity.sha512[0]),
filename: 'my-cool-pkg-1.0.0.tgz',
- files: [ { path: 'package.json', size: 49, mode: 420 } ],
+ files: [{ path: 'package.json', size: 49, mode: 420 }],
entryCount: 1,
- bundled: []
+ bundled: [],
}, 'contents are correct')
t.end()
-
})
diff --git a/deps/npm/test/lib/utils/unsupported.js b/deps/npm/test/lib/utils/unsupported.js
index 89ee6af232..f14cba9b74 100644
--- a/deps/npm/test/lib/utils/unsupported.js
+++ b/deps/npm/test/lib/utils/unsupported.js
@@ -30,7 +30,7 @@ const versions = [
['v10.0.0-0', false, false],
['v11.0.0-0', false, false],
['v12.0.0-0', false, false],
- ['v13.0.0-0', false, false]
+ ['v13.0.0-0', false, false],
]
test('versions', function (t) {
@@ -71,7 +71,7 @@ test('checkForBrokenNode', t => {
const expectLogs = [
'ERROR: npm is known not to run on Node.js 1.2.3',
"You'll need to upgrade to a newer Node.js version in order to use this",
- 'version of npm. You can find the latest version at https://nodejs.org/'
+ 'version of npm. You can find the latest version at https://nodejs.org/',
]
console.error = msg => logs.push(msg)
unsupported.checkForBrokenNode()
@@ -92,7 +92,7 @@ test('checkForUnsupportedNode', t => {
'npm does not support Node.js 8.0.0',
'You should probably upgrade to a newer version of node as we',
"can't make any promises that npm will work with this version.",
- 'You can find the latest version at https://nodejs.org/'
+ 'You can find the latest version at https://nodejs.org/',
]
npmlog.warn = (section, msg) => logs.push(msg)
diff --git a/deps/npm/test/lib/utils/update-notifier.js b/deps/npm/test/lib/utils/update-notifier.js
index 903e888a5e..99c9dfc266 100644
--- a/deps/npm/test/lib/utils/update-notifier.js
+++ b/deps/npm/test/lib/utils/update-notifier.js
@@ -22,15 +22,15 @@ const pacote = {
process.exit(1)
}
MANIFEST_REQUEST.push(spec)
- if (PACOTE_ERROR) {
+ if (PACOTE_ERROR)
throw PACOTE_ERROR
- }
+
return {
version: spec === 'npm@latest' ? CURRENT_VERSION
- : /-/.test(spec) ? CURRENT_BETA
- : NEXT_VERSION
+ : /-/.test(spec) ? CURRENT_BETA
+ : NEXT_VERSION,
}
- }
+ },
}
const npm = {
@@ -38,13 +38,12 @@ const npm = {
log: { useColor: () => true },
version: CURRENT_VERSION,
config: { get: (k) => k !== 'global' },
- flatOptions,
command: 'view',
- argv: ['npm']
+ argv: ['npm'],
}
const npmNoColor = {
...npm,
- log: { useColor: () => false }
+ log: { useColor: () => false },
}
const { basename } = require('path')
@@ -70,17 +69,15 @@ const fs = {
process.exit(1)
}
process.nextTick(() => cb(WRITE_ERROR))
- }
+ },
}
const updateNotifier = requireInject('../../../lib/utils/update-notifier.js', {
'@npmcli/ci-detect': () => ciMock,
pacote,
- fs
+ fs,
})
-const semver = require('semver')
-
t.afterEach(cb => {
MANIFEST_REQUEST.length = 0
STAT_ERROR = null
@@ -94,7 +91,7 @@ t.test('situations in which we do not notify', t => {
t.test('nothing to do if notifier disabled', async t => {
t.equal(await updateNotifier({
...npm,
- config: { get: (k) => k === 'update-notifier' ? false : true }
+ config: { get: (k) => k !== 'update-notifier' },
}), null)
t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests')
})
@@ -104,7 +101,7 @@ t.test('situations in which we do not notify', t => {
...npm,
flatOptions: { ...flatOptions, global: true },
command: 'install',
- argv: ['npm']
+ argv: ['npm'],
}), null)
t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests')
})
@@ -140,7 +137,9 @@ t.test('situations in which we do not notify', t => {
})
t.test('do not update in CI', async t => {
- t.teardown(() => { ciMock = null })
+ t.teardown(() => {
+ ciMock = null
+ })
ciMock = 'something'
t.equal(await updateNotifier(npm), null)
t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests')
@@ -148,14 +147,14 @@ t.test('situations in which we do not notify', t => {
t.test('only check weekly for GA releases', async t => {
// the 10 is fuzz factor for test environment
- STAT_MTIME = Date.now() - (1000*60*60*24*7) + 10
+ STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24 * 7) + 10
t.equal(await updateNotifier(npm), null)
t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests')
})
t.test('only check daily for betas', async t => {
// the 10 is fuzz factor for test environment
- STAT_MTIME = Date.now() - (1000*60*60*24) + 10
+ STAT_MTIME = Date.now() - (1000 * 60 * 60 * 24) + 10
t.equal(await updateNotifier({ ...npm, version: HAVE_BETA }), null)
t.strictSame(MANIFEST_REQUEST, [], 'no requests for manifests')
})
diff --git a/deps/npm/test/lib/view.js b/deps/npm/test/lib/view.js
index 88b2769a05..f3e5d97f33 100644
--- a/deps/npm/test/lib/view.js
+++ b/deps/npm/test/lib/view.js
@@ -13,227 +13,225 @@ const cleanLogs = (done) => {
}
const packument = (nv, opts) => {
- if (!opts.fullMetadata) {
+ if (!opts.fullMetadata)
throw new Error('must fetch fullMetadata')
- }
- if (!opts.preferOnline) {
+ if (!opts.preferOnline)
throw new Error('must fetch with preferOnline')
- }
const mocks = {
- 'red': {
- 'name' : 'red',
+ red: {
+ name: 'red',
'dist-tags': {
- '1.0.1': {}
+ '1.0.1': {},
+ },
+ time: {
+ unpublished: new Date(),
},
- 'time': {
- 'unpublished': new Date()
- }
},
- 'blue': {
- 'name': 'blue',
+ blue: {
+ name: 'blue',
'dist-tags': {},
- 'time': {
- '1.0.0': '2019-08-06T16:21:09.842Z'
+ time: {
+ '1.0.0': '2019-08-06T16:21:09.842Z',
},
- 'versions': {
+ versions: {
'1.0.0': {
- 'name': 'blue',
- 'version': '1.0.0',
- 'dist': {
- 'shasum': '123',
- 'tarball': 'http://hm.blue.com/1.0.0.tgz',
- 'integrity': '---',
- 'fileCount': 1,
- 'unpackedSize': 1
- }
+ name: 'blue',
+ version: '1.0.0',
+ dist: {
+ shasum: '123',
+ tarball: 'http://hm.blue.com/1.0.0.tgz',
+ integrity: '---',
+ fileCount: 1,
+ unpackedSize: 1,
+ },
},
- '1.0.1': {}
- }
+ '1.0.1': {},
+ },
},
- 'cyan': {
- '_npmUser': {
- 'name': 'claudia',
- 'email': 'claudia@cyan.com'
- } ,
- 'name': 'cyan',
+ cyan: {
+ _npmUser: {
+ name: 'claudia',
+ email: 'claudia@cyan.com',
+ },
+ name: 'cyan',
'dist-tags': {},
- 'versions': {
+ versions: {
'1.0.0': {
- 'version': '1.0.0',
- 'name': 'cyan',
- 'dist': {
- 'shasum': '123',
- 'tarball': 'http://hm.cyan.com/1.0.0.tgz',
- 'integrity': '---',
- 'fileCount': 1,
- 'unpackedSize': 1
- }
+ version: '1.0.0',
+ name: 'cyan',
+ dist: {
+ shasum: '123',
+ tarball: 'http://hm.cyan.com/1.0.0.tgz',
+ integrity: '---',
+ fileCount: 1,
+ unpackedSize: 1,
+ },
},
- '1.0.1': {}
- }
+ '1.0.1': {},
+ },
},
- 'brown': {
- 'name': 'brown'
+ brown: {
+ name: 'brown',
},
- 'yellow': {
- '_id': 'yellow',
- 'name': 'yellow',
- 'author': {
- 'name': 'foo',
- 'email': 'foo@yellow.com',
- 'twitter': 'foo'
+ yellow: {
+ _id: 'yellow',
+ name: 'yellow',
+ author: {
+ name: 'foo',
+ email: 'foo@yellow.com',
+ twitter: 'foo',
},
- 'readme': 'a very useful readme',
- 'versions': {
+ readme: 'a very useful readme',
+ versions: {
'1.0.0': {
- 'version': '1.0.0',
- 'author': 'claudia',
- 'readme': 'a very useful readme',
- 'maintainers': [
- { 'name': 'claudia', 'email': 'c@yellow.com', 'twitter': 'cyellow' },
- { 'name': 'isaacs', 'email': 'i@yellow.com', 'twitter': 'iyellow' }
- ]
+ version: '1.0.0',
+ author: 'claudia',
+ readme: 'a very useful readme',
+ maintainers: [
+ { name: 'claudia', email: 'c@yellow.com', twitter: 'cyellow' },
+ { name: 'isaacs', email: 'i@yellow.com', twitter: 'iyellow' },
+ ],
},
'1.0.1': {
- 'version': '1.0.1',
- 'author': 'claudia'
+ version: '1.0.1',
+ author: 'claudia',
},
'1.0.2': {
- 'version': '1.0.2',
- 'author': 'claudia'
- }
- }
+ version: '1.0.2',
+ author: 'claudia',
+ },
+ },
},
- 'purple': {
- 'name': 'purple',
- 'versions': {
+ purple: {
+ name: 'purple',
+ versions: {
'1.0.0': {
- 'foo': 1,
- 'maintainers': [
- { 'name': 'claudia' }
- ]
+ foo: 1,
+ maintainers: [
+ { name: 'claudia' },
+ ],
},
- '1.0.1': {}
- }
+ '1.0.1': {},
+ },
},
- 'green': {
- '_id': 'green',
- 'name': 'green',
+ green: {
+ _id: 'green',
+ name: 'green',
'dist-tags': {
- 'latest': '1.0.0'
+ latest: '1.0.0',
},
- 'maintainers': [
- { 'name': 'claudia', 'email': 'c@yellow.com', 'twitter': 'cyellow' },
- { 'name': 'isaacs', 'email': 'i@yellow.com', 'twitter': 'iyellow' }
+ maintainers: [
+ { name: 'claudia', email: 'c@yellow.com', twitter: 'cyellow' },
+ { name: 'isaacs', email: 'i@yellow.com', twitter: 'iyellow' },
],
- 'keywords': ['colors', 'green', 'crayola'],
- 'versions': {
+ keywords: ['colors', 'green', 'crayola'],
+ versions: {
'1.0.0': {
- '_id': 'green',
- 'version': '1.0.0',
- 'description': 'green is a very important color',
- 'bugs': {
- 'url': 'http://bugs.green.com'
+ _id: 'green',
+ version: '1.0.0',
+ description: 'green is a very important color',
+ bugs: {
+ url: 'http://bugs.green.com',
+ },
+ deprecated: true,
+ repository: {
+ url: 'http://repository.green.com',
},
- 'deprecated': true,
- 'repository': {
- 'url': 'http://repository.green.com'
+ license: { type: 'ACME' },
+ bin: {
+ green: 'bin/green.js',
},
- 'license': { type: 'ACME' },
- 'bin': {
- 'green': 'bin/green.js'
+ dependencies: {
+ red: '1.0.0',
+ yellow: '1.0.0',
},
- 'dependencies': {
- 'red': '1.0.0',
- 'yellow': '1.0.0'
+ dist: {
+ shasum: '123',
+ tarball: 'http://hm.green.com/1.0.0.tgz',
+ integrity: '---',
+ fileCount: 1,
+ unpackedSize: 1,
},
- 'dist': {
- 'shasum': '123',
- 'tarball': 'http://hm.green.com/1.0.0.tgz',
- 'integrity': '---',
- 'fileCount': 1,
- 'unpackedSize': 1
- }
},
- '1.0.1': {}
- }
+ '1.0.1': {},
+ },
},
- 'black': {
- 'name': 'black',
+ black: {
+ name: 'black',
'dist-tags': {
- 'latest': '1.0.0'
+ latest: '1.0.0',
},
- 'versions': {
+ versions: {
'1.0.0': {
- 'version': '1.0.0',
- 'bugs': 'http://bugs.black.com',
- 'license': {},
- 'dependencies': (() => {
+ version: '1.0.0',
+ bugs: 'http://bugs.black.com',
+ license: {},
+ dependencies: (() => {
const deps = {}
- for (i = 0; i < 25; i++) {
+ for (let i = 0; i < 25; i++)
deps[i] = '1.0.0'
- }
+
return deps
})(),
- 'dist': {
- 'shasum': '123',
- 'tarball': 'http://hm.black.com/1.0.0.tgz',
- 'integrity': '---',
- 'fileCount': 1,
- 'unpackedSize': 1
- }
+ dist: {
+ shasum: '123',
+ tarball: 'http://hm.black.com/1.0.0.tgz',
+ integrity: '---',
+ fileCount: 1,
+ unpackedSize: 1,
+ },
},
- '1.0.1': {}
- }
+ '1.0.1': {},
+ },
},
- 'pink': {
- 'name': 'pink',
+ pink: {
+ name: 'pink',
'dist-tags': {
- 'latest': '1.0.0'
+ latest: '1.0.0',
},
- 'versions': {
+ versions: {
'1.0.0': {
- 'version': '1.0.0',
- 'maintainers': [
- { 'name': 'claudia', 'url': 'http://c.pink.com' },
- { 'name': 'isaacs', 'url': 'http://i.pink.com' }
+ version: '1.0.0',
+ maintainers: [
+ { name: 'claudia', url: 'http://c.pink.com' },
+ { name: 'isaacs', url: 'http://i.pink.com' },
],
- 'repository': 'http://repository.pink.com',
- 'license': {},
- 'dist': {
- 'shasum': '123',
- 'tarball': 'http://hm.pink.com/1.0.0.tgz',
- 'integrity': '---',
- 'fileCount': 1,
- 'unpackedSize': 1
- }
+ repository: 'http://repository.pink.com',
+ license: {},
+ dist: {
+ shasum: '123',
+ tarball: 'http://hm.pink.com/1.0.0.tgz',
+ integrity: '---',
+ fileCount: 1,
+ unpackedSize: 1,
+ },
},
- '1.0.1': {}
- }
+ '1.0.1': {},
+ },
},
- 'orange': {
- 'name': 'orange',
+ orange: {
+ name: 'orange',
'dist-tags': {
- 'latest': '1.0.0'
+ latest: '1.0.0',
},
- 'versions': {
+ versions: {
'1.0.0': {
- 'version': '1.0.0',
- 'homepage': 'http://hm.orange.com',
- 'license': {},
- 'dist': {
- 'shasum': '123',
- 'tarball': 'http://hm.orange.com/1.0.0.tgz',
- 'integrity': '---',
- 'fileCount': 1,
- 'unpackedSize': 1
- }
+ version: '1.0.0',
+ homepage: 'http://hm.orange.com',
+ license: {},
+ dist: {
+ shasum: '123',
+ tarball: 'http://hm.orange.com/1.0.0.tgz',
+ integrity: '---',
+ fileCount: 1,
+ unpackedSize: 1,
+ },
},
- '1.0.1': {}
- }
- }
+ '1.0.1': {},
+ },
+ },
}
return mocks[nv.name]
}
@@ -244,34 +242,34 @@ t.test('should log package info', t => {
'../../lib/npm.js': {
flatOptions: {
global: false,
- }
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
const viewJson = requireInject('../../lib/view.js', {
'../../lib/npm.js': {
flatOptions: {
- json: true
- }
+ json: true,
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
const viewUnicode = requireInject('../../lib/view.js', {
'../../lib/npm.js': {
flatOptions: {
global: false,
- unicode: true
- }
+ unicode: true,
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
t.test('package with license, bugs, repository and other fields', t => {
@@ -344,8 +342,8 @@ t.test('should log info of package in current working dir', t => {
const testDir = t.testdir({
'package.json': JSON.stringify({
name: 'blue',
- version: '1.0.0'
- }, null, 2)
+ version: '1.0.0',
+ }, null, 2),
})
const view = requireInject('../../lib/view.js', {
@@ -353,12 +351,12 @@ t.test('should log info of package in current working dir', t => {
prefix: testDir,
flatOptions: {
defaultTag: '1.0.0',
- global: false
- }
+ global: false,
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
t.test('specific version', t => {
@@ -383,23 +381,23 @@ t.test('should log info by field name', t => {
'../../lib/npm.js': {
flatOptions: {
json: true,
- global: false
- }
+ global: false,
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
const view = requireInject('../../lib/view.js', {
'../../lib/npm.js': {
flatOptions: {
- global: false
- }
+ global: false,
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
t.test('readme', t => {
@@ -452,14 +450,14 @@ t.test('should log info by field name', t => {
})
t.test('array field - 1 element', t => {
- view(['purple@1.0.0', 'maintainers.name'], () => {
+ view(['purple@1.0.0', 'maintainers.name'], () => {
t.matchSnapshot(logs)
t.end()
})
})
t.test('array field - 2 elements', t => {
- view(['yellow@1.x.x', 'maintainers.name'], () => {
+ view(['yellow@1.x.x', 'maintainers.name'], () => {
t.matchSnapshot(logs)
t.end()
})
@@ -472,9 +470,9 @@ t.test('throw error if global mode', (t) => {
const view = requireInject('../../lib/view.js', {
'../../lib/npm.js': {
flatOptions: {
- global: true
- }
- }
+ global: true,
+ },
+ },
})
view([], (err) => {
t.equals(err.message, 'Cannot use view command in global mode.')
@@ -489,9 +487,9 @@ t.test('throw ENOENT error if package.json misisng', (t) => {
'../../lib/npm.js': {
prefix: testDir,
flatOptions: {
- global: false
- }
- }
+ global: false,
+ },
+ },
})
view([], (err) => {
t.match(err, { code: 'ENOENT' })
@@ -501,16 +499,16 @@ t.test('throw ENOENT error if package.json misisng', (t) => {
t.test('throw EJSONPARSE error if package.json not json', (t) => {
const testDir = t.testdir({
- 'package.json': 'not json, nope, not even a little bit!'
+ 'package.json': 'not json, nope, not even a little bit!',
})
const view = requireInject('../../lib/view.js', {
'../../lib/npm.js': {
prefix: testDir,
flatOptions: {
- global: false
- }
- }
+ global: false,
+ },
+ },
})
view([], (err) => {
t.match(err, { code: 'EJSONPARSE' })
@@ -520,16 +518,16 @@ t.test('throw EJSONPARSE error if package.json not json', (t) => {
t.test('throw error if package.json has no name', (t) => {
const testDir = t.testdir({
- 'package.json': '{}'
+ 'package.json': '{}',
})
const view = requireInject('../../lib/view.js', {
'../../lib/npm.js': {
prefix: testDir,
flatOptions: {
- global: false
- }
- }
+ global: false,
+ },
+ },
})
view([], (err) => {
t.equals(err.message, 'Invalid package.json, no "name" field')
@@ -542,12 +540,12 @@ t.test('throws when unpublished', (t) => {
'../../lib/npm.js': {
flatOptions: {
defaultTag: '1.0.1',
- global: false
- }
+ global: false,
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
view(['red'], (err) => {
t.equals(err.code, 'E404')
@@ -560,16 +558,18 @@ t.test('completion', (t) => {
'../../lib/npm.js': {
flatOptions: {
defaultTag: '1.0.1',
- global: false
- }
+ global: false,
+ },
+ },
+ pacote: {
+ packument,
},
- 'pacote': {
- packument
- }
})
view.completion({
- conf: { argv: { remain: ['npm', 'view', 'green@1.0.0'] } }
+ conf: { argv: { remain: ['npm', 'view', 'green@1.0.0'] } },
}, (err, res) => {
+ if (err)
+ throw err
t.ok(res, 'returns back fields')
t.end()
})
@@ -580,14 +580,13 @@ t.test('no registry completion', (t) => {
'../../lib/npm.js': {
flatOptions: {
defaultTag: '1.0.1',
- }
- }
+ },
+ },
})
view.completion({
- conf: { argv: { remain: ['npm', 'view'] } }
+ conf: { argv: { remain: ['npm', 'view'] } },
}, (err) => {
t.notOk(err, 'there is no package completion')
t.end()
})
})
-
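
The view.js hunks above all lean on the same require-inject pattern: lib/view.js is loaded with a stub lib/npm.js supplying flatOptions and a fake pacote.packument, so the command runs against in-memory fixtures instead of a live registry. Below is a minimal sketch of that pattern, condensed from the 'green' fixture in this diff. It is illustrative only, not part of the commit, and assumes npm's own test tree, where tap, require-inject, pacote and these relative paths all resolve; the real tests supply much richer fixtures and capture output through a stubbed lib/utils/output.js.

const t = require('tap')
const requireInject = require('require-inject')

// Trimmed-down stand-in for pacote.packument(), shaped like the 'green'
// fixture above; the actual tests return a far richer document
// (maintainers, bin, dependencies, a second version, and so on).
const packument = async () => ({
  name: 'green',
  'dist-tags': { latest: '1.0.0' },
  versions: {
    '1.0.0': {
      version: '1.0.0',
      license: { type: 'ACME' },
      repository: { url: 'http://repository.green.com' },
      dist: {
        shasum: '123',
        tarball: 'http://hm.green.com/1.0.0.tgz',
        integrity: '---',
        fileCount: 1,
        unpackedSize: 1,
      },
    },
  },
})

// Load lib/view.js with its npm config and registry client replaced,
// using the same injection shape seen throughout the hunks above.
const view = requireInject('../../lib/view.js', {
  '../../lib/npm.js': { flatOptions: { global: false } },
  pacote: { packument },
})

t.test('view a stubbed package', t => {
  view(['green@1.0.0'], err => {
    if (err)
      throw err
    t.end()
  })
})

In a development checkout of the npm CLI, `npx tap test/lib/view.js` runs the full test file these hunks modify.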
diff --git a/deps/npm/test/lib/whoami.js b/deps/npm/test/lib/whoami.js
index dc9dbdfd57..d54814db36 100644
--- a/deps/npm/test/lib/whoami.js
+++ b/deps/npm/test/lib/whoami.js
@@ -8,7 +8,7 @@ test('whoami', (t) => {
'../../lib/npm.js': { flatOptions: {} },
'../../lib/utils/output.js': (output) => {
t.equal(output, 'foo', 'should output the username')
- }
+ },
})
whoami([], (err) => {
@@ -24,7 +24,7 @@ test('whoami json', (t) => {
'../../lib/npm.js': { flatOptions: { json: true } },
'../../lib/utils/output.js': (output) => {
t.equal(output, '"foo"', 'should output the username as json')
- }
+ },
})
whoami([], (err) => {