summaryrefslogtreecommitdiff
path: root/deps/npm/lib
diff options
context:
space:
mode:
authorKat Marchán <kzm@sykosomatic.org>2017-12-07 14:05:23 -0800
committerMyles Borins <mylesborins@google.com>2018-01-19 11:32:08 -0500
commitd3b1c971bcf0177b17c649c3aeca1a94cbc3fff5 (patch)
tree321928c015be00cdbe11715297d2d2fc45802263 /deps/npm/lib
parentbfe41fe88e7421f441067a79fb7512cf5935a2bb (diff)
downloadandroid-node-v8-d3b1c971bcf0177b17c649c3aeca1a94cbc3fff5.tar.gz
android-node-v8-d3b1c971bcf0177b17c649c3aeca1a94cbc3fff5.tar.bz2
android-node-v8-d3b1c971bcf0177b17c649c3aeca1a94cbc3fff5.zip
deps: upgrade npm to 5.6.0
PR-URL: https://github.com/nodejs/node/pull/17777 Reviewed-By: Michaël Zasso <targos@protonmail.com> Reviewed-By: James M Snell <jasnell@gmail.com> Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de> Reviewed-By: Anna Henningsen <anna@addaleax.net> Reviewed-By: Gibson Fahnestock <gibfahn@gmail.com>
Diffstat (limited to 'deps/npm/lib')
-rw-r--r--deps/npm/lib/auth/legacy.js9
-rw-r--r--deps/npm/lib/build.js130
-rw-r--r--deps/npm/lib/cache.js3
-rw-r--r--deps/npm/lib/config/bin-links.js32
-rw-r--r--deps/npm/lib/config/core.js1
-rw-r--r--deps/npm/lib/config/defaults.js4
-rw-r--r--deps/npm/lib/config/fetch-opts.js77
-rw-r--r--deps/npm/lib/config/find-prefix.js56
-rw-r--r--deps/npm/lib/config/gentle-fs.js32
-rw-r--r--deps/npm/lib/config/lifecycle.js1
-rw-r--r--deps/npm/lib/config/load-prefix.js8
-rw-r--r--deps/npm/lib/install.js103
-rw-r--r--deps/npm/lib/install/action/build.js2
-rw-r--r--deps/npm/lib/install/action/extract.js3
-rw-r--r--deps/npm/lib/install/action/fetch.js6
-rw-r--r--deps/npm/lib/install/action/finalize.js21
-rw-r--r--deps/npm/lib/install/action/refresh-package-json.js2
-rw-r--r--deps/npm/lib/install/actions.js13
-rw-r--r--deps/npm/lib/install/deps.js63
-rw-r--r--deps/npm/lib/install/diff-trees.js152
-rw-r--r--deps/npm/lib/install/inflate-shrinkwrap.js7
-rw-r--r--deps/npm/lib/install/realize-shrinkwrap-specifier.js3
-rw-r--r--deps/npm/lib/install/save.js2
-rw-r--r--deps/npm/lib/link.js2
-rw-r--r--deps/npm/lib/npm.js10
-rw-r--r--deps/npm/lib/pack.js17
-rw-r--r--deps/npm/lib/profile.js42
-rw-r--r--deps/npm/lib/publish.js22
-rw-r--r--deps/npm/lib/shrinkwrap.js5
-rw-r--r--deps/npm/lib/token.js10
-rw-r--r--deps/npm/lib/update.js52
-rw-r--r--deps/npm/lib/utils/convert-line-endings.js49
-rw-r--r--deps/npm/lib/utils/error-message.js86
-rw-r--r--deps/npm/lib/utils/gently-rm.js271
-rw-r--r--deps/npm/lib/utils/is-hashbang-file.js19
-rw-r--r--deps/npm/lib/utils/is-registry.js11
-rw-r--r--deps/npm/lib/utils/link.js71
-rw-r--r--deps/npm/lib/view.js2
38 files changed, 575 insertions, 824 deletions
diff --git a/deps/npm/lib/auth/legacy.js b/deps/npm/lib/auth/legacy.js
index 4c75ca6731..92bf44c119 100644
--- a/deps/npm/lib/auth/legacy.js
+++ b/deps/npm/lib/auth/legacy.js
@@ -4,6 +4,8 @@ const profile = require('npm-profile')
const log = require('npmlog')
const npm = require('../npm.js')
const output = require('../utils/output.js')
+const pacoteOpts = require('../config/pacote')
+const fetchOpts = require('../config/fetch-opts')
module.exports.login = function login (creds, registry, scope, cb) {
let username = creds.username || ''
@@ -22,10 +24,13 @@ module.exports.login = function login (creds, registry, scope, cb) {
email = e
return profile.login(username, password, {registry: registry, auth: auth}).catch((err) => {
if (err.code === 'EOTP') throw err
- return profile.adduser(username, email, password, {registry: registry})
+ return profile.adduser(username, email, password, {
+ registry: registry,
+ opts: fetchOpts.fromPacote(pacoteOpts())
+ })
}).catch((err) => {
if (err.code === 'EOTP' && !auth.otp) {
- return read.otp('Authenicator provided OTP:').then((otp) => {
+ return read.otp('Authenticator provided OTP:').then((otp) => {
auth.otp = otp
return profile.login(username, password, {registry: registry, auth: auth})
})
diff --git a/deps/npm/lib/build.js b/deps/npm/lib/build.js
index 6a788bc857..395f9437b4 100644
--- a/deps/npm/lib/build.js
+++ b/deps/npm/lib/build.js
@@ -10,21 +10,14 @@
var npm = require('./npm.js')
var log = require('npmlog')
var chain = require('slide').chain
-var fs = require('graceful-fs')
var path = require('path')
+var fs = require('graceful-fs')
var lifecycle = require('./utils/lifecycle.js')
var readJson = require('read-package-json')
-var link = require('./utils/link.js')
-var linkIfExists = link.ifExists
-var cmdShim = require('cmd-shim')
-var cmdShimIfExists = cmdShim.ifExists
-var isHashbangFile = require('./utils/is-hashbang-file.js')
-var dos2Unix = require('./utils/convert-line-endings.js').dos2Unix
-var asyncMap = require('slide').asyncMap
+var binLinks = require('bin-links')
+var binLinksConfig = require('./config/bin-links.js')
var ini = require('ini')
var writeFile = require('write-file-atomic')
-var packageId = require('./utils/package-id.js')
-var output = require('./utils/output.js')
module.exports = build
build.usage = 'npm build [<folder>]'
@@ -74,7 +67,8 @@ function build_ (global, didPre, didRB) {
if (er) return cb(er)
chain([
!didPre && [lifecycle, pkg, 'preinstall', folder],
- [linkStuff, pkg, folder, global, didRB],
+ [linkStuff, pkg, folder, global],
+ !didRB && [rebuildBundles, pkg, folder],
[writeBuiltinConf, pkg, folder],
didPre !== build._noLC && [lifecycle, pkg, 'install', folder],
didPre !== build._noLC && [lifecycle, pkg, 'postinstall', folder]
@@ -102,35 +96,13 @@ var writeBuiltinConf = build.writeBuiltinConf = function (pkg, folder, cb) {
writeFile(path.resolve(folder, 'npmrc'), data, cb)
}
-var linkStuff = build.linkStuff = function (pkg, folder, global, didRB, cb) {
+var linkStuff = build.linkStuff = function (pkg, folder, global, cb) {
// allow to opt out of linking binaries.
if (npm.config.get('bin-links') === false) return cb()
-
- // if it's global, and folder is in {prefix}/node_modules,
- // then bins are in {prefix}/bin
- // otherwise, then bins are in folder/../.bin
- var parent = pkg.name && pkg.name[0] === '@' ? path.dirname(path.dirname(folder)) : path.dirname(folder)
- var gnm = global && npm.globalDir
- var gtop = parent === gnm
-
- log.info('linkStuff', packageId(pkg))
- log.silly('linkStuff', packageId(pkg), 'has', parent, 'as its parent node_modules')
- if (global) log.silly('linkStuff', packageId(pkg), 'is part of a global install')
- if (gnm) log.silly('linkStuff', packageId(pkg), 'is installed into a global node_modules')
- if (gtop) log.silly('linkStuff', packageId(pkg), 'is installed into the top-level global node_modules')
-
- asyncMap(
- [linkBins, linkMans, !didRB && rebuildBundles],
- function (fn, cb) {
- if (!fn) return cb()
- log.verbose(fn.name, packageId(pkg))
- fn(pkg, folder, parent, gtop, cb)
- },
- cb
- )
+ return binLinks(pkg, folder, global, binLinksConfig(pkg), cb)
}
-function rebuildBundles (pkg, folder, parent, gtop, cb) {
+function rebuildBundles (pkg, folder, cb) {
if (!npm.config.get('rebuild-bundle')) return cb()
var deps = Object.keys(pkg.dependencies || {})
@@ -166,89 +138,3 @@ function rebuildBundles (pkg, folder, parent, gtop, cb) {
}), cb)
})
}
-
-function linkBins (pkg, folder, parent, gtop, cb) {
- if (!pkg.bin || !gtop && path.basename(parent) !== 'node_modules') {
- return cb()
- }
- var binRoot = gtop ? npm.globalBin
- : path.resolve(parent, '.bin')
- log.verbose('linkBins', [pkg.bin, binRoot, gtop])
-
- asyncMap(Object.keys(pkg.bin), function (b, cb) {
- linkBin(
- path.resolve(folder, pkg.bin[b]),
- path.resolve(binRoot, b),
- gtop && folder,
- function (er) {
- if (er) return cb(er)
- // bins should always be executable.
- // XXX skip chmod on windows?
- var src = path.resolve(folder, pkg.bin[b])
- fs.chmod(src, npm.modes.exec, function (er) {
- if (er && er.code === 'ENOENT' && npm.config.get('ignore-scripts')) {
- return cb()
- }
- if (er) return cb(er)
- isHashbangFile(src).then((isHashbang) => {
- if (isHashbang) return dos2Unix(src)
- }).then(() => {
- if (!gtop) return cb()
- var dest = path.resolve(binRoot, b)
- var out = npm.config.get('parseable')
- ? dest + '::' + src + ':BINFILE'
- : dest + ' -> ' + src
- if (!npm.config.get('json') && !npm.config.get('parseable')) output(out)
- cb()
- }).catch(cb)
- })
- }
- )
- }, cb)
-}
-
-function linkBin (from, to, gently, cb) {
- if (process.platform !== 'win32') {
- return linkIfExists(from, to, gently, cb)
- } else {
- return cmdShimIfExists(from, to, cb)
- }
-}
-
-function linkMans (pkg, folder, parent, gtop, cb) {
- if (!pkg.man || !gtop || process.platform === 'win32') return cb()
-
- var manRoot = path.resolve(npm.config.get('prefix'), 'share', 'man')
- log.verbose('linkMans', 'man files are', pkg.man, 'in', manRoot)
-
- // make sure that the mans are unique.
- // otherwise, if there are dupes, it'll fail with EEXIST
- var set = pkg.man.reduce(function (acc, man) {
- acc[path.basename(man)] = man
- return acc
- }, {})
- pkg.man = pkg.man.filter(function (man) {
- return set[path.basename(man)] === man
- })
-
- asyncMap(pkg.man, function (man, cb) {
- if (typeof man !== 'string') return cb()
- log.silly('linkMans', 'preparing to link', man)
- var parseMan = man.match(/(.*\.([0-9]+)(\.gz)?)$/)
- if (!parseMan) {
- return cb(new Error(
- man + ' is not a valid name for a man file. ' +
- 'Man files must end with a number, ' +
- 'and optionally a .gz suffix if they are compressed.'
- ))
- }
-
- var stem = parseMan[1]
- var sxn = parseMan[2]
- var bn = path.basename(stem)
- var manSrc = path.resolve(folder, man)
- var manDest = path.join(manRoot, 'man' + sxn, bn)
-
- linkIfExists(manSrc, manDest, gtop && folder, cb)
- }, cb)
-}
diff --git a/deps/npm/lib/cache.js b/deps/npm/lib/cache.js
index af1ac57e71..8bd2d5fcb1 100644
--- a/deps/npm/lib/cache.js
+++ b/deps/npm/lib/cache.js
@@ -4,6 +4,7 @@ const BB = require('bluebird')
const assert = require('assert')
const cacache = require('cacache')
+const finished = BB.promisify(require('mississippi').finished)
const log = require('npmlog')
const npa = require('npm-package-arg')
const npm = require('./npm.js')
@@ -105,7 +106,7 @@ function add (args, where) {
log.verbose('cache add', 'spec', spec)
if (!spec) return BB.reject(new Error(usage))
log.silly('cache add', 'parsed spec', spec)
- return pacote.prefetch(spec, pacoteOpts({where}))
+ return finished(pacote.tarball.stream(spec, pacoteOpts({where})).resume())
}
cache.verify = verify
diff --git a/deps/npm/lib/config/bin-links.js b/deps/npm/lib/config/bin-links.js
new file mode 100644
index 0000000000..1ee90cc35e
--- /dev/null
+++ b/deps/npm/lib/config/bin-links.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const npm = require('../npm.js')
+var packageId = require('../utils/package-id.js')
+const log = require('npmlog')
+
+module.exports = binLinksOpts
+
+function binLinksOpts (pkg) {
+ return {
+ ignoreScripts: npm.config.get('ignore-scripts'),
+ force: npm.config.get('force'),
+ globalBin: npm.globalBin,
+ globalDir: npm.globalDir,
+ json: npm.config.get('json'),
+ log: log,
+ name: 'npm',
+ parseable: npm.config.get('parseable'),
+ pkgId: packageId(pkg),
+ prefix: npm.config.get('prefix'),
+ prefixes: [
+ npm.prefix,
+ npm.globalPrefix,
+ npm.dir,
+ npm.root,
+ npm.globalDir,
+ npm.bin,
+ npm.globalBin
+ ],
+ umask: npm.config.get('umask')
+ }
+}
diff --git a/deps/npm/lib/config/core.js b/deps/npm/lib/config/core.js
index 15a1674b3d..50cf4772e7 100644
--- a/deps/npm/lib/config/core.js
+++ b/deps/npm/lib/config/core.js
@@ -225,7 +225,6 @@ Conf.prototype.loadPrefix = require('./load-prefix.js')
Conf.prototype.loadCAFile = require('./load-cafile.js')
Conf.prototype.loadUid = require('./load-uid.js')
Conf.prototype.setUser = require('./set-user.js')
-Conf.prototype.findPrefix = require('./find-prefix.js')
Conf.prototype.getCredentialsByURI = require('./get-credentials-by-uri.js')
Conf.prototype.setCredentialsByURI = require('./set-credentials-by-uri.js')
Conf.prototype.clearCredentialsByURI = require('./clear-credentials-by-uri.js')
diff --git a/deps/npm/lib/config/defaults.js b/deps/npm/lib/config/defaults.js
index 35617fd638..c049f213fa 100644
--- a/deps/npm/lib/config/defaults.js
+++ b/deps/npm/lib/config/defaults.js
@@ -176,6 +176,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
maxsockets: 50,
message: '%s',
'metrics-registry': null,
+ 'node-options': null,
'node-version': process.version,
'offline': false,
'onload-script': false,
@@ -183,6 +184,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
optional: true,
otp: null,
'package-lock': true,
+ 'package-lock-only': false,
parseable: false,
'prefer-offline': false,
'prefer-online': false,
@@ -308,6 +310,7 @@ exports.types = {
maxsockets: Number,
message: String,
'metrics-registry': [null, String],
+ 'node-options': [null, String],
'node-version': [null, semver],
offline: Boolean,
'onload-script': [null, String],
@@ -315,6 +318,7 @@ exports.types = {
optional: Boolean,
'package-lock': Boolean,
otp: Number,
+ 'package-lock-only': Boolean,
parseable: Boolean,
'prefer-offline': Boolean,
'prefer-online': Boolean,
diff --git a/deps/npm/lib/config/fetch-opts.js b/deps/npm/lib/config/fetch-opts.js
new file mode 100644
index 0000000000..1a030c378e
--- /dev/null
+++ b/deps/npm/lib/config/fetch-opts.js
@@ -0,0 +1,77 @@
+'use strict'
+
+const url = require('url')
+
+module.exports.fromPacote = fromPacote
+
+function fromPacote (opts) {
+ return {
+ cache: getCacheMode(opts),
+ cacheManager: opts.cache,
+ ca: opts.ca,
+ cert: opts.cert,
+ headers: getHeaders('', opts.registry, opts),
+ key: opts.key,
+ localAddress: opts.localAddress,
+ maxSockets: opts.maxSockets,
+ proxy: opts.proxy,
+ referer: opts.refer,
+ retry: opts.retry,
+ strictSSL: !!opts.strictSSL,
+ timeout: opts.timeout,
+ uid: opts.uid,
+ gid: opts.gid
+ }
+}
+
+function getCacheMode (opts) {
+ return opts.offline
+ ? 'only-if-cached'
+ : opts.preferOffline
+ ? 'force-cache'
+ : opts.preferOnline
+ ? 'no-cache'
+ : 'default'
+}
+
+function getHeaders (uri, registry, opts) {
+ const headers = Object.assign({
+ 'npm-in-ci': opts.isFromCI,
+ 'npm-scope': opts.projectScope,
+ 'npm-session': opts.npmSession,
+ 'user-agent': opts.userAgent,
+ 'referer': opts.refer
+ }, opts.headers)
+ // check for auth settings specific to this registry
+ let auth = (
+ opts.auth &&
+ opts.auth[registryKey(registry)]
+ ) || opts.auth
+ // If a tarball is hosted on a different place than the manifest, only send
+ // credentials on `alwaysAuth`
+ const shouldAuth = auth && (
+ auth.alwaysAuth ||
+ url.parse(uri).host === url.parse(registry).host
+ )
+ if (shouldAuth && auth.token) {
+ headers.authorization = `Bearer ${auth.token}`
+ } else if (shouldAuth && auth.username && auth.password) {
+ const encoded = Buffer.from(
+ `${auth.username}:${auth.password}`, 'utf8'
+ ).toString('base64')
+ headers.authorization = `Basic ${encoded}`
+ } else if (shouldAuth && auth._auth) {
+ headers.authorization = `Basic ${auth._auth}`
+ }
+ return headers
+}
+
+function registryKey (registry) {
+ const parsed = url.parse(registry)
+ const formatted = url.format({
+ host: parsed.host,
+ pathname: parsed.pathname,
+ slashes: parsed.slashes
+ })
+ return url.resolve(formatted, '.')
+}
diff --git a/deps/npm/lib/config/find-prefix.js b/deps/npm/lib/config/find-prefix.js
deleted file mode 100644
index 58f5cc8040..0000000000
--- a/deps/npm/lib/config/find-prefix.js
+++ /dev/null
@@ -1,56 +0,0 @@
-// try to find the most reasonable prefix to use
-
-module.exports = findPrefix
-
-var fs = require('fs')
-var path = require('path')
-
-function findPrefix (p, cb_) {
- function cb (er, p) {
- process.nextTick(function () {
- cb_(er, p)
- })
- }
-
- p = path.resolve(p)
- // if there's no node_modules folder, then
- // walk up until we hopefully find one.
- // if none anywhere, then use cwd.
- var walkedUp = false
- while (path.basename(p) === 'node_modules') {
- p = path.dirname(p)
- walkedUp = true
- }
- if (walkedUp) return cb(null, p)
-
- findPrefix_(p, p, cb)
-}
-
-function findPrefix_ (p, original, cb) {
- if (p === '/' ||
- (process.platform === 'win32' && p.match(/^[a-zA-Z]:(\\|\/)?$/))) {
- return cb(null, original)
- }
- fs.readdir(p, function (er, files) {
- // an error right away is a bad sign.
- // unless the prefix was simply a non
- // existent directory.
- if (er && p === original) {
- if (er.code === 'ENOENT') return cb(null, original)
- return cb(er)
- }
-
- // walked up too high or something.
- if (er) return cb(null, original)
-
- if (files.indexOf('node_modules') !== -1 ||
- files.indexOf('package.json') !== -1) {
- return cb(null, p)
- }
-
- var d = path.dirname(p)
- if (d === p) return cb(null, original)
-
- return findPrefix_(d, original, cb)
- })
-}
diff --git a/deps/npm/lib/config/gentle-fs.js b/deps/npm/lib/config/gentle-fs.js
new file mode 100644
index 0000000000..c4a1f9fa22
--- /dev/null
+++ b/deps/npm/lib/config/gentle-fs.js
@@ -0,0 +1,32 @@
+'use strict'
+
+const npm = require('../npm.js')
+const log = require('npmlog')
+
+module.exports = gentleFSOpts
+
+function gentleFSOpts (gently, base, abs) {
+ return {
+ // never rm the root, prefix, or bin dirs
+ //
+ // globals included because of `npm link` -- as far as the package
+ // requesting the link is concerned, the linked package is always
+ // installed globally
+ prefixes: [
+ npm.prefix,
+ npm.globalPrefix,
+ npm.dir,
+ npm.root,
+ npm.globalDir,
+ npm.bin,
+ npm.globalBin
+ ],
+ absolute: abs,
+ log: log,
+ prefix: npm.prefix,
+ force: npm.config.get('force'),
+ gently: gently,
+ base: base,
+ name: 'npm'
+ }
+}
diff --git a/deps/npm/lib/config/lifecycle.js b/deps/npm/lib/config/lifecycle.js
index 5fca93939d..86941edcd8 100644
--- a/deps/npm/lib/config/lifecycle.js
+++ b/deps/npm/lib/config/lifecycle.js
@@ -18,6 +18,7 @@ function lifecycleOpts (moreOpts) {
ignorePrepublish: npm.config.get('ignore-prepublish'),
ignoreScripts: npm.config.get('ignore-scripts'),
log: log,
+ nodeOptions: npm.config.get('node-options'),
production: npm.config.get('production'),
scriptShell: npm.config.get('script-shell'),
scriptsPrependNodePath: npm.config.get('scripts-prepend-node-path'),
diff --git a/deps/npm/lib/config/load-prefix.js b/deps/npm/lib/config/load-prefix.js
index bb5d9f3be5..c2af00c7f6 100644
--- a/deps/npm/lib/config/load-prefix.js
+++ b/deps/npm/lib/config/load-prefix.js
@@ -1,6 +1,6 @@
module.exports = loadPrefix
-var findPrefix = require('./find-prefix.js')
+var findPrefix = require('find-npm-prefix')
var path = require('path')
function loadPrefix (cb) {
@@ -43,9 +43,9 @@ function loadPrefix (cb) {
p = path.resolve(cli.prefix)
process.nextTick(cb)
} else {
- findPrefix(process.cwd(), function (er, found) {
+ findPrefix(process.cwd()).then((found) => {
p = found
- cb(er)
- })
+ cb()
+ }, cb)
}
}
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
index 029f806f83..42906f2394 100644
--- a/deps/npm/lib/install.js
+++ b/deps/npm/lib/install.js
@@ -106,6 +106,8 @@ var mkdirp = require('mkdirp')
var rimraf = require('rimraf')
var iferr = require('iferr')
var validate = require('aproba')
+var uniq = require('lodash.uniq')
+var Bluebird = require('bluebird')
// npm internal utils
var npm = require('./npm.js')
@@ -132,6 +134,7 @@ var decomposeActions = require('./install/decompose-actions.js')
var validateTree = require('./install/validate-tree.js')
var validateArgs = require('./install/validate-args.js')
var saveRequested = require('./install/save.js').saveRequested
+var saveShrinkwrap = require('./install/save.js').saveShrinkwrap
var getSaveType = require('./install/save.js').getSaveType
var doSerialActions = require('./install/actions.js').doSerial
var doReverseSerialActions = require('./install/actions.js').doReverseSerial
@@ -199,8 +202,9 @@ function install (where, args, cb) {
new Installer(where, dryrun, args).run(cb)
}
-function Installer (where, dryrun, args) {
- validate('SBA', arguments)
+function Installer (where, dryrun, args, opts) {
+ validate('SBA|SBAO', arguments)
+ if (!opts) opts = {}
this.where = where
this.dryrun = dryrun
this.args = args
@@ -215,23 +219,40 @@ function Installer (where, dryrun, args) {
this.progress = {}
this.noPackageJsonOk = !!args.length
this.topLevelLifecycles = !args.length
- this.dev = npm.config.get('dev') || (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) || /^dev(elopment)?$/.test(npm.config.get('only'))
- this.prod = !/^dev(elopment)?$/.test(npm.config.get('only'))
- this.rollback = npm.config.get('rollback')
- this.link = npm.config.get('link')
- this.global = this.where === path.resolve(npm.globalDir, '..')
+
+ const dev = npm.config.get('dev')
+ const only = npm.config.get('only')
+ const onlyProd = /^prod(uction)?$/.test(only)
+ const onlyDev = /^dev(elopment)?$/.test(only)
+ const prod = npm.config.get('production')
+ this.dev = opts.dev != null ? opts.dev : dev || (!onlyProd && !prod) || onlyDev
+ this.prod = opts.prod != null ? opts.prod : !onlyDev
+
+ this.packageLockOnly = opts.packageLockOnly != null
+ ? opts.packageLockOnly : npm.config.get('package-lock-only')
+ this.rollback = opts.rollback != null ? opts.rollback : npm.config.get('rollback')
+ this.link = opts.link != null ? opts.link : npm.config.get('link')
+ this.saveOnlyLock = opts.saveOnlyLock
+ this.global = opts.global != null ? opts.global : this.where === path.resolve(npm.globalDir, '..')
this.started = Date.now()
}
Installer.prototype = {}
Installer.prototype.run = function (_cb) {
- validate('F', arguments)
-
- var cb = function (err) {
- saveMetrics(!err)
- return _cb.apply(this, arguments)
+ validate('F|', arguments)
+
+ var result
+ var cb
+ if (_cb) {
+ cb = function (err) {
+ saveMetrics(!err)
+ return _cb.apply(this, arguments)
+ }
+ } else {
+ result = new Promise((resolve, reject) => {
+ cb = (err, value) => err ? reject(err) : resolve(value)
+ })
}
-
// FIXME: This is bad and I should feel bad.
// lib/install needs to have some way of sharing _limited_
// state with the things it calls. Passing the object is too
@@ -274,7 +295,11 @@ Installer.prototype.run = function (_cb) {
[this, this.debugActions, 'diffTrees', 'differences'],
[this, this.debugActions, 'decomposeActions', 'todo'])
- if (!this.dryrun) {
+
+ if (this.packageLockOnly) {
+ postInstallSteps.push(
+ [this, this.saveToDependencies])
+ } else if (!this.dryrun) {
installSteps.push(
[this.newTracker(log, 'executeActions', 8)],
[this, this.executeActions],
@@ -296,6 +321,7 @@ Installer.prototype.run = function (_cb) {
// until after we extract them
[this, (next) => { computeMetadata(this.idealTree); next() }],
[this, this.pruneIdealTree],
+ [this, this.debugLogicalTree, 'saveTree', 'idealTree'],
[this, this.saveToDependencies])
}
}
@@ -319,6 +345,7 @@ Installer.prototype.run = function (_cb) {
cb(installEr || postInstallEr, self.getInstalledModules(), self.idealTree)
})
})
+ return result
}
Installer.prototype.loadArgMetadata = function (next) {
@@ -544,17 +571,15 @@ Installer.prototype.executeActions = function (cb) {
Installer.prototype.rollbackFailedOptional = function (staging, actionsToRun, cb) {
if (!this.rollback) return cb()
- var failed = actionsToRun.map(function (action) {
+ var failed = uniq(actionsToRun.map(function (action) {
return action[1]
}).filter(function (pkg) {
return pkg.failed && pkg.rollback
- })
+ }))
var top = this.currentTree && this.currentTree.path
- asyncMap(failed, function (pkg, next) {
- asyncMap(pkg.rollback, function (rollback, done) {
- rollback(top, staging, pkg, done)
- }, next)
- }, cb)
+ Bluebird.map(failed, (pkg) => {
+ return Bluebird.map(pkg.rollback, (rollback) => rollback(top, staging, pkg))
+ }).asCallback(cb)
}
Installer.prototype.commit = function (staging, actionsToRun, cb) {
@@ -609,7 +634,11 @@ Installer.prototype.saveToDependencies = function (cb) {
validate('F', arguments)
if (this.failing) return cb()
log.silly('install', 'saveToDependencies')
- saveRequested(this.idealTree, cb)
+ if (this.saveOnlyLock) {
+ saveShrinkwrap(this.idealTree, cb)
+ } else {
+ saveRequested(this.idealTree, cb)
+ }
}
Installer.prototype.readGlobalPackageData = function (cb) {
@@ -747,6 +776,8 @@ Installer.prototype.printInstalledForHuman = function (diffs, cb) {
var moved = 0
diffs.forEach(function (action) {
var mutation = action[0]
+ var pkg = action[1]
+ if (pkg.failed) return
if (mutation === 'remove') {
++removed
} else if (mutation === 'move') {
@@ -885,11 +916,33 @@ Installer.prototype.debugActions = function (name, actionListName, cb) {
// to define the arguments for use by chain before the property exists yet.
Installer.prototype.debugTree = function (name, treeName, cb) {
validate('SSF', arguments)
- log.silly(name, this.prettify(this[treeName]).trim())
+ log.silly(name, this.archyDebugTree(this[treeName]).trim())
+ cb()
+}
+
+Installer.prototype.archyDebugTree = function (tree) {
+ validate('O', arguments)
+ var seen = new Set()
+ function byName (aa, bb) {
+ return packageId(aa).localeCompare(packageId(bb))
+ }
+ function expandTree (tree) {
+ seen.add(tree)
+ return {
+ label: packageId(tree),
+ nodes: tree.children.filter((tree) => { return !seen.has(tree) && !tree.removed }).sort(byName).map(expandTree)
+ }
+ }
+ return archy(expandTree(tree), '', { unicode: npm.config.get('unicode') })
+}
+
+Installer.prototype.debugLogicalTree = function (name, treeName, cb) {
+ validate('SSF', arguments)
+ this[treeName] && log.silly(name, this.archyDebugLogicalTree(this[treeName]).trim())
cb()
}
-Installer.prototype.prettify = function (tree) {
+Installer.prototype.archyDebugLogicalTree = function (tree) {
validate('O', arguments)
var seen = new Set()
function byName (aa, bb) {
@@ -899,7 +952,7 @@ Installer.prototype.prettify = function (tree) {
seen.add(tree)
return {
label: packageId(tree),
- nodes: tree.children.filter((tree) => { return !seen.has(tree) && !tree.removed && !tree.failed }).sort(byName).map(expandTree)
+ nodes: tree.requires.filter((tree) => { return !seen.has(tree) && !tree.removed }).sort(byName).map(expandTree)
}
}
return archy(expandTree(tree), '', { unicode: npm.config.get('unicode') })
diff --git a/deps/npm/lib/install/action/build.js b/deps/npm/lib/install/action/build.js
index f59b852e84..be2c141f0d 100644
--- a/deps/npm/lib/install/action/build.js
+++ b/deps/npm/lib/install/action/build.js
@@ -7,7 +7,7 @@ var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('build', packageId(pkg))
chain([
- [build.linkStuff, pkg.package, pkg.path, npm.config.get('global'), true],
+ [build.linkStuff, pkg.package, pkg.path, npm.config.get('global')],
[build.writeBuiltinConf, pkg.package, pkg.path]
], next)
}
diff --git a/deps/npm/lib/install/action/extract.js b/deps/npm/lib/install/action/extract.js
index 8e80d4adda..6b827f36ea 100644
--- a/deps/npm/lib/install/action/extract.js
+++ b/deps/npm/lib/install/action/extract.js
@@ -16,6 +16,7 @@ let pacoteOpts
const path = require('path')
const localWorker = require('./extract-worker.js')
const workerFarm = require('worker-farm')
+const isRegistry = require('../../utils/is-registry.js')
const WORKER_PATH = require.resolve('./extract-worker.js')
let workers
@@ -72,7 +73,7 @@ function extract (staging, pkg, log) {
let msg = args
const spec = typeof args[0] === 'string' ? npa(args[0]) : args[0]
args[0] = spec.raw
- if (ENABLE_WORKERS && (spec.registry || spec.type === 'remote')) {
+ if (ENABLE_WORKERS && (isRegistry(spec) || spec.type === 'remote')) {
// We can't serialize these options
opts.loglevel = opts.log.level
opts.log = null
diff --git a/deps/npm/lib/install/action/fetch.js b/deps/npm/lib/install/action/fetch.js
index 474e00b05c..a4d760fe82 100644
--- a/deps/npm/lib/install/action/fetch.js
+++ b/deps/npm/lib/install/action/fetch.js
@@ -1,5 +1,8 @@
'use strict'
+const BB = require('bluebird')
+
+const finished = BB.promisify(require('mississippi').finished)
const packageId = require('../../utils/package-id.js')
const pacote = require('pacote')
const pacoteOpts = require('../../config/pacote')
@@ -8,5 +11,6 @@ module.exports = fetch
function fetch (staging, pkg, log, next) {
log.silly('fetch', packageId(pkg))
const opts = pacoteOpts({integrity: pkg.package._integrity})
- pacote.prefetch(pkg.package._requested, opts).then(() => next(), next)
+ return finished(pacote.tarball.stream(pkg.package._requested, opts))
+ .then(() => next(), next)
}
diff --git a/deps/npm/lib/install/action/finalize.js b/deps/npm/lib/install/action/finalize.js
index a50ec8a6bd..e46f1b9d83 100644
--- a/deps/npm/lib/install/action/finalize.js
+++ b/deps/npm/lib/install/action/finalize.js
@@ -7,11 +7,13 @@ const mkdirp = Bluebird.promisify(require('mkdirp'))
const lstat = Bluebird.promisify(fs.lstat)
const readdir = Bluebird.promisify(fs.readdir)
const symlink = Bluebird.promisify(fs.symlink)
-const gentlyRm = require('../../utils/gently-rm')
+const gentlyRm = Bluebird.promisify(require('../../utils/gently-rm'))
const moduleStagingPath = require('../module-staging-path.js')
const move = require('move-concurrently')
const moveOpts = {fs: fs, Promise: Bluebird, maxConcurrency: 4}
const getRequested = require('../get-requested.js')
+const log = require('npmlog')
+const packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log) {
log.silly('finalize', pkg.realpath)
@@ -88,8 +90,17 @@ module.exports = function (staging, pkg, log) {
}
}
-module.exports.rollback = function (top, staging, pkg, next) {
- const requested = pkg.package._requested || getRequested(pkg)
- if (requested && requested.type === 'directory') return next()
- gentlyRm(pkg.path, false, top, next)
+module.exports.rollback = function (top, staging, pkg) {
+ return Bluebird.try(() => {
+ const requested = pkg.package._requested || getRequested(pkg)
+ if (requested && requested.type === 'directory') return Promise.resolve()
+ // strictly speaking rolling back a finalize should ONLY remove module that
+ // was being finalized, not any of the things under it. But currently
+ // those modules are guaranteed to be useless so we may as well remove them too.
+ // When/if we separate `commit` step and can rollback to previous versions
+ // of upgraded modules then we'll need to revisit this…
+ return gentlyRm(pkg.path, false, top).catch((err) => {
+ log.warn('rollback', `Rolling back ${packageId(pkg)} failed (this is probably harmless): ${err.message ? err.message : err}`)
+ })
+ })
}
diff --git a/deps/npm/lib/install/action/refresh-package-json.js b/deps/npm/lib/install/action/refresh-package-json.js
index 42f8012100..32e6444444 100644
--- a/deps/npm/lib/install/action/refresh-package-json.js
+++ b/deps/npm/lib/install/action/refresh-package-json.js
@@ -14,7 +14,7 @@ module.exports = function (staging, pkg, log) {
return readJson(path.join(pkg.path, 'package.json'), false).then((metadata) => {
Object.keys(pkg.package).forEach(function (key) {
- if (key !== 'dependencies' && !isEmpty(pkg.package[key])) {
+ if (key !== 'version' && key !== 'dependencies' && !isEmpty(pkg.package[key])) {
metadata[key] = pkg.package[key]
}
})
diff --git a/deps/npm/lib/install/actions.js b/deps/npm/lib/install/actions.js
index 9f0dcfa5dc..9608a943a5 100644
--- a/deps/npm/lib/install/actions.js
+++ b/deps/npm/lib/install/actions.js
@@ -83,10 +83,8 @@ function markAsFailed (pkg) {
if (pkg.failed) return
pkg.failed = true
pkg.requires.forEach((req) => {
- req.requiredBy = req.requiredBy.filter((reqReqBy) => {
- return reqReqBy !== pkg
- })
- if (req.requiredBy.length === 0 && !req.userRequired) {
+ var requiredBy = req.requiredBy.filter((reqReqBy) => !reqReqBy.failed)
+ if (requiredBy.length === 0 && !req.userRequired) {
markAsFailed(req)
}
})
@@ -94,12 +92,7 @@ function markAsFailed (pkg) {
function handleOptionalDepErrors (pkg, err) {
markAsFailed(pkg)
- var anyFatal = pkg.userRequired || pkg.isTop
- for (var ii = 0; ii < pkg.requiredBy.length; ++ii) {
- var parent = pkg.requiredBy[ii]
- var isFatal = failedDependency(parent, pkg)
- if (isFatal) anyFatal = true
- }
+ var anyFatal = failedDependency(pkg)
if (anyFatal) {
throw err
} else {
diff --git a/deps/npm/lib/install/deps.js b/deps/npm/lib/install/deps.js
index c93907a416..93c4adffd7 100644
--- a/deps/npm/lib/install/deps.js
+++ b/deps/npm/lib/install/deps.js
@@ -32,6 +32,7 @@ var reportOptionalFailure = require('./report-optional-failure.js')
var getSaveType = require('./save.js').getSaveType
var unixFormatPath = require('../utils/unix-format-path.js')
var isExtraneous = require('./is-extraneous.js')
+var isRegistry = require('../utils/is-registry.js')
// The export functions in this module mutate a dependency tree, adding
// items to them.
@@ -121,7 +122,7 @@ function computeMetadata (tree, seen) {
}
}
- tree.children.filter((child) => !child.removed && !child.failed).forEach((child) => computeMetadata(child, seen))
+ tree.children.filter((child) => !child.removed).forEach((child) => computeMetadata(child, seen))
return tree
}
@@ -276,7 +277,7 @@ function isNotEmpty (value) {
return value != null && value !== ''
}
-module.exports.computeVersionSpec = computeVersionSpec
+exports.computeVersionSpec = computeVersionSpec
function computeVersionSpec (tree, child) {
validate('OO', arguments)
var requested
@@ -288,7 +289,7 @@ function computeVersionSpec (tree, child) {
} else {
requested = npa.resolve(child.package.name, child.package.version)
}
- if (requested.registry) {
+ if (isRegistry(requested)) {
var version = child.package.version
var rangeDescriptor = ''
if (semver.valid(version, true) &&
@@ -308,10 +309,6 @@ function moduleNameMatches (name) {
return function (child) { return moduleName(child) === name }
}
-function noModuleNameMatches (name) {
- return function (child) { return moduleName(child) !== name }
-}
-
// while this implementation does not require async calling, doing so
// gives this a consistent interface with loadDeps et al
exports.removeDeps = function (args, tree, saveToDependencies, next) {
@@ -377,19 +374,12 @@ function isDepOptional (tree, name, pkg) {
return false
}
-var failedDependency = exports.failedDependency = function (tree, name_pkg) {
- var name
- var pkg = {}
- if (typeof name_pkg === 'string') {
- name = name_pkg
- } else {
- pkg = name_pkg
- name = moduleName(pkg)
- }
- tree.children = tree.children.filter(noModuleNameMatches(name))
-
- if (isDepOptional(tree, name, pkg)) {
- return false
+exports.failedDependency = failedDependency
+function failedDependency (tree, name, pkg) {
+ if (name) {
+ if (isDepOptional(tree, name, pkg || {})) {
+ return false
+ }
}
tree.failed = true
@@ -398,17 +388,16 @@ var failedDependency = exports.failedDependency = function (tree, name_pkg) {
if (tree.userRequired) return true
- removeObsoleteDep(tree)
-
if (!tree.requiredBy) return false
+ let anyFailed = false
for (var ii = 0; ii < tree.requiredBy.length; ++ii) {
var requireParent = tree.requiredBy[ii]
- if (failedDependency(requireParent, tree.package)) {
- return true
+ if (failedDependency(requireParent, moduleName(tree), tree)) {
+ anyFailed = true
}
}
- return false
+ return anyFailed
}
function andHandleOptionalErrors (log, tree, name, done) {
@@ -418,7 +407,6 @@ function andHandleOptionalErrors (log, tree, name, done) {
if (!er) return done(er, child, childLog)
var isFatal = failedDependency(tree, name)
if (er && !isFatal) {
- tree.children = tree.children.filter(noModuleNameMatches(name))
reportOptionalFailure(tree, name, er)
return done()
} else {
@@ -443,7 +431,7 @@ function prefetchDeps (tree, deps, log, next) {
var allDependencies = Object.keys(deps).map((dep) => {
return npa.resolve(dep, deps[dep])
}).filter((dep) => {
- return dep.registry &&
+ return isRegistry(dep) &&
!seen.has(dep.toString()) &&
!findRequirement(tree, dep.name, dep)
})
@@ -601,8 +589,9 @@ function resolveWithNewModule (pkg, tree, log, next) {
validate('OOOF', arguments)
log.silly('resolveWithNewModule', packageId(pkg), 'checking installable status')
- return isInstallable(pkg, iferr(next, function () {
- addBundled(pkg, iferr(next, function () {
+ return isInstallable(pkg, (err) => {
+ let installable = !err
+ addBundled(pkg, (bundleErr) => {
var parent = earliestInstallable(tree, tree, pkg) || tree
var isLink = pkg._requested.type === 'directory'
var child = createChild({
@@ -613,8 +602,9 @@ function resolveWithNewModule (pkg, tree, log, next) {
children: pkg._bundled || [],
isLink: isLink,
isInLink: parent.isLink,
- knownInstallable: true
+ knownInstallable: installable
})
+ if (!installable || bundleErr) child.failed = true
delete pkg._bundled
var hasBundled = child.children.length
@@ -630,13 +620,14 @@ function resolveWithNewModule (pkg, tree, log, next) {
}
if (pkg._shrinkwrap && pkg._shrinkwrap.dependencies) {
- return inflateShrinkwrap(child, pkg._shrinkwrap, function (er) {
- next(er, child, log)
+ return inflateShrinkwrap(child, pkg._shrinkwrap, (swErr) => {
+ if (swErr) child.failed = true
+ next(err || bundleErr || swErr, child, log)
})
}
- next(null, child, log)
- }))
- }))
+ next(err || bundleErr, child, log)
+ })
+ })
}
var validatePeerDeps = exports.validatePeerDeps = function (tree, onInvalid) {
@@ -669,7 +660,7 @@ var findRequirement = exports.findRequirement = function (tree, name, requested,
validate('OSO', [tree, name, requested])
if (!requestor) requestor = tree
var nameMatch = function (child) {
- return moduleName(child) === name && child.parent && !child.removed && !child.failed
+ return moduleName(child) === name && child.parent && !child.removed
}
var versionMatch = function (child) {
return doesChildVersionMatch(child, requested, requestor)
diff --git a/deps/npm/lib/install/diff-trees.js b/deps/npm/lib/install/diff-trees.js
index ac4f421a50..4316f351cc 100644
--- a/deps/npm/lib/install/diff-trees.js
+++ b/deps/npm/lib/install/diff-trees.js
@@ -6,41 +6,98 @@ var flattenTree = require('./flatten-tree.js')
var isOnlyDev = require('./is-only-dev.js')
var log = require('npmlog')
var path = require('path')
+var ssri = require('ssri')
+var moduleName = require('../utils/module-name.js')
-function nonRegistrySource (pkg) {
- validate('O', arguments)
- var requested = pkg._requested || (pkg._from && npa(pkg._from))
- if (!requested) return false
+// we don't use get-requested because we're operating on files on disk, and
+// we don't want to extrapolate from what _should_ be there.
+function pkgRequested (pkg) {
+ return pkg._requested || (pkg._resolved && npa(pkg._resolved)) || (pkg._from && npa(pkg._from))
+}
- if (requested.type === 'hosted') return true
- if (requested.type === 'file' || requested.type === 'directory') return true
+function nonRegistrySource (requested) {
+ if (fromGit(requested)) return true
+ if (fromLocal(requested)) return true
+ if (fromRemote(requested)) return true
return false
}
-function pkgAreEquiv (aa, bb) {
- var aaSha = (aa.dist && aa.dist.integrity) || aa._integrity
- var bbSha = (bb.dist && bb.dist.integrity) || bb._integrity
- if (aaSha === bbSha) return true
- if (aaSha || bbSha) return false
- if (nonRegistrySource(aa) || nonRegistrySource(bb)) return false
- if (aa.version === bb.version) return true
+function fromRemote (requested) {
+ if (requested.type === 'remote') return true
+}
+
+function fromLocal (requested) {
+ // local is an npm@3 type that meant "file"
+ if (requested.type === 'file' || requested.type === 'directory' || requested.type === 'local') return true
+ return false
+}
+
+function fromGit (requested) {
+ if (requested.type === 'hosted' || requested.type === 'git') return true
return false
}
-function getUniqueId (pkg) {
- var versionspec = pkg._integrity
+function pkgIntegrity (pkg) {
+ try {
+ // dist is provided by the registry
+ var sri = (pkg.dist && pkg.dist.integrity) ||
+ // _integrity is provided by pacote
+ pkg._integrity ||
+ // _shasum is legacy
+ (pkg._shasum && ssri.fromHex(pkg._shasum, 'sha1').toString())
+ if (!sri) return
+ var integrity = ssri.parse(sri)
+ if (Object.keys(integrity).length === 0) return
+ return integrity
+ } catch (ex) {
+ return
+ }
+}
- if (!versionspec && nonRegistrySource(pkg)) {
- if (pkg._requested) {
- versionspec = pkg._requested.fetchSpec
- } else if (pkg._from) {
- versionspec = npa(pkg._from).fetchSpec
+function sriMatch (aa, bb) {
+ if (!aa || !bb) return false
+ for (let algo of Object.keys(aa)) {
+ if (!bb[algo]) continue
+ for (let aaHash of aa[algo]) {
+ for (let bbHash of bb[algo]) {
+ return aaHash.digest === bbHash.digest
+ }
}
}
- if (!versionspec) {
- versionspec = pkg.version
+ return false
+}
+
+function pkgAreEquiv (aa, bb) {
+ // coming in we know they share a path…
+
+ // if they share package metadata _identity_, they're the same thing
+ if (aa.package === bb.package) return true
+ // if they share integrity information, they're the same thing
+ var aaIntegrity = pkgIntegrity(aa.package)
+ var bbIntegrity = pkgIntegrity(bb.package)
+ if (aaIntegrity || bbIntegrity) return sriMatch(aaIntegrity, bbIntegrity)
+
+ // if they're links and they share the same target, they're the same thing
+ if (aa.isLink && bb.isLink) return aa.realpath === bb.realpath
+
+ // if we can't determine both their sources then we have no way to know
+ // if they're the same thing, so we have to assume they aren't
+ var aaReq = pkgRequested(aa.package)
+ var bbReq = pkgRequested(bb.package)
+ if (!aaReq || !bbReq) return false
+
+ if (fromGit(aaReq) && fromGit(bbReq)) {
+ // if both are git and share a _resolved specifier (one with the
+    // committish replaced by a commit hash) then they're the same
+ return aa.package._resolved && bb.package._resolved &&
+ aa.package._resolved === bb.package._resolved
}
- return pkg.name + '@' + versionspec
+
+ // we have to give up trying to find matches for non-registry sources at this point…
+ if (nonRegistrySource(aaReq) || nonRegistrySource(bbReq)) return false
+
+ // finally, if they ARE a registry source then version matching counts
+ return aa.package.version === bb.package.version
}
function pushAll (aa, bb) {
@@ -118,41 +175,56 @@ var diffTrees = module.exports._diffTrees = function (oldTree, newTree) {
var flatOldTree = flattenTree(oldTree)
var flatNewTree = flattenTree(newTree)
var toRemove = {}
- var toRemoveByUniqueId = {}
- // find differences
+ var toRemoveByName = {}
+
+ // Build our tentative remove list. We don't add remove actions yet
+  // because we might reuse them as part of a move.
Object.keys(flatOldTree).forEach(function (flatname) {
+ if (flatname === '/') return
if (flatNewTree[flatname]) return
var pkg = flatOldTree[flatname]
if (pkg.isInLink && /^[.][.][/\\]/.test(path.relative(newTree.realpath, pkg.realpath))) return
toRemove[flatname] = pkg
- var pkgunique = getUniqueId(pkg.package)
- if (!toRemoveByUniqueId[pkgunique]) toRemoveByUniqueId[pkgunique] = []
- toRemoveByUniqueId[pkgunique].push(flatname)
+ var name = moduleName(pkg)
+ if (!toRemoveByName[name]) toRemoveByName[name] = []
+ toRemoveByName[name].push({flatname: flatname, pkg: pkg})
})
- Object.keys(flatNewTree).forEach(function (path) {
- var pkg = flatNewTree[path]
- pkg.oldPkg = flatOldTree[path]
- if (pkg.oldPkg) {
- if (!pkg.userRequired && pkgAreEquiv(pkg.oldPkg.package, pkg.package)) return
+
+ // generate our add/update/move actions
+ Object.keys(flatNewTree).forEach(function (flatname) {
+ if (flatname === '/') return
+ var pkg = flatNewTree[flatname]
+ var oldPkg = pkg.oldPkg = flatOldTree[flatname]
+ if (oldPkg) {
+ // if the versions are equivalent then we don't need to update… unless
+ // the user explicitly asked us to.
+ if (!pkg.userRequired && pkgAreEquiv(oldPkg, pkg)) return
setAction(differences, 'update', pkg)
} else {
- var vername = getUniqueId(pkg.package)
- var removing = toRemoveByUniqueId[vername] && toRemoveByUniqueId[vername].length
+ var name = moduleName(pkg)
+ // find any packages we're removing that share the same name and are equivalent
+ var removing = (toRemoveByName[name] || []).filter((rm) => pkgAreEquiv(rm.pkg, pkg))
var bundlesOrFromBundle = pkg.fromBundle || pkg.package.bundleDependencies
- if (removing && !bundlesOrFromBundle) {
- var flatname = toRemoveByUniqueId[vername].shift()
- pkg.fromPath = toRemove[flatname].path
+ // if we have any removes that match AND we're not working with a bundle then upgrade to a move
+ if (removing.length && !bundlesOrFromBundle) {
+ var toMv = removing.shift()
+ toRemoveByName[name] = toRemoveByName[name].filter((rm) => rm !== toMv)
+ pkg.fromPath = toMv.pkg.path
setAction(differences, 'move', pkg)
- delete toRemove[flatname]
+ delete toRemove[toMv.flatname]
+ // we don't generate add actions for things found in links (which already exist on disk) or
+ // for bundled modules (which will be installed when we install their parent)
} else if (!(pkg.isInLink && pkg.fromBundle)) {
setAction(differences, 'add', pkg)
}
}
})
+
+ // finally generate our remove actions from any not consumed by moves
Object
.keys(toRemove)
- .map((path) => toRemove[path])
+ .map((flatname) => toRemove[flatname])
.forEach((pkg) => setAction(differences, 'remove', pkg))
const includeDev = npm.config.get('dev') ||
diff --git a/deps/npm/lib/install/inflate-shrinkwrap.js b/deps/npm/lib/install/inflate-shrinkwrap.js
index 48be93d095..43ac9136f0 100644
--- a/deps/npm/lib/install/inflate-shrinkwrap.js
+++ b/deps/npm/lib/install/inflate-shrinkwrap.js
@@ -13,6 +13,7 @@ const npm = require('../npm.js')
const realizeShrinkwrapSpecifier = require('./realize-shrinkwrap-specifier.js')
const validate = require('aproba')
const path = require('path')
+const isRegistry = require('../utils/is-registry.js')
module.exports = function (tree, sw, opts, finishInflating) {
if (!fetchPackageMetadata) {
@@ -147,7 +148,7 @@ function adaptResolved (requested, resolved) {
const registry = requested.scope
? npm.config.get(`${requested.scope}:registry`) || npm.config.get('registry')
: npm.config.get('registry')
- if (!requested.registry || (resolved && resolved.indexOf(registry) === 0)) {
+ if (!isRegistry(requested) || (resolved && resolved.indexOf(registry) === 0)) {
// Nothing to worry about here. Pass it through.
return resolved
} else {
@@ -199,7 +200,7 @@ function childIsEquivalent (sw, requested, child) {
if (child.isLink && requested.type === 'directory') return path.relative(child.realpath, requested.fetchSpec) === ''
if (sw.resolved) return child.package._resolved === sw.resolved
- if (!requested.registry && sw.from) return child.package._from === sw.from
- if (!requested.registry && child.package._resolved) return sw.version === child.package._resolved
+ if (!isRegistry(requested) && sw.from) return child.package._from === sw.from
+ if (!isRegistry(requested) && child.package._resolved) return sw.version === child.package._resolved
return child.package.version === sw.version
}
diff --git a/deps/npm/lib/install/realize-shrinkwrap-specifier.js b/deps/npm/lib/install/realize-shrinkwrap-specifier.js
index ac700278ff..e4b14b1f0d 100644
--- a/deps/npm/lib/install/realize-shrinkwrap-specifier.js
+++ b/deps/npm/lib/install/realize-shrinkwrap-specifier.js
@@ -1,5 +1,6 @@
'use strict'
var npa = require('npm-package-arg')
+const isRegistry = require('../utils/is-registry.js')
module.exports = function (name, sw, where) {
try {
@@ -7,7 +8,7 @@ module.exports = function (name, sw, where) {
return npa.resolve(name, sw.version, where)
} else if (sw.from) {
const spec = npa(sw.from, where)
- if (spec.registry && sw.version) {
+ if (isRegistry(spec) && sw.version) {
return npa.resolve(name, sw.version, where)
} else if (!sw.resolved) {
return spec
diff --git a/deps/npm/lib/install/save.js b/deps/npm/lib/install/save.js
index e1a94fcff7..f0c61f555d 100644
--- a/deps/npm/lib/install/save.js
+++ b/deps/npm/lib/install/save.js
@@ -39,6 +39,8 @@ function andWarnErrors (cb) {
}
}
+exports.saveShrinkwrap = saveShrinkwrap
+
function saveShrinkwrap (tree, next) {
validate('OF', arguments)
if (!npm.config.get('shrinkwrap') || !npm.config.get('package-lock')) {
diff --git a/deps/npm/lib/link.js b/deps/npm/lib/link.js
index 54b8dcac7a..158d9b0645 100644
--- a/deps/npm/lib/link.js
+++ b/deps/npm/lib/link.js
@@ -122,7 +122,7 @@ function linkInstall (pkgs, cb) {
log.verbose('link', 'symlinking %s to %s', pp, target)
cb()
} ],
- [symlink, pp, target],
+ [symlink, pp, target, false, false],
// do not run any scripts
rp && [build, [target], npm.config.get('global'), build._noLC, true],
[resultPrinter, pkg, pp, target, rp]
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index 3a84947f79..e58712603b 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -24,6 +24,16 @@
var npm = module.exports = new EventEmitter()
var npmconf = require('./config/core.js')
var log = require('npmlog')
+ var inspect = require('util').inspect
+
+ // capture global logging
+ process.on('log', function (level) {
+ try {
+ return log[level].apply(log, [].slice.call(arguments, 1))
+ } catch (ex) {
+ log.verbose('attempt to log ' + inspect(arguments) + ' crashed: ' + ex.message)
+ }
+ })
var path = require('path')
var abbrev = require('abbrev')
diff --git a/deps/npm/lib/pack.js b/deps/npm/lib/pack.js
index c428482035..f6a0eff805 100644
--- a/deps/npm/lib/pack.js
+++ b/deps/npm/lib/pack.js
@@ -6,7 +6,6 @@
const BB = require('bluebird')
-const cache = require('./cache')
const cacache = require('cacache')
const cp = require('child_process')
const deprCheck = require('./utils/depr-check')
@@ -18,6 +17,7 @@ const log = require('npmlog')
const move = require('move-concurrently')
const npm = require('./npm')
const output = require('./utils/output')
+const pacote = require('pacote')
const pacoteOpts = require('./config/pacote')
const path = require('path')
const PassThrough = require('stream').PassThrough
@@ -26,7 +26,6 @@ const pipe = BB.promisify(require('mississippi').pipe)
const prepublishWarning = require('./utils/warn-deprecated')('prepublish-on-install')
const pinflight = require('promise-inflight')
const readJson = BB.promisify(require('read-package-json'))
-const writeStreamAtomic = require('fs-write-stream-atomic')
const tar = require('tar')
const packlist = require('npm-packlist')
@@ -69,12 +68,8 @@ function pack_ (pkg, dir) {
return packDirectory(mani, mani._resolved, target)
})
} else {
- return cache.add(pkg).then((info) => {
- return pipe(
- cacache.get.stream.byDigest(pacoteOpts().cache, info.integrity || mani._integrity),
- writeStreamAtomic(target)
- )
- }).then(() => target)
+ return pacote.tarball.toFile(pkg, target, pacoteOpts())
+ .then(() => target)
}
})
})
@@ -125,11 +120,15 @@ function packDirectory (mani, dir, target) {
cwd: dir,
prefix: 'package/',
portable: true,
+ noMtime: true,
gzip: true
}
return packlist({ path: dir })
- .then((files) => tar.create(tarOpt, files))
+ // NOTE: node-tar does some Magic Stuff depending on prefixes for files
+ // specifically with @ signs, so we just neutralize that one
+ // and any such future "features" by prepending `./`
+ .then((files) => tar.create(tarOpt, files.map((f) => `./${f}`)))
.then(() => move(tmpTarget, target, {Promise: BB, fs}))
.then(() => lifecycle(pkg, 'postpack', dir))
.then(() => target)
diff --git a/deps/npm/lib/profile.js b/deps/npm/lib/profile.js
index 4238e14276..587a26ca8b 100644
--- a/deps/npm/lib/profile.js
+++ b/deps/npm/lib/profile.js
@@ -12,6 +12,7 @@ const qrcodeTerminal = require('qrcode-terminal')
const url = require('url')
const queryString = require('query-string')
const pulseTillDone = require('./utils/pulse-till-done.js')
+const inspect = require('util').inspect
module.exports = profileCmd
@@ -87,11 +88,11 @@ function config () {
}
const knownProfileKeys = qw`
- name email ${'two factor auth'} fullname homepage
+ name email ${'two-factor auth'} fullname homepage
freenode twitter github created updated`
function get (args) {
- const tfa = 'two factor auth'
+ const tfa = 'two-factor auth'
const conf = config()
return pulseTillDone.withPromise(profile.get(conf)).then((info) => {
if (!info.cidr_whitelist) delete info.cidr_whitelist
@@ -188,8 +189,10 @@ function set (args) {
output(JSON.stringify({[prop]: result[prop]}, null, 2))
} else if (conf.parseable) {
output(prop + '\t' + result[prop])
- } else {
+ } else if (result[prop] != null) {
output('Set', prop, 'to', result[prop])
+ } else {
+ output('Set', prop)
}
})
}))
@@ -202,7 +205,7 @@ function enable2fa (args) {
}
const mode = args[0] || 'auth-and-writes'
if (mode !== 'auth-only' && mode !== 'auth-and-writes') {
- return Promise.reject(new Error(`Invalid two factor authentication mode "${mode}".\n` +
+ return Promise.reject(new Error(`Invalid two-factor authentication mode "${mode}".\n` +
'Valid modes are:\n' +
' auth-only - Require two-factor authentication only when logging in\n' +
' auth-and-writes - Require two-factor authentication when logging in AND when publishing'))
@@ -210,16 +213,31 @@ function enable2fa (args) {
const conf = config()
if (conf.json || conf.parseable) {
return Promise.reject(new Error(
- 'Enabling two-factor authentication is an interactive opperation and ' +
- (conf.json ? 'JSON' : 'parseable') + 'output mode is not available'))
+ 'Enabling two-factor authentication is an interactive operation and ' +
+ (conf.json ? 'JSON' : 'parseable') + ' output mode is not available'))
}
- log.notice('profile', 'Enabling two factor authentication for ' + mode)
+
const info = {
tfa: {
mode: mode
}
}
- return readUserInfo.password().then((password) => {
+
+ return Bluebird.try(() => {
+ // if they're using legacy auth currently then we have to update them to a
+ // bearer token before continuing.
+ if (conf.auth.basic) {
+ log.info('profile', 'Updating authentication to bearer token')
+ return profile.login(conf.auth.basic.username, conf.auth.basic.password, conf).then((result) => {
+ if (!result.token) throw new Error('Your registry ' + conf.registry + 'does not seem to support bearer tokens. Bearer tokens are required for two-factor authentication')
+ npm.config.setCredentialsByURI(conf.registry, {token: result.token})
+ return Bluebird.fromNode((cb) => npm.config.save('user', cb))
+ })
+ }
+ }).then(() => {
+ log.notice('profile', 'Enabling two factor authentication for ' + mode)
+ return readUserInfo.password()
+ }).then((password) => {
info.tfa.password = password
log.info('profile', 'Determine if tfa is pending')
return pulseTillDone.withPromise(profile.get(conf)).then((info) => {
@@ -235,7 +253,7 @@ function enable2fa (args) {
}
})
}).then(() => {
- log.info('profile', 'Setting two factor authentication to ' + mode)
+ log.info('profile', 'Setting two-factor authentication to ' + mode)
return pulseTillDone.withPromise(profile.set(info, conf))
}).then((challenge) => {
if (challenge.tfa === null) {
@@ -243,7 +261,7 @@ function enable2fa (args) {
return
}
if (typeof challenge.tfa !== 'string' || !/^otpauth:[/][/]/.test(challenge.tfa)) {
- throw new Error('Unknown error enabling two-factor authentication. Expected otpauth URL, got: ' + challenge.tfa)
+ throw new Error('Unknown error enabling two-factor authentication. Expected otpauth URL, got: ' + inspect(challenge.tfa))
}
const otpauth = url.parse(challenge.tfa)
const opts = queryString.parse(otpauth.query)
@@ -252,10 +270,10 @@ function enable2fa (args) {
}).then((code) => {
return readUserInfo.otp('And an OTP code from your authenticator: ')
}).then((otp1) => {
- log.info('profile', 'Finalizing two factor authentication')
+ log.info('profile', 'Finalizing two-factor authentication')
return profile.set({tfa: [otp1]}, conf)
}).then((result) => {
- output('TFA successfully enabled. Below are your recovery codes, please print these out.')
+ output('2FA successfully enabled. Below are your recovery codes, please print these out.')
output('You will need these to recover access to your account if you lose your authentication device.')
result.tfa.forEach((c) => output('\t' + c))
})
diff --git a/deps/npm/lib/publish.js b/deps/npm/lib/publish.js
index bf60e1d5a6..20bd2603e6 100644
--- a/deps/npm/lib/publish.js
+++ b/deps/npm/lib/publish.js
@@ -3,7 +3,6 @@
const BB = require('bluebird')
const cacache = require('cacache')
-const cache = require('./cache')
const createReadStream = require('graceful-fs').createReadStream
const getPublishConfig = require('./utils/get-publish-config.js')
const lifecycle = BB.promisify(require('./utils/lifecycle.js'))
@@ -16,11 +15,9 @@ const pack = require('./pack')
const pacote = require('pacote')
const pacoteOpts = require('./config/pacote')
const path = require('path')
-const pipe = BB.promisify(require('mississippi').pipe)
const readJson = BB.promisify(require('read-package-json'))
const semver = require('semver')
const statAsync = BB.promisify(require('graceful-fs').stat)
-const writeStreamAtomic = require('fs-write-stream-atomic')
const readUserInfo = require('./utils/read-user-info.js')
publish.usage = 'npm publish [<tarball>|<folder>] [--tag <tag>] [--access <public|restricted>]' +
@@ -105,20 +102,11 @@ function publishFromPackage (arg) {
return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'fromPackage'}, (tmp) => {
const extracted = path.join(tmp, 'package')
const target = path.join(tmp, 'package.json')
- return cache.add(arg).then((info) => {
- const opts = pacoteOpts({integrity: info.integrity})
- return BB.all([
- pipe(
- cacache.get.stream.byDigest(opts.cache, info.integrity),
- writeStreamAtomic(target)
- ).then(() => target),
- pacote.extract(arg, extracted, opts).then(() => {
- return readJson(path.join(extracted, 'package.json'))
- })
- ]).spread((target, pkg) => {
- return upload(arg, pkg, false, target)
- })
- })
+ const opts = pacoteOpts()
+ return pacote.tarball.toFile(arg, target, opts)
+ .then(() => pacote.extract(arg, extracted, opts))
+ .then(() => readJson(path.join(extracted, 'package.json')))
+ .tap((pkg) => upload(arg, pkg, false, target))
})
}
diff --git a/deps/npm/lib/shrinkwrap.js b/deps/npm/lib/shrinkwrap.js
index 956a693646..ddfff2c681 100644
--- a/deps/npm/lib/shrinkwrap.js
+++ b/deps/npm/lib/shrinkwrap.js
@@ -21,6 +21,7 @@ const ssri = require('ssri')
const validate = require('aproba')
const writeFileAtomic = require('write-file-atomic')
const unixFormatPath = require('./utils/unix-format-path.js')
+const isRegistry = require('./utils/is-registry.js')
const PKGLOCK = 'package-lock.json'
const SHRINKWRAP = 'npm-shrinkwrap.json'
@@ -113,7 +114,7 @@ function shrinkwrapDeps (deps, top, tree, seen) {
if (child.fromBundle || child.isInLink) {
pkginfo.bundled = true
} else {
- if (requested.registry) {
+ if (isRegistry(requested)) {
pkginfo.resolved = child.package._resolved
}
// no integrity for git deps as integirty hashes are based on the
@@ -153,7 +154,7 @@ function sortModules (modules) {
function childVersion (top, child, req) {
if (req.type === 'directory' || req.type === 'file') {
return 'file:' + unixFormatPath(path.relative(top.path, child.package._resolved || req.fetchSpec))
- } else if (!req.registry && !child.fromBundle) {
+ } else if (!isRegistry(req) && !child.fromBundle) {
return child.package._resolved || req.saveSpec || req.rawSpec
} else {
return child.package.version
diff --git a/deps/npm/lib/token.js b/deps/npm/lib/token.js
index a182b633d2..2a3b65e6ad 100644
--- a/deps/npm/lib/token.js
+++ b/deps/npm/lib/token.js
@@ -15,17 +15,17 @@ module.exports = token
token.usage =
'npm token list\n' +
- 'npm token delete <tokenKey>\n' +
+ 'npm token revoke <tokenKey>\n' +
'npm token create [--read-only] [--cidr=list]\n'
-token.subcommands = ['list', 'delete', 'create']
+token.subcommands = ['list', 'revoke', 'create']
token.completion = function (opts, cb) {
var argv = opts.conf.argv.remain
switch (argv[2]) {
case 'list':
- case 'delete':
+ case 'revoke':
case 'create':
return cb(null, [])
default:
@@ -46,7 +46,7 @@ function token (args, cb) {
withCb(list(), cb)
break
case 'delete':
- case 'rel':
+ case 'revoke':
case 'remove':
case 'rm':
withCb(rm(args.slice(1)), cb)
@@ -127,7 +127,7 @@ function list (args) {
function rm (args) {
if (args.length === 0) {
- throw new Error('npm token delete <tokenKey>')
+ throw new Error('npm token revoke <tokenKey>')
}
const conf = config()
const toRemove = []
diff --git a/deps/npm/lib/update.js b/deps/npm/lib/update.js
index 2c1d1e82a8..efb56f5e41 100644
--- a/deps/npm/lib/update.js
+++ b/deps/npm/lib/update.js
@@ -1,11 +1,13 @@
+'use strict'
module.exports = update
-var url = require('url')
-var log = require('npmlog')
-var chain = require('slide').chain
-var npm = require('./npm.js')
-var Installer = require('./install.js').Installer
-var usage = require('./utils/usage')
+const url = require('url')
+const log = require('npmlog')
+const Bluebird = require('bluebird')
+const npm = require('./npm.js')
+const Installer = require('./install.js').Installer
+const usage = require('./utils/usage')
+const outdated = Bluebird.promisify(npm.commands.outdated)
update.usage = usage(
'update',
@@ -15,12 +17,16 @@ update.usage = usage(
update.completion = npm.commands.outdated.completion
function update (args, cb) {
- var dryrun = false
+ return update_(args).asCallback(cb)
+}
+
+function update_ (args) {
+ let dryrun = false
if (npm.config.get('dry-run')) dryrun = true
- npm.commands.outdated(args, true, function (er, rawOutdated) {
- if (er) return cb(er)
- var outdated = rawOutdated.map(function (ww) {
+ log.verbose('update', 'computing outdated modules to update')
+ return outdated(args, true).then((rawOutdated) => {
+ const outdated = rawOutdated.map(function (ww) {
return {
dep: ww[0],
depname: ww[1],
@@ -32,7 +38,7 @@ function update (args, cb) {
}
})
- var wanted = outdated.filter(function (ww) {
+ const wanted = outdated.filter(function (ww) {
if (ww.current === ww.wanted && ww.wanted !== ww.latest) {
log.verbose(
'outdated',
@@ -42,23 +48,25 @@ function update (args, cb) {
}
return ww.current !== ww.wanted && ww.latest !== 'linked'
})
- if (wanted.length === 0) return cb()
+ if (wanted.length === 0) return
log.info('outdated', 'updating', wanted)
- var toInstall = {}
+ const toInstall = {}
+
wanted.forEach(function (ww) {
// use the initial installation method (repo, tar, git) for updating
if (url.parse(ww.req).protocol) ww.what = ww.req
- var where = ww.dep.parent && ww.dep.parent.path || ww.dep.path
- if (toInstall[where]) {
- toInstall[where].push(ww.what)
- } else {
- toInstall[where] = [ww.what]
- }
+ const where = ww.dep.parent && ww.dep.parent.path || ww.dep.path
+ const isTransitive = !(ww.dep.requiredBy || []).some((p) => p.isTop)
+ const key = where + ':' + String(isTransitive)
+ if (!toInstall[key]) toInstall[key] = {where: where, opts: {saveOnlyLock: isTransitive}, what: []}
+ if (toInstall[key].what.indexOf(ww.what) === -1) toInstall[key].what.push(ww.what)
+ })
+ return Bluebird.each(Object.keys(toInstall), (key) => {
+ const deps = toInstall[key]
+ const inst = new Installer(deps.where, dryrun, deps.what, deps.opts)
+ return inst.run()
})
- chain(Object.keys(toInstall).map(function (where) {
- return [new Installer(where, dryrun, toInstall[where]), 'run']
- }), cb)
})
}
diff --git a/deps/npm/lib/utils/convert-line-endings.js b/deps/npm/lib/utils/convert-line-endings.js
deleted file mode 100644
index b05d328aac..0000000000
--- a/deps/npm/lib/utils/convert-line-endings.js
+++ /dev/null
@@ -1,49 +0,0 @@
-'use strict'
-
-const Transform = require('stream').Transform
-const Bluebird = require('bluebird')
-const fs = require('graceful-fs')
-const stat = Bluebird.promisify(fs.stat)
-const chmod = Bluebird.promisify(fs.chmod)
-const fsWriteStreamAtomic = require('fs-write-stream-atomic')
-
-module.exports.dos2Unix = dos2Unix
-
-function dos2Unix (file) {
- return stat(file).then((stats) => {
- let previousChunkEndedInCR = false
- return new Bluebird((resolve, reject) => {
- fs.createReadStream(file)
- .on('error', reject)
- .pipe(new Transform({
- transform: function (chunk, encoding, done) {
- let data = chunk.toString()
- if (previousChunkEndedInCR) {
- data = '\r' + data
- }
- if (data[data.length - 1] === '\r') {
- data = data.slice(0, -1)
- previousChunkEndedInCR = true
- } else {
- previousChunkEndedInCR = false
- }
- done(null, data.replace(/\r\n/g, '\n'))
- },
- flush: function (done) {
- if (previousChunkEndedInCR) {
- this.push('\r')
- }
- done()
- }
- }))
- .on('error', reject)
- .pipe(fsWriteStreamAtomic(file))
- .on('error', reject)
- .on('finish', function () {
- resolve(chmod(file, stats.mode))
- })
- })
- })
-}
-
-// could add unix2Dos and legacy Mac functions if need be
diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js
index 028a18bbb6..85504f5edc 100644
--- a/deps/npm/lib/utils/error-message.js
+++ b/deps/npm/lib/utils/error-message.js
@@ -67,49 +67,53 @@ function errorMessage (er) {
break
case 'EOTP':
- short.push(['', 'This operation requires a one-time password from your authenticator.'])
- detail.push([
- '',
- [
- 'You can provide a one-time password by passing --otp=<code> to the command you ran.',
- 'If you already provided a one-time password then it is likely that you either typoed',
- 'it, or it timed out. Please try again.'
- ].join('\n')
- ])
- break
-
case 'E401':
- // npm ERR! code E401
- // npm ERR! Unable to authenticate, need: Basic
- if (er.headers && er.headers['www-authenticate']) {
- const auth = er.headers['www-authenticate']
- if (auth.indexOf('Bearer') !== -1) {
- short.push(['', 'Unable to authenticate, your authentication token seems to be invalid.'])
- detail.push([
- '',
- [
- 'To correct this please trying logging in again with:',
- ' npm login'
- ].join('\n')
- ])
- break
- } else if (auth.indexOf('Basic') !== -1) {
- short.push(['', 'Incorrect or missing password.'])
- detail.push([
- '',
- [
- 'If you were trying to login, change your password, create an',
- 'authentication token or enable two-factor authentication then',
- 'that means you likely typed your password in incorectly.',
- 'Please try again, or recover your password at:',
- ' https://www.npmjs.com/forgot',
+ // the E401 message checking is a hack till we replace npm-registry-client with something
+ // OTP aware.
+ if (er.code === 'EOTP' || (er.code === 'E401' && /one-time pass/.test(er.message))) {
+ short.push(['', 'This operation requires a one-time password from your authenticator.'])
+ detail.push([
+ '',
+ [
+ 'You can provide a one-time password by passing --otp=<code> to the command you ran.',
+ 'If you already provided a one-time password then it is likely that you either typoed',
+ 'it, or it timed out. Please try again.'
+ ].join('\n')
+ ])
+ break
+ } else {
+ // npm ERR! code E401
+ // npm ERR! Unable to authenticate, need: Basic
+ if (er.headers && er.headers['www-authenticate']) {
+ const auth = er.headers['www-authenticate'].map((au) => au.split(/,\s*/))[0] || []
+ if (auth.indexOf('Bearer') !== -1) {
+ short.push(['', 'Unable to authenticate, your authentication token seems to be invalid.'])
+ detail.push([
+ '',
+ [
+ 'To correct this please trying logging in again with:',
+ ' npm login'
+ ].join('\n')
+ ])
+ break
+ } else if (auth.indexOf('Basic') !== -1) {
+ short.push(['', 'Incorrect or missing password.'])
+ detail.push([
'',
- 'If you were doing some other operation then your saved credentials are',
- 'probably out of date. To correct this please try logging in again with:',
- ' npm login'
- ].join('\n')
- ])
- break
+ [
+ 'If you were trying to login, change your password, create an',
+ 'authentication token or enable two-factor authentication then',
+ 'that means you likely typed your password in incorrectly.',
+ 'Please try again, or recover your password at:',
+ ' https://www.npmjs.com/forgot',
+ '',
+ 'If you were doing some other operation then your saved credentials are',
+ 'probably out of date. To correct this please try logging in again with:',
+ ' npm login'
+ ].join('\n')
+ ])
+ break
+ }
}
}
diff --git a/deps/npm/lib/utils/gently-rm.js b/deps/npm/lib/utils/gently-rm.js
index 7253e873c6..16d0aa9bd7 100644
--- a/deps/npm/lib/utils/gently-rm.js
+++ b/deps/npm/lib/utils/gently-rm.js
@@ -3,20 +3,8 @@
exports = module.exports = gentlyRm
-var resolve = require('path').resolve
-var dirname = require('path').dirname
-var normalize = require('path').normalize
-var validate = require('aproba')
-var log = require('npmlog')
-var lstat = require('graceful-fs').lstat
-var readlink = require('graceful-fs').readlink
-var isInside = require('path-is-inside')
-var vacuum = require('fs-vacuum')
-var chain = require('slide').chain
-var asyncMap = require('slide').asyncMap
-var readCmdShim = require('read-cmd-shim')
-var iferr = require('iferr')
-var npm = require('../npm.js')
+var gentleFS = require('gentle-fs')
+var gentleFSOpts = require('../config/gentle-fs.js')
function gentlyRm (target, gently, base, cb) {
if (!cb) {
@@ -29,258 +17,5 @@ function gentlyRm (target, gently, base, cb) {
gently = false
}
- // never rm the root, prefix, or bin dirs
- //
- // globals included because of `npm link` -- as far as the package
- // requesting the link is concerned, the linked package is always
- // installed globally
- var prefixes = [
- npm.prefix,
- npm.globalPrefix,
- npm.dir,
- npm.root,
- npm.globalDir,
- npm.bin,
- npm.globalBin
- ]
-
- var targetPath = normalize(resolve(npm.prefix, target))
- if (prefixes.indexOf(targetPath) !== -1) {
- return cb(new Error('May not delete: ' + targetPath))
- }
- var options = { }
- if (npm.config.get('force') || !gently) options.purge = true
- if (base) options.base = normalize(resolve(npm.prefix, base))
-
- if (!gently) {
- return vacuum(targetPath, options, cb)
- }
-
- var parent = options.base = options.base || normalize(npm.prefix)
-
- // Do all the async work we'll need to do in order to tell if this is a
- // safe operation
- chain([
- [isEverInside, parent, prefixes],
- [readLinkOrShim, targetPath],
- [isEverInside, targetPath, prefixes],
- [isEverInside, targetPath, [parent]]
- ], function (er, results) {
- if (er) {
- if (er.code === 'ENOENT') return cb()
- return cb(er)
- }
- var parentInfo = {
- path: parent,
- managed: results[0]
- }
- var targetInfo = {
- path: targetPath,
- symlink: results[1],
- managed: results[2],
- inParent: results[3]
- }
-
- isSafeToRm(parentInfo, targetInfo, iferr(cb, thenRemove))
-
- function thenRemove (toRemove, removeBase) {
- if (!toRemove) return cb()
- if (removeBase) options.base = removeBase
- return vacuum(toRemove, options, cb)
- }
- })
-}
-
-exports._isSafeToRm = isSafeToRm
-function isSafeToRm (parent, target, cb) {
- log.silly('gentlyRm', 'parent.path =', parent.path)
- log.silly('gentlyRm', 'parent.managed =',
- parent.managed && parent.managed.target + ' is in ' + parent.managed.path)
- log.silly('gentlyRm', 'target.path = ', target.path)
- log.silly('gentlyRm', 'target.symlink =', target.symlink)
- log.silly('gentlyRm', 'target.managed =',
- target.managed && target.managed.target + ' is in ' + target.managed.path)
- log.silly('gentlyRm', 'target.inParent = ', target.inParent)
-
- // The parent directory or something it symlinks to must eventually be in
- // a folder that npm maintains.
- if (!parent.managed) {
- log.info('gentlyRm', parent.path,
- 'is not contained in any diretory npm is known to control or ' +
- 'any place they link to')
- return cb(clobberFail(target.path, 'containing path ' + parent.path +
- " isn't under npm's control"))
- }
-
- // The target or something it symlinks to must eventually be in the parent
- // or something the parent symlinks to
- if (target.inParent) {
- var actualTarget = target.inParent.target
- var targetsParent = target.inParent.path
- // if the target.path was what we found in some version of parent, remove
- // using that parent as the base
- if (target.path === actualTarget) {
- return cb(null, target.path, targetsParent)
- } else {
- // If something the target.path links to was what was found, just
- // remove target.path in the location it was found.
- return cb(null, target.path, dirname(target.path))
- }
- }
-
- // If the target is in a managed directory and is in a symlink, but was
- // not in our parent that usually means someone else installed a bin file
- // with the same name as one of our bin files.
- if (target.managed && target.symlink) {
- log.warn('gentlyRm', 'not removing', target.path,
- "as it wasn't installed by", parent.path)
- return cb()
- }
-
- if (target.symlink) {
- return cb(clobberFail(target.path, target.symlink +
- ' symlink target is not controlled by npm ' + parent.path))
- } else {
- return cb(clobberFail(target.path, 'is outside ' + parent.path +
- ' and not a link'))
- }
-}
-
-function clobberFail (target, msg) {
- validate('SS', arguments)
- var er = new Error('Refusing to delete ' + target + ': ' + msg)
- er.code = 'EEXIST'
- er.path = target
- return er
-}
-
-function isENOENT (err) {
- return err && err.code === 'ENOENT'
-}
-
-function notENOENT (err) {
- return !isENOENT(err)
-}
-
-function skipENOENT (cb) {
- return function (err, value) {
- if (isENOENT(err)) {
- return cb(null, false)
- } else {
- return cb(err, value)
- }
- }
-}
-
-function errorsToValues (fn) {
- return function () {
- var args = Array.prototype.slice.call(arguments)
- var cb = args.pop()
- args.push(function (err, value) {
- if (err) {
- return cb(null, err)
- } else {
- return cb(null, value)
- }
- })
- fn.apply(null, args)
- }
-}
-
-function isNotError (value) {
- return !(value instanceof Error)
-}
-
-exports._isEverInside = isEverInside
-// return the first of path, where target (or anything it symlinks to)
-// isInside the path (or anything it symlinks to)
-function isEverInside (target, paths, cb) {
- validate('SAF', arguments)
- asyncMap(paths, errorsToValues(readAllLinks), iferr(cb, function (resolvedPaths) {
- var errorFree = resolvedPaths.filter(isNotError)
- if (errorFree.length === 0) {
- var badErrors = resolvedPaths.filter(notENOENT)
- if (badErrors.length === 0) {
- return cb(null, false)
- } else {
- return cb(badErrors[0])
- }
- }
- readAllLinks(target, iferr(skipENOENT(cb), function (targets) {
- cb(null, areAnyInsideAny(targets, errorFree))
- }))
- }))
-}
-
-exports._areAnyInsideAny = areAnyInsideAny
-// Return the first path found that any target is inside
-function areAnyInsideAny (targets, paths) {
- validate('AA', arguments)
- var toCheck = []
- paths.forEach(function (path) {
- targets.forEach(function (target) {
- toCheck.push([target, path])
- })
- })
- for (var ii = 0; ii < toCheck.length; ++ii) {
- var target = toCheck[ii][0]
- var path = toCheck[ii][1]
- var inside = isInside(target, path)
- if (!inside) log.silly('isEverInside', target, 'is not inside', path)
- if (inside && path) return inside && path && {target: target, path: path}
- }
- return false
-}
-
-exports._readAllLinks = readAllLinks
-// resolves chains of symlinks of unlimited depth, returning a list of paths
-// it's seen in the process when it hits either a symlink cycle or a
-// non-symlink
-function readAllLinks (path, cb) {
- validate('SF', arguments)
- var seen = {}
- _readAllLinks(path)
-
- function _readAllLinks (path) {
- if (seen[path]) return cb(null, Object.keys(seen))
- seen[path] = true
- resolveSymlink(path, iferr(cb, _readAllLinks))
- }
-}
-
-exports._resolveSymlink = resolveSymlink
-var resolvedPaths = {}
-function resolveSymlink (symlink, cb) {
- validate('SF', arguments)
- var cached = resolvedPaths[symlink]
- if (cached) return cb(null, cached)
-
- readLinkOrShim(symlink, iferr(cb, function (symlinkTarget) {
- if (symlinkTarget) {
- resolvedPaths[symlink] = resolve(dirname(symlink), symlinkTarget)
- } else {
- resolvedPaths[symlink] = symlink
- }
- return cb(null, resolvedPaths[symlink])
- }))
-}
-
-exports._readLinkOrShim = readLinkOrShim
-function readLinkOrShim (path, cb) {
- validate('SF', arguments)
- lstat(path, iferr(cb, function (stat) {
- if (stat.isSymbolicLink()) {
- readlink(path, cb)
- } else {
- readCmdShim(path, function (er, source) {
- if (!er) return cb(null, source)
- // lstat wouldn't return an error on these, so we don't either.
- if (er.code === 'ENOTASHIM' || er.code === 'EISDIR') {
- return cb(null, null)
- } else {
- return cb(er)
- }
- })
- }
- }))
+ return gentleFS.rm(target, gentleFSOpts(gently, base), cb)
}
diff --git a/deps/npm/lib/utils/is-hashbang-file.js b/deps/npm/lib/utils/is-hashbang-file.js
deleted file mode 100644
index f1677381fa..0000000000
--- a/deps/npm/lib/utils/is-hashbang-file.js
+++ /dev/null
@@ -1,19 +0,0 @@
-'use strict'
-const Bluebird = require('bluebird')
-const fs = require('graceful-fs')
-const open = Bluebird.promisify(fs.open)
-const close = Bluebird.promisify(fs.close)
-
-module.exports = isHashbangFile
-
-function isHashbangFile (file) {
- return open(file, 'r').then((fileHandle) => {
- return new Bluebird((resolve, reject) => {
- fs.read(fileHandle, new Buffer(new Array(2)), 0, 2, 0, function (err, bytesRead, buffer) {
- close(fileHandle).then(() => {
- resolve(!err && buffer.toString() === '#!')
- }).catch(reject)
- })
- })
- })
-}
diff --git a/deps/npm/lib/utils/is-registry.js b/deps/npm/lib/utils/is-registry.js
new file mode 100644
index 0000000000..e5f08e16a0
--- /dev/null
+++ b/deps/npm/lib/utils/is-registry.js
@@ -0,0 +1,11 @@
+'use strict'
+module.exports = isRegistry
+
+function isRegistry (req) {
+ if (req == null) return false
+ // modern metadata
+ if ('registry' in req) return req.registry
+ // legacy metadata
+ if (req.type === 'range' || req.type === 'version' || req.type === 'tag') return true
+ return false
+}
diff --git a/deps/npm/lib/utils/link.js b/deps/npm/lib/utils/link.js
index 15331740a4..c264248144 100644
--- a/deps/npm/lib/utils/link.js
+++ b/deps/npm/lib/utils/link.js
@@ -1,73 +1,8 @@
module.exports = link
-link.ifExists = linkIfExists
-var fs = require('graceful-fs')
-var chain = require('slide').chain
-var mkdir = require('mkdirp')
-var rm = require('./gently-rm.js')
-var path = require('path')
-var npm = require('../npm.js')
-
-function linkIfExists (from, to, gently, cb) {
- fs.stat(from, function (er) {
- if (er) return cb()
- fs.readlink(to, function (er, fromOnDisk) {
- // if the link already exists and matches what we would do,
- // we don't need to do anything
- if (!er) {
- var toDir = path.dirname(to)
- var absoluteFrom = path.resolve(toDir, from)
- var absoluteFromOnDisk = path.resolve(toDir, fromOnDisk)
- if (absoluteFrom === absoluteFromOnDisk) return cb()
- }
- link(from, to, gently, cb)
- })
- })
-}
-
-function resolveIfSymlink (maybeSymlinkPath, cb) {
- fs.lstat(maybeSymlinkPath, function (err, stat) {
- if (err) return cb.apply(this, arguments)
- if (!stat.isSymbolicLink()) return cb(null, maybeSymlinkPath)
- fs.readlink(maybeSymlinkPath, cb)
- })
-}
-
-function ensureFromIsNotSource (from, to, cb) {
- resolveIfSymlink(from, function (err, fromDestination) {
- if (err) return cb.apply(this, arguments)
- if (path.resolve(path.dirname(from), fromDestination) === path.resolve(to)) {
- return cb(new Error('Link target resolves to the same directory as link source: ' + to))
- }
- cb.apply(this, arguments)
- })
-}
+var gentleFS = require('gentle-fs')
+var gentleFSOpts = require('../config/gentle-fs.js')
function link (from, to, gently, abs, cb) {
- if (typeof cb !== 'function') {
- cb = abs
- abs = false
- }
- if (typeof cb !== 'function') {
- cb = gently
- gently = null
- }
- if (npm.config.get('force')) gently = false
-
- to = path.resolve(to)
- var toDir = path.dirname(to)
- var absTarget = path.resolve(toDir, from)
- var relativeTarget = path.relative(toDir, absTarget)
- var target = abs ? absTarget : relativeTarget
-
- chain(
- [
- [ensureFromIsNotSource, absTarget, to],
- [fs, 'stat', absTarget],
- [rm, to, gently, path.dirname(to)],
- [mkdir, path.dirname(to)],
- [fs, 'symlink', target, to, 'junction']
- ],
- cb
- )
+ return gentleFS.link(from, to, gentleFSOpts(gently, undefined, abs), cb)
}
diff --git a/deps/npm/lib/view.js b/deps/npm/lib/view.js
index 2f06aea624..e0904048df 100644
--- a/deps/npm/lib/view.js
+++ b/deps/npm/lib/view.js
@@ -264,7 +264,7 @@ function printData (data, name, cb) {
if (npm.config.get('json')) {
msgJson[msgJson.length - 1][f] = d
} else {
- d = util.inspect(d, false, 5, npm.color)
+ d = util.inspect(d, { showHidden: false, depth: 5, colors: npm.color, maxArrayLength: null })
}
} else if (typeof d === 'string' && npm.config.get('json')) {
d = JSON.stringify(d)