author    Myles Borins <myles.borins@gmail.com>  2017-10-26 22:35:25 -0400
committer Myles Borins <myles.borins@gmail.com>  2017-10-29 21:32:15 -0400
commit    ace4fe566fc3af4876c7458f983feeb5eae3df26 (patch)
tree      458d847e9bd56199cd0d8b34cec126c7410fb6ca /deps/npm/lib
parent    64168eb9b43e30e4c0b986c9b29c41be63e85df6 (diff)
deps: update npm to 5.5.1
Closes: https://github.com/nodejs/node/pull/16280
PR-URL: https://github.com/nodejs/node/pull/16509
Fixes: https://github.com/nodejs/node/issues/14161
Reviewed-By: Daijiro Wachi <daijiro.wachi@gmail.com>
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Michaël Zasso <targos@protonmail.com>
Diffstat (limited to 'deps/npm/lib')
-rw-r--r--  deps/npm/lib/auth/legacy.js  186
-rw-r--r--  deps/npm/lib/build.js  21
-rw-r--r--  deps/npm/lib/config.js  149
-rw-r--r--  deps/npm/lib/config/cmd-list.js  2
-rw-r--r--  deps/npm/lib/config/defaults.js  12
-rw-r--r--  deps/npm/lib/config/lifecycle.js  30
-rw-r--r--  deps/npm/lib/help.js  6
-rw-r--r--  deps/npm/lib/install/action/extract.js  32
-rw-r--r--  deps/npm/lib/install/action/install.js  2
-rw-r--r--  deps/npm/lib/install/action/move.js  12
-rw-r--r--  deps/npm/lib/install/action/postinstall.js  2
-rw-r--r--  deps/npm/lib/install/action/preinstall.js  2
-rw-r--r--  deps/npm/lib/install/action/prepare.js  4
-rw-r--r--  deps/npm/lib/install/action/unbuild.js  6
-rw-r--r--  deps/npm/lib/install/actions.js  1
-rw-r--r--  deps/npm/lib/install/deps.js  22
-rw-r--r--  deps/npm/lib/install/update-package-json.js  7
-rw-r--r--  deps/npm/lib/install/validate-tree.js  2
-rw-r--r--  deps/npm/lib/ls.js  2
-rw-r--r--  deps/npm/lib/npm.js  15
-rw-r--r--  deps/npm/lib/outdated.js  10
-rw-r--r--  deps/npm/lib/pack.js  22
-rw-r--r--  deps/npm/lib/ping.js  8
-rw-r--r--  deps/npm/lib/profile.js  296
-rw-r--r--  deps/npm/lib/publish.js  9
-rw-r--r--  deps/npm/lib/restart.js  2
-rw-r--r--  deps/npm/lib/run-script.js  2
-rw-r--r--  deps/npm/lib/shrinkwrap.js  12
-rw-r--r--  deps/npm/lib/start.js  2
-rw-r--r--  deps/npm/lib/stop.js  2
-rw-r--r--  deps/npm/lib/test.js  8
-rw-r--r--  deps/npm/lib/token.js  211
-rw-r--r--  deps/npm/lib/unbuild.js  10
-rw-r--r--  deps/npm/lib/utils/convert-line-endings.js  49
-rw-r--r--  deps/npm/lib/utils/did-you-mean.js  20
-rw-r--r--  deps/npm/lib/utils/error-handler.js  8
-rw-r--r--  deps/npm/lib/utils/error-message.js  48
-rw-r--r--  deps/npm/lib/utils/is-hashbang-file.js  19
-rw-r--r--  deps/npm/lib/utils/lifecycle-cmd.js  18
-rw-r--r--  deps/npm/lib/utils/lifecycle.js  462
-rw-r--r--  deps/npm/lib/utils/map-to-registry.js  3
-rw-r--r--  deps/npm/lib/utils/module-name.js  4
-rw-r--r--  deps/npm/lib/utils/pulse-till-done.js  40
-rw-r--r--  deps/npm/lib/utils/read-user-info.js  65
-rw-r--r--  deps/npm/lib/utils/tar.js  454
-rw-r--r--  deps/npm/lib/utils/unsupported.js  26
-rw-r--r--  deps/npm/lib/version.js  10
47 files changed, 1101 insertions(+), 1234 deletions(-)
diff --git a/deps/npm/lib/auth/legacy.js b/deps/npm/lib/auth/legacy.js
index 2fa4a26e35..4c75ca6731 100644
--- a/deps/npm/lib/auth/legacy.js
+++ b/deps/npm/lib/auth/legacy.js
@@ -1,142 +1,52 @@
-var log = require('npmlog')
-var npm = require('../npm.js')
-var read = require('read')
-var userValidate = require('npm-user-validate')
-var output = require('../utils/output')
-var chain = require('slide').chain
+'use strict'
+const read = require('../utils/read-user-info.js')
+const profile = require('npm-profile')
+const log = require('npmlog')
+const npm = require('../npm.js')
+const output = require('../utils/output.js')
module.exports.login = function login (creds, registry, scope, cb) {
- var c = {
- u: creds.username || '',
- p: creds.password || '',
- e: creds.email || ''
- }
- var u = {}
-
- chain([
- [readUsername, c, u],
- [readPassword, c, u],
- [readEmail, c, u],
- [save, c, u, registry, scope]
- ], function (err, res) {
- cb(err, res && res[res.length - 1])
- })
-}
-
-function readUsername (c, u, cb) {
- var v = userValidate.username
- read({prompt: 'Username: ', default: c.u || ''}, function (er, un) {
- if (er) {
- return cb(er.message === 'cancelled' ? er.message : er)
- }
-
- // make sure it's valid. we have to do this here, because
- // couchdb will only ever say "bad password" with a 401 when
- // you try to PUT a _users record that the validate_doc_update
- // rejects for *any* reason.
-
- if (!un) {
- return readUsername(c, u, cb)
- }
-
- var error = v(un)
- if (error) {
- log.warn(error.message)
- return readUsername(c, u, cb)
- }
-
- c.changed = c.u !== un
- u.u = un
- cb(er)
- })
-}
-
-function readPassword (c, u, cb) {
- var v = userValidate.pw
-
- var prompt
- if (c.p && !c.changed) {
- prompt = 'Password: (or leave unchanged) '
- } else {
- prompt = 'Password: '
- }
-
- read({prompt: prompt, silent: true}, function (er, pw) {
- if (er) {
- return cb(er.message === 'cancelled' ? er.message : er)
- }
-
- if (!c.changed && pw === '') {
- // when the username was not changed,
- // empty response means "use the old value"
- pw = c.p
- }
-
- if (!pw) {
- return readPassword(c, u, cb)
- }
-
- var error = v(pw)
- if (error) {
- log.warn(error.message)
- return readPassword(c, u, cb)
- }
-
- c.changed = c.changed || c.p !== pw
- u.p = pw
- cb(er)
- })
-}
-
-function readEmail (c, u, cb) {
- var v = userValidate.email
- var r = { prompt: 'Email: (this IS public) ', default: c.e || '' }
- read(r, function (er, em) {
- if (er) {
- return cb(er.message === 'cancelled' ? er.message : er)
- }
-
- if (!em) {
- return readEmail(c, u, cb)
- }
-
- var error = v(em)
- if (error) {
- log.warn(error.message)
- return readEmail(c, u, cb)
- }
-
- u.e = em
- cb(er)
- })
-}
-
-function save (c, u, registry, scope, cb) {
- var params = {
- auth: {
- username: u.u,
- password: u.p,
- email: u.e
- }
- }
- npm.registry.adduser(registry, params, function (er, doc) {
- if (er) return cb(er)
-
- var newCreds = (doc && doc.token)
- ? {
- token: doc.token
- }
- : {
- username: u.u,
- password: u.p,
- email: u.e,
- alwaysAuth: npm.config.get('always-auth')
- }
-
- log.info('adduser', 'Authorized user %s', u.u)
- var scopeMessage = scope ? ' to scope ' + scope : ''
- output('Logged in as %s%s on %s.', u.u, scopeMessage, registry)
-
+ let username = creds.username || ''
+ let password = creds.password || ''
+ let email = creds.email || ''
+ const auth = {}
+ if (npm.config.get('otp')) auth.otp = npm.config.get('otp')
+
+ return read.username('Username:', username, {log: log}).then((u) => {
+ username = u
+ return read.password('Password: ', password)
+ }).then((p) => {
+ password = p
+ return read.email('Email: (this IS public) ', email, {log: log})
+ }).then((e) => {
+ email = e
+ return profile.login(username, password, {registry: registry, auth: auth}).catch((err) => {
+ if (err.code === 'EOTP') throw err
+ return profile.adduser(username, email, password, {registry: registry})
+ }).catch((err) => {
+ if (err.code === 'EOTP' && !auth.otp) {
+ return read.otp('Authenicator provided OTP:').then((otp) => {
+ auth.otp = otp
+ return profile.login(username, password, {registry: registry, auth: auth})
+ })
+ } else {
+ throw err
+ }
+ })
+ }).then((result) => {
+ const newCreds = {}
+ if (result && result.token) {
+ newCreds.token = result.token
+ } else {
+ newCreds.username = username
+ newCreds.password = password
+ newCreds.email = email
+ newCreds.alwaysAuth = npm.config.get('always-auth')
+ }
+
+ log.info('adduser', 'Authorized user %s', username)
+ const scopeMessage = scope ? ' to scope ' + scope : ''
+ output('Logged in as %s%s on %s.', username, scopeMessage, registry)
cb(null, newCreds)
- })
+ }).catch(cb)
}
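Note: the new login flow above falls back from profile.login() to profile.adduser() and retries exactly once when the registry answers with EOTP. A minimal standalone sketch of that retry pattern; attemptLogin and promptForOtp are hypothetical stand-ins for profile.login() and read.otp(), not part of this patch:

    function loginWithOtpRetry (attemptLogin, promptForOtp, auth) {
      return attemptLogin(auth).catch((err) => {
        // only retry for OTP errors, and only if no OTP was sent already
        if (err.code !== 'EOTP' || auth.otp) throw err
        return promptForOtp().then((otp) => attemptLogin(Object.assign({}, auth, {otp: otp})))
      })
    }
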
diff --git a/deps/npm/lib/build.js b/deps/npm/lib/build.js
index 44ac40a007..6a788bc857 100644
--- a/deps/npm/lib/build.js
+++ b/deps/npm/lib/build.js
@@ -18,6 +18,8 @@ var link = require('./utils/link.js')
var linkIfExists = link.ifExists
var cmdShim = require('cmd-shim')
var cmdShimIfExists = cmdShim.ifExists
+var isHashbangFile = require('./utils/is-hashbang-file.js')
+var dos2Unix = require('./utils/convert-line-endings.js').dos2Unix
var asyncMap = require('slide').asyncMap
var ini = require('ini')
var writeFile = require('write-file-atomic')
@@ -187,13 +189,18 @@ function linkBins (pkg, folder, parent, gtop, cb) {
if (er && er.code === 'ENOENT' && npm.config.get('ignore-scripts')) {
return cb()
}
- if (er || !gtop) return cb(er)
- var dest = path.resolve(binRoot, b)
- var out = npm.config.get('parseable')
- ? dest + '::' + src + ':BINFILE'
- : dest + ' -> ' + src
- if (!npm.config.get('json') && !npm.config.get('parseable')) output(out)
- cb()
+ if (er) return cb(er)
+ isHashbangFile(src).then((isHashbang) => {
+ if (isHashbang) return dos2Unix(src)
+ }).then(() => {
+ if (!gtop) return cb()
+ var dest = path.resolve(binRoot, b)
+ var out = npm.config.get('parseable')
+ ? dest + '::' + src + ':BINFILE'
+ : dest + ' -> ' + src
+ if (!npm.config.get('json') && !npm.config.get('parseable')) output(out)
+ cb()
+ }).catch(cb)
})
}
)
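Note: the two helpers wired into linkBins() above are consumed roughly as follows (a sketch, assuming it is required from inside deps/npm/lib):

    var isHashbangFile = require('./utils/is-hashbang-file.js')
    var dos2Unix = require('./utils/convert-line-endings.js').dos2Unix

    function normalizeBinFile (src) {
      // only rewrite CRLF endings when the file starts with '#!', where a
      // trailing \r on the first line breaks interpreter lookup on Unix
      return isHashbangFile(src).then(function (isHashbang) {
        if (isHashbang) return dos2Unix(src)
      })
    }
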
diff --git a/deps/npm/lib/config.js b/deps/npm/lib/config.js
index 0426546274..d260c04a54 100644
--- a/deps/npm/lib/config.js
+++ b/deps/npm/lib/config.js
@@ -19,7 +19,7 @@ config.usage = usage(
'npm config set <key> <value>' +
'\nnpm config get [<key>]' +
'\nnpm config delete <key>' +
- '\nnpm config list' +
+ '\nnpm config list [--json]' +
'\nnpm config edit' +
'\nnpm set <key> <value>' +
'\nnpm get [<key>]'
@@ -45,9 +45,11 @@ config.completion = function (opts, cb) {
case 'rm':
return cb(null, Object.keys(types))
case 'edit':
- case 'list': case 'ls':
+ case 'list':
+ case 'ls':
+ return cb(null, [])
+ default:
return cb(null, [])
- default: return cb(null, [])
}
}
@@ -57,12 +59,21 @@ config.completion = function (opts, cb) {
function config (args, cb) {
var action = args.shift()
switch (action) {
- case 'set': return set(args[0], args[1], cb)
- case 'get': return get(args[0], cb)
- case 'delete': case 'rm': case 'del': return del(args[0], cb)
- case 'list': case 'ls': return list(cb)
- case 'edit': return edit(cb)
- default: return unknown(action, cb)
+ case 'set':
+ return set(args[0], args[1], cb)
+ case 'get':
+ return get(args[0], cb)
+ case 'delete':
+ case 'rm':
+ case 'del':
+ return del(args[0], cb)
+ case 'list':
+ case 'ls':
+ return npm.config.get('json') ? listJson(cb) : list(cb)
+ case 'edit':
+ return edit(cb)
+ default:
+ return unknown(action, cb)
}
}
@@ -159,15 +170,49 @@ function sort (a, b) {
}
function publicVar (k) {
- return !(k.charAt(0) === '_' ||
- k.indexOf(':_') !== -1 ||
- types[k] !== types[k])
+ return !(k.charAt(0) === '_' || k.indexOf(':_') !== -1)
}
function getKeys (data) {
return Object.keys(data).filter(publicVar).sort(sort)
}
+function listJson (cb) {
+ const publicConf = npm.config.keys.reduce((publicConf, k) => {
+ var value = npm.config.get(k)
+
+ if (publicVar(k) &&
+ // argv is not really config, it's command config
+ k !== 'argv' &&
+ // logstream is a Stream, and would otherwise produce circular refs
+ k !== 'logstream') publicConf[k] = value
+
+ return publicConf
+ }, {})
+
+ output(JSON.stringify(publicConf, null, 2))
+ return cb()
+}
+
+function listFromSource (title, conf, long) {
+ var confKeys = getKeys(conf)
+ var msg = ''
+
+ if (confKeys.length) {
+ msg += '; ' + title + '\n'
+ confKeys.forEach(function (k) {
+ var val = JSON.stringify(conf[k])
+ if (conf[k] !== npm.config.get(k)) {
+ if (!long) return
+ msg += '; ' + k + ' = ' + val + ' (overridden)\n'
+ } else msg += k + ' = ' + val + '\n'
+ })
+ msg += '\n'
+ }
+
+ return msg
+}
+
function list (cb) {
var msg = ''
var long = npm.config.get('long')
@@ -185,92 +230,22 @@ function list (cb) {
}
// env configs
- var env = npm.config.sources.env.data
- var envKeys = getKeys(env)
- if (envKeys.length) {
- msg += '; environment configs\n'
- envKeys.forEach(function (k) {
- if (env[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' +
- JSON.stringify(env[k]) + ' (overridden)\n'
- } else msg += k + ' = ' + JSON.stringify(env[k]) + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('environment configs', npm.config.sources.env.data, long)
// project config file
var project = npm.config.sources.project
- var pconf = project.data
- var ppath = project.path
- var pconfKeys = getKeys(pconf)
- if (pconfKeys.length) {
- msg += '; project config ' + ppath + '\n'
- pconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(pconf[k])
- if (pconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('project config ' + project.path, project.data, long)
// user config file
- var uconf = npm.config.sources.user.data
- var uconfKeys = getKeys(uconf)
- if (uconfKeys.length) {
- msg += '; userconfig ' + npm.config.get('userconfig') + '\n'
- uconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(uconf[k])
- if (uconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('userconfig ' + npm.config.get('userconfig'), npm.config.sources.user.data, long)
// global config file
- var gconf = npm.config.sources.global.data
- var gconfKeys = getKeys(gconf)
- if (gconfKeys.length) {
- msg += '; globalconfig ' + npm.config.get('globalconfig') + '\n'
- gconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(gconf[k])
- if (gconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('globalconfig ' + npm.config.get('globalconfig'), npm.config.sources.global.data, long)
// builtin config file
var builtin = npm.config.sources.builtin || {}
if (builtin && builtin.data) {
- var bconf = builtin.data
- var bpath = builtin.path
- var bconfKeys = getKeys(bconf)
- if (bconfKeys.length) {
- msg += '; builtin config ' + bpath + '\n'
- bconfKeys.forEach(function (k) {
- var val = (k.charAt(0) === '_')
- ? '---sekretz---'
- : JSON.stringify(bconf[k])
- if (bconf[k] !== npm.config.get(k)) {
- if (!long) return
- msg += '; ' + k + ' = ' + val + ' (overridden)\n'
- } else msg += k + ' = ' + val + '\n'
- })
- msg += '\n'
- }
+ msg += listFromSource('builtin config ' + builtin.path, builtin.data, long)
}
// only show defaults if --long
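Note: the key filter behind the new `npm config list --json` output boils down to the following (a sketch over a plain object of already-resolved config values):

    function publicConfig (conf) {
      return Object.keys(conf).reduce(function (acc, k) {
        var isPrivate = k.charAt(0) === '_' || k.indexOf(':_') !== -1
        // argv is command state, not config; logstream is a Stream and would
        // create circular references when stringified
        if (!isPrivate && k !== 'argv' && k !== 'logstream') acc[k] = conf[k]
        return acc
      }, {})
    }
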
diff --git a/deps/npm/lib/config/cmd-list.js b/deps/npm/lib/config/cmd-list.js
index f2d5fab17d..49c445a4f0 100644
--- a/deps/npm/lib/config/cmd-list.js
+++ b/deps/npm/lib/config/cmd-list.js
@@ -74,6 +74,8 @@ var cmdList = [
'team',
'deprecate',
'shrinkwrap',
+ 'token',
+ 'profile',
'help',
'help-search',
diff --git a/deps/npm/lib/config/defaults.js b/deps/npm/lib/config/defaults.js
index 93bac84a61..35617fd638 100644
--- a/deps/npm/lib/config/defaults.js
+++ b/deps/npm/lib/config/defaults.js
@@ -128,6 +128,8 @@ Object.defineProperty(exports, 'defaults', {get: function () {
cert: null,
+ cidr: null,
+
color: true,
depth: Infinity,
description: true,
@@ -144,6 +146,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
git: 'git',
'git-tag-version': true,
+ 'commit-hooks': true,
global: false,
globalconfig: path.resolve(globalPrefix, 'etc', 'npmrc'),
@@ -178,6 +181,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
'onload-script': false,
only: null,
optional: true,
+ otp: null,
'package-lock': true,
parseable: false,
'prefer-offline': false,
@@ -185,13 +189,13 @@ Object.defineProperty(exports, 'defaults', {get: function () {
prefix: globalPrefix,
production: process.env.NODE_ENV === 'production',
'progress': !process.env.TRAVIS && !process.env.CI,
- 'proprietary-attribs': true,
proxy: null,
'https-proxy': null,
'user-agent': 'npm/{npm-version} ' +
'node/{node-version} ' +
'{platform} ' +
'{arch}',
+ 'read-only': false,
'rebuild-bundle': true,
registry: 'https://registry.npmjs.org/',
rollback: true,
@@ -257,6 +261,7 @@ exports.types = {
'cache-max': Number,
'cache-min': Number,
cert: [null, String],
+ cidr: [null, String, Array],
color: ['always', Boolean],
depth: Number,
description: Boolean,
@@ -271,6 +276,7 @@ exports.types = {
'fetch-retry-maxtimeout': Number,
git: String,
'git-tag-version': Boolean,
+ 'commit-hooks': Boolean,
global: Boolean,
globalconfig: path,
'global-style': Boolean,
@@ -308,14 +314,15 @@ exports.types = {
only: [null, 'dev', 'development', 'prod', 'production'],
optional: Boolean,
'package-lock': Boolean,
+ otp: Number,
parseable: Boolean,
'prefer-offline': Boolean,
'prefer-online': Boolean,
prefix: path,
production: Boolean,
progress: Boolean,
- 'proprietary-attribs': Boolean,
proxy: [null, false, url], // allow proxy to be disabled explicitly
+ 'read-only': Boolean,
'rebuild-bundle': Boolean,
registry: [null, url],
rollback: Boolean,
@@ -405,6 +412,7 @@ exports.shorthands = {
m: ['--message'],
p: ['--parseable'],
porcelain: ['--parseable'],
+ readonly: ['--read-only'],
g: ['--global'],
S: ['--save'],
D: ['--save-dev'],
diff --git a/deps/npm/lib/config/lifecycle.js b/deps/npm/lib/config/lifecycle.js
new file mode 100644
index 0000000000..5fca93939d
--- /dev/null
+++ b/deps/npm/lib/config/lifecycle.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const npm = require('../npm.js')
+const log = require('npmlog')
+
+module.exports = lifecycleOpts
+
+let opts
+
+function lifecycleOpts (moreOpts) {
+ if (!opts) {
+ opts = {
+ config: npm.config.snapshot,
+ dir: npm.dir,
+ failOk: false,
+ force: npm.config.get('force'),
+ group: npm.config.get('group'),
+ ignorePrepublish: npm.config.get('ignore-prepublish'),
+ ignoreScripts: npm.config.get('ignore-scripts'),
+ log: log,
+ production: npm.config.get('production'),
+ scriptShell: npm.config.get('script-shell'),
+ scriptsPrependNodePath: npm.config.get('scripts-prepend-node-path'),
+ unsafePerm: npm.config.get('unsafe-perm'),
+ user: npm.config.get('user')
+ }
+ }
+
+ return moreOpts ? Object.assign({}, opts, moreOpts) : opts
+}
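Note: the factory above builds its options object once, then hands out either the cached object or a shallow copy with overrides. A small usage sketch (assuming npm's config has already been loaded):

    const lifecycleOpts = require('./lifecycle.js')

    const base = lifecycleOpts()                    // built on first call, then cached
    const failOk = lifecycleOpts({ failOk: true })  // Object.assign copy with an override

    console.log(base === lifecycleOpts())           // true: same cached object
    console.log(base.failOk, failOk.failOk)         // false true
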
diff --git a/deps/npm/lib/help.js b/deps/npm/lib/help.js
index 9763d5fccd..64c80f7874 100644
--- a/deps/npm/lib/help.js
+++ b/deps/npm/lib/help.js
@@ -12,6 +12,7 @@ var npm = require('./npm.js')
var log = require('npmlog')
var opener = require('opener')
var glob = require('glob')
+var didYouMean = require('./utils/did-you-mean')
var cmdList = require('./config/cmd-list').cmdList
var shorthands = require('./config/cmd-list').shorthands
var commands = cmdList.concat(Object.keys(shorthands))
@@ -181,6 +182,11 @@ function npmUsage (valid, cb) {
'',
'npm@' + npm.version + ' ' + path.dirname(__dirname)
].join('\n'))
+
+ if (npm.argv.length > 1) {
+ didYouMean(npm.argv[1], commands)
+ }
+
cb(valid)
}
diff --git a/deps/npm/lib/install/action/extract.js b/deps/npm/lib/install/action/extract.js
index 5534e8b28a..8e80d4adda 100644
--- a/deps/npm/lib/install/action/extract.js
+++ b/deps/npm/lib/install/action/extract.js
@@ -20,16 +20,34 @@ const workerFarm = require('worker-farm')
const WORKER_PATH = require.resolve('./extract-worker.js')
let workers
+// NOTE: temporarily disabled on non-OSX due to ongoing issues:
+//
+// * Seems to make Windows antivirus issues much more common
+// * Messes with Docker (I think)
+//
+// There are other issues that should be fixed that affect OSX too:
+//
+// * Logging is messed up right now because pacote does its own thing
+// * Global deduplication in pacote breaks due to multiple procs
+//
+// As these get fixed, we can start experimenting with re-enabling it
+// at least on some platforms.
+const ENABLE_WORKERS = process.platform === 'darwin'
+
extract.init = () => {
- workers = workerFarm({
- maxConcurrentCallsPerWorker: npm.limit.fetch,
- maxRetries: 1
- }, WORKER_PATH)
+ if (ENABLE_WORKERS) {
+ workers = workerFarm({
+ maxConcurrentCallsPerWorker: npm.limit.fetch,
+ maxRetries: 1
+ }, WORKER_PATH)
+ }
return BB.resolve()
}
extract.teardown = () => {
- workerFarm.end(workers)
- workers = null
+ if (ENABLE_WORKERS) {
+ workerFarm.end(workers)
+ workers = null
+ }
return BB.resolve()
}
module.exports = extract
@@ -54,7 +72,7 @@ function extract (staging, pkg, log) {
let msg = args
const spec = typeof args[0] === 'string' ? npa(args[0]) : args[0]
args[0] = spec.raw
- if (spec.registry || spec.type === 'remote') {
+ if (ENABLE_WORKERS && (spec.registry || spec.type === 'remote')) {
// We can't serialize these options
opts.loglevel = opts.log.level
opts.log = null
diff --git a/deps/npm/lib/install/action/install.js b/deps/npm/lib/install/action/install.js
index 754bff43ff..a5cf63b739 100644
--- a/deps/npm/lib/install/action/install.js
+++ b/deps/npm/lib/install/action/install.js
@@ -4,5 +4,5 @@ var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('install', packageId(pkg))
- lifecycle(pkg.package, 'install', pkg.path, false, false, next)
+ lifecycle(pkg.package, 'install', pkg.path, next)
}
diff --git a/deps/npm/lib/install/action/move.js b/deps/npm/lib/install/action/move.js
index bc9bf6a883..00d58a1592 100644
--- a/deps/npm/lib/install/action/move.js
+++ b/deps/npm/lib/install/action/move.js
@@ -7,7 +7,6 @@ var rimraf = require('rimraf')
var mkdirp = require('mkdirp')
var rmStuff = require('../../unbuild.js').rmStuff
var lifecycle = require('../../utils/lifecycle.js')
-var updatePackageJson = require('../update-package-json.js')
var move = require('../../utils/move.js')
/*
@@ -19,14 +18,13 @@ var move = require('../../utils/move.js')
module.exports = function (staging, pkg, log, next) {
log.silly('move', pkg.fromPath, pkg.path)
chain([
- [lifecycle, pkg.package, 'preuninstall', pkg.fromPath, false, true],
- [lifecycle, pkg.package, 'uninstall', pkg.fromPath, false, true],
+ [lifecycle, pkg.package, 'preuninstall', pkg.fromPath, { failOk: true }],
+ [lifecycle, pkg.package, 'uninstall', pkg.fromPath, { failOk: true }],
[rmStuff, pkg.package, pkg.fromPath],
- [lifecycle, pkg.package, 'postuninstall', pkg.fromPath, false, true],
+ [lifecycle, pkg.package, 'postuninstall', pkg.fromPath, { failOk: true }],
[moveModuleOnly, pkg.fromPath, pkg.path, log],
- [lifecycle, pkg.package, 'preinstall', pkg.path, false, true],
- [removeEmptyParents, path.resolve(pkg.fromPath, '..')],
- [updatePackageJson, pkg, pkg.path]
+ [lifecycle, pkg.package, 'preinstall', pkg.path, { failOk: true }],
+ [removeEmptyParents, path.resolve(pkg.fromPath, '..')]
], next)
}
diff --git a/deps/npm/lib/install/action/postinstall.js b/deps/npm/lib/install/action/postinstall.js
index 197dc1e6f9..01accb2a47 100644
--- a/deps/npm/lib/install/action/postinstall.js
+++ b/deps/npm/lib/install/action/postinstall.js
@@ -4,5 +4,5 @@ var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('postinstall', packageId(pkg))
- lifecycle(pkg.package, 'postinstall', pkg.path, false, false, next)
+ lifecycle(pkg.package, 'postinstall', pkg.path, next)
}
diff --git a/deps/npm/lib/install/action/preinstall.js b/deps/npm/lib/install/action/preinstall.js
index a16082ef73..374ff56332 100644
--- a/deps/npm/lib/install/action/preinstall.js
+++ b/deps/npm/lib/install/action/preinstall.js
@@ -4,5 +4,5 @@ var packageId = require('../../utils/package-id.js')
module.exports = function (staging, pkg, log, next) {
log.silly('preinstall', packageId(pkg))
- lifecycle(pkg.package, 'preinstall', pkg.path, false, false, next)
+ lifecycle(pkg.package, 'preinstall', pkg.path, next)
}
diff --git a/deps/npm/lib/install/action/prepare.js b/deps/npm/lib/install/action/prepare.js
index 5e4333a5b5..d48c8e7e86 100644
--- a/deps/npm/lib/install/action/prepare.js
+++ b/deps/npm/lib/install/action/prepare.js
@@ -19,8 +19,8 @@ module.exports = function (staging, pkg, log, next) {
var buildpath = moduleStagingPath(staging, pkg)
chain(
[
- [lifecycle, pkg.package, 'prepublish', buildpath, false, false],
- [lifecycle, pkg.package, 'prepare', buildpath, false, false]
+ [lifecycle, pkg.package, 'prepublish', buildpath],
+ [lifecycle, pkg.package, 'prepare', buildpath]
],
next
)
diff --git a/deps/npm/lib/install/action/unbuild.js b/deps/npm/lib/install/action/unbuild.js
index ce20df75d3..dbfbd9c4b1 100644
--- a/deps/npm/lib/install/action/unbuild.js
+++ b/deps/npm/lib/install/action/unbuild.js
@@ -6,11 +6,11 @@ var rmStuff = Bluebird.promisify(require('../../unbuild.js').rmStuff)
module.exports = function (staging, pkg, log) {
log.silly('unbuild', packageId(pkg))
- return lifecycle(pkg.package, 'preuninstall', pkg.path, false, true).then(() => {
- return lifecycle(pkg.package, 'uninstall', pkg.path, false, true)
+ return lifecycle(pkg.package, 'preuninstall', pkg.path, { failOk: true }).then(() => {
+ return lifecycle(pkg.package, 'uninstall', pkg.path, { failOk: true })
}).then(() => {
return rmStuff(pkg.package, pkg.path)
}).then(() => {
- return lifecycle(pkg.package, 'postuninstall', pkg.path, false, true)
+ return lifecycle(pkg.package, 'postuninstall', pkg.path, { failOk: true })
})
}
diff --git a/deps/npm/lib/install/actions.js b/deps/npm/lib/install/actions.js
index 028d932373..9f0dcfa5dc 100644
--- a/deps/npm/lib/install/actions.js
+++ b/deps/npm/lib/install/actions.js
@@ -80,6 +80,7 @@ function runAction (action, staging, pkg, log) {
}
function markAsFailed (pkg) {
+ if (pkg.failed) return
pkg.failed = true
pkg.requires.forEach((req) => {
req.requiredBy = req.requiredBy.filter((reqReqBy) => {
diff --git a/deps/npm/lib/install/deps.js b/deps/npm/lib/install/deps.js
index d7a2c27c1c..c93907a416 100644
--- a/deps/npm/lib/install/deps.js
+++ b/deps/npm/lib/install/deps.js
@@ -62,8 +62,9 @@ function doesChildVersionMatch (child, requested, requestor) {
// In those cases _from, will be preserved and we can compare that to ensure that they
// really came from the same sources.
// You'll see this scenario happen with at least tags and git dependencies.
+ // Some buggy clients will write spaces into the module name part of a _from.
if (child.package._from) {
- var fromReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^' + moduleName(child) + '@'), ''))
+ var fromReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^\s*' + moduleName(child) + '\s*@'), ''))
if (fromReq.rawSpec === requested.rawSpec) return true
if (fromReq.type === requested.type && fromReq.saveSpec && fromReq.saveSpec === requested.saveSpec) return true
}
@@ -197,18 +198,31 @@ function matchingDep (tree, name) {
exports.getAllMetadata = function (args, tree, where, next) {
asyncMap(args, function (arg, done) {
- var spec = npa(arg)
+ let spec
+ try {
+ spec = npa(arg)
+ } catch (e) {
+ return done(e)
+ }
if (spec.type !== 'file' && spec.type !== 'directory' && (spec.name == null || spec.rawSpec === '')) {
return fs.stat(path.join(arg, 'package.json'), (err) => {
if (err) {
var version = matchingDep(tree, spec.name)
if (version) {
- return fetchPackageMetadata(npa.resolve(spec.name, version), where, done)
+ try {
+ return fetchPackageMetadata(npa.resolve(spec.name, version), where, done)
+ } catch (e) {
+ return done(e)
+ }
} else {
return fetchPackageMetadata(spec, where, done)
}
} else {
- return fetchPackageMetadata(npa('file:' + arg), where, done)
+ try {
+ return fetchPackageMetadata(npa('file:' + arg), where, done)
+ } catch (e) {
+ return done(e)
+ }
}
})
} else {
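Note: npa() throws synchronously on malformed specifiers, which is why getAllMetadata() now wraps each call and routes the error to the callback instead of crashing. The pattern in isolation:

    const npa = require('npm-package-arg')

    function parseSpec (arg, done) {
      let spec
      try {
        spec = npa(arg)
      } catch (e) {
        return done(e)   // report a bad specifier through the callback
      }
      done(null, spec)
    }
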
diff --git a/deps/npm/lib/install/update-package-json.js b/deps/npm/lib/install/update-package-json.js
index 14339d0012..afffaf7800 100644
--- a/deps/npm/lib/install/update-package-json.js
+++ b/deps/npm/lib/install/update-package-json.js
@@ -4,6 +4,7 @@ var writeFileAtomic = require('write-file-atomic')
var moduleName = require('../utils/module-name.js')
var deepSortObject = require('../utils/deep-sort-object.js')
var sortedObject = require('sorted-object')
+var isWindows = require('../utils/is-windows.js')
var sortKeys = [
'dependencies', 'devDependencies', 'bundleDependencies',
@@ -47,7 +48,9 @@ module.exports = function (mod, buildpath, next) {
var data = JSON.stringify(sortedObject(pkg), null, 2) + '\n'
writeFileAtomic(path.resolve(buildpath, 'package.json'), data, {
- // We really don't need this guarantee, and fsyncing here is super slow.
- fsync: false
+ // We really don't need this guarantee, and fsyncing here is super slow. Except on
+ // Windows where there isn't a big performance difference and it prevents errors when
+ // rolling back optional packages (#17671)
+ fsync: isWindows
}, next)
}
diff --git a/deps/npm/lib/install/validate-tree.js b/deps/npm/lib/install/validate-tree.js
index ccd4e2e310..24a140171d 100644
--- a/deps/npm/lib/install/validate-tree.js
+++ b/deps/npm/lib/install/validate-tree.js
@@ -40,7 +40,7 @@ function thenValidateAllPeerDeps (idealTree, next) {
validate('OF', arguments)
validateAllPeerDeps(idealTree, function (tree, pkgname, version) {
var warn = new Error(packageId(tree) + ' requires a peer of ' + pkgname + '@' +
- version + ' but none was installed.')
+ version + ' but none is installed. You must install peer dependencies yourself.')
warn.code = 'EPEERINVALID'
idealTree.warnings.push(warn)
})
diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js
index 2e3db79c3b..7c0ea71e77 100644
--- a/deps/npm/lib/ls.js
+++ b/deps/npm/lib/ls.js
@@ -135,7 +135,7 @@ function filterByEnv (data) {
var devKeys = Object.keys(data.devDependencies || [])
var prodKeys = Object.keys(data._dependencies || [])
Object.keys(data.dependencies).forEach(function (name) {
- if (!dev && inList(devKeys, name) && data.dependencies[name].missing) {
+ if (!dev && inList(devKeys, name) && !inList(prodKeys, name) && data.dependencies[name].missing) {
return
}
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index 990d8c5109..3a84947f79 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -25,7 +25,6 @@
var npmconf = require('./config/core.js')
var log = require('npmlog')
- var tty = require('tty')
var path = require('path')
var abbrev = require('abbrev')
var which = require('which')
@@ -285,20 +284,20 @@
switch (color) {
case 'always':
- log.enableColor()
npm.color = true
break
case false:
- log.disableColor()
npm.color = false
break
default:
- if (process.stdout.isTTY) npm.color = true
- else if (!tty.isatty) npm.color = true
- else if (tty.isatty(1)) npm.color = true
- else npm.color = false
+ npm.color = process.stdout.isTTY && process.env['TERM'] !== 'dumb'
break
}
+ if (npm.color) {
+ log.enableColor()
+ } else {
+ log.disableColor()
+ }
if (config.get('unicode')) {
log.enableUnicode()
@@ -306,7 +305,7 @@
log.disableUnicode()
}
- if (config.get('progress') && (process.stderr.isTTY || (tty.isatty && tty.isatty(2)))) {
+ if (config.get('progress') && process.stderr.isTTY && process.env['TERM'] !== 'dumb') {
log.enableProgress()
} else {
log.disableProgress()
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
index f2fb2df79a..a38137b66c 100644
--- a/deps/npm/lib/outdated.js
+++ b/deps/npm/lib/outdated.js
@@ -32,7 +32,6 @@ var table = require('text-table')
var semver = require('semver')
var npa = require('npm-package-arg')
var mutateIntoLogicalTree = require('./install/mutate-into-logical-tree.js')
-var cache = require('./cache.js')
var npm = require('./npm.js')
var long = npm.config.get('long')
var mapToRegistry = require('./utils/map-to-registry.js')
@@ -42,6 +41,7 @@ var computeVersionSpec = require('./install/deps.js').computeVersionSpec
var moduleName = require('./utils/module-name.js')
var output = require('./utils/output.js')
var ansiTrim = require('./utils/ansi-trim')
+var fetchPackageMetadata = require('./fetch-package-metadata.js')
function uniq (list) {
// we maintain the array because we need an array, not iterator, return
@@ -387,8 +387,12 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) {
}
}
- // We didn't find the version in the doc. See if cache can find it.
- cache.add(dep, req, null, false, onCacheAdd)
+ // We didn't find the version in the doc. See if we can find it in metadata.
+ var spec = dep
+ if (req) {
+ spec = dep + '@' + req
+ }
+ fetchPackageMetadata(spec, '', onCacheAdd)
function onCacheAdd (er, d) {
// if this fails, then it means we can't update this thing.
diff --git a/deps/npm/lib/pack.js b/deps/npm/lib/pack.js
index ae3bb260ba..c428482035 100644
--- a/deps/npm/lib/pack.js
+++ b/deps/npm/lib/pack.js
@@ -26,8 +26,9 @@ const pipe = BB.promisify(require('mississippi').pipe)
const prepublishWarning = require('./utils/warn-deprecated')('prepublish-on-install')
const pinflight = require('promise-inflight')
const readJson = BB.promisify(require('read-package-json'))
-const tarPack = BB.promisify(require('./utils/tar').pack)
const writeStreamAtomic = require('fs-write-stream-atomic')
+const tar = require('tar')
+const packlist = require('npm-packlist')
pack.usage = 'npm pack [[<@scope>/]<pkg>...]'
@@ -118,11 +119,20 @@ function packDirectory (mani, dir, target) {
}).then((pkg) => {
return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
const tmpTarget = path.join(tmp, path.basename(target))
- return tarPack(tmpTarget, dir, pkg).then(() => {
- return move(tmpTarget, target, {Promise: BB, fs})
- }).then(() => {
- return lifecycle(pkg, 'postpack', dir)
- }).then(() => target)
+
+ const tarOpt = {
+ file: tmpTarget,
+ cwd: dir,
+ prefix: 'package/',
+ portable: true,
+ gzip: true
+ }
+
+ return packlist({ path: dir })
+ .then((files) => tar.create(tarOpt, files))
+ .then(() => move(tmpTarget, target, {Promise: BB, fs}))
+ .then(() => lifecycle(pkg, 'postpack', dir))
+ .then(() => target)
})
})
}
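Note: the tarball is now produced by npm-packlist plus node-tar directly rather than the old ./utils/tar wrapper. Condensed to its essentials (the function and paths here are illustrative):

    const tar = require('tar')
    const packlist = require('npm-packlist')

    function packDir (dir, target) {
      return packlist({ path: dir })                  // decide which files belong in the package
        .then((files) => tar.create({
          file: target,
          cwd: dir,
          prefix: 'package/',
          portable: true,
          gzip: true
        }, files))
        .then(() => target)
    }
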
diff --git a/deps/npm/lib/ping.js b/deps/npm/lib/ping.js
index e06be9a471..13f390397c 100644
--- a/deps/npm/lib/ping.js
+++ b/deps/npm/lib/ping.js
@@ -15,7 +15,13 @@ function ping (args, silent, cb) {
var auth = npm.config.getCredentialsByURI(registry)
npm.registry.ping(registry, {auth: auth}, function (er, pong, data, res) {
- if (!silent) output(JSON.stringify(pong))
+ if (!silent) {
+ if (er) {
+ output('Ping error: ' + er)
+ } else {
+ output('Ping success: ' + JSON.stringify(pong))
+ }
+ }
cb(er, er ? null : pong, data, res)
})
}
diff --git a/deps/npm/lib/profile.js b/deps/npm/lib/profile.js
new file mode 100644
index 0000000000..4238e14276
--- /dev/null
+++ b/deps/npm/lib/profile.js
@@ -0,0 +1,296 @@
+'use strict'
+const profile = require('npm-profile')
+const npm = require('./npm.js')
+const log = require('npmlog')
+const output = require('./utils/output.js')
+const qw = require('qw')
+const Table = require('cli-table2')
+const ansistyles = require('ansistyles')
+const Bluebird = require('bluebird')
+const readUserInfo = require('./utils/read-user-info.js')
+const qrcodeTerminal = require('qrcode-terminal')
+const url = require('url')
+const queryString = require('query-string')
+const pulseTillDone = require('./utils/pulse-till-done.js')
+
+module.exports = profileCmd
+
+profileCmd.usage =
+ 'npm profile enable-2fa [auth-only|auth-and-writes]\n' +
+ 'npm profile disable-2fa\n' +
+ 'npm profile get [<key>]\n' +
+ 'npm profile set <key> <value>'
+
+profileCmd.subcommands = qw`enable-2fa disable-2fa get set`
+
+profileCmd.completion = function (opts, cb) {
+ var argv = opts.conf.argv.remain
+ switch (argv[2]) {
+ case 'enable-2fa':
+ case 'enable-tfa':
+ if (argv.length === 3) {
+ return cb(null, qw`auth-and-writes auth-only`)
+ } else {
+ return cb(null, [])
+ }
+ case 'disable-2fa':
+ case 'disable-tfa':
+ case 'get':
+ case 'set':
+ return cb(null, [])
+ default:
+ return cb(new Error(argv[2] + ' not recognized'))
+ }
+}
+
+function withCb (prom, cb) {
+ prom.then((value) => cb(null, value), cb)
+}
+
+function profileCmd (args, cb) {
+ if (args.length === 0) return cb(new Error(profileCmd.usage))
+ log.gauge.show('profile')
+ switch (args[0]) {
+ case 'enable-2fa':
+ case 'enable-tfa':
+ case 'enable2fa':
+ case 'enabletfa':
+ withCb(enable2fa(args.slice(1)), cb)
+ break
+ case 'disable-2fa':
+ case 'disable-tfa':
+ case 'disable2fa':
+ case 'disabletfa':
+ withCb(disable2fa(), cb)
+ break
+ case 'get':
+ withCb(get(args.slice(1)), cb)
+ break
+ case 'set':
+ withCb(set(args.slice(1)), cb)
+ break
+ default:
+ cb(new Error('Unknown profile command: ' + args[0]))
+ }
+}
+
+function config () {
+ const conf = {
+ json: npm.config.get('json'),
+ parseable: npm.config.get('parseable'),
+ registry: npm.config.get('registry'),
+ otp: npm.config.get('otp')
+ }
+ conf.auth = npm.config.getCredentialsByURI(conf.registry)
+ if (conf.otp) conf.auth.otp = conf.otp
+ return conf
+}
+
+const knownProfileKeys = qw`
+ name email ${'two factor auth'} fullname homepage
+ freenode twitter github created updated`
+
+function get (args) {
+ const tfa = 'two factor auth'
+ const conf = config()
+ return pulseTillDone.withPromise(profile.get(conf)).then((info) => {
+ if (!info.cidr_whitelist) delete info.cidr_whitelist
+ if (conf.json) {
+ output(JSON.stringify(info, null, 2))
+ return
+ }
+ const cleaned = {}
+ knownProfileKeys.forEach((k) => { cleaned[k] = info[k] || '' })
+ Object.keys(info).filter((k) => !(k in cleaned)).forEach((k) => { cleaned[k] = info[k] || '' })
+ delete cleaned.tfa
+ delete cleaned.email_verified
+ cleaned['email'] += info.email_verified ? ' (verified)' : '(unverified)'
+ if (info.tfa && !info.tfa.pending) {
+ cleaned[tfa] = info.tfa.mode
+ } else {
+ cleaned[tfa] = 'disabled'
+ }
+ if (args.length) {
+ const values = args // comma or space separated ā†“
+ .join(',').split(/,/).map((arg) => arg.trim()).filter((arg) => arg !== '')
+ .map((arg) => cleaned[arg])
+ .join('\t')
+ output(values)
+ } else {
+ if (conf.parseable) {
+ Object.keys(info).forEach((key) => {
+ if (key === 'tfa') {
+ output(`${key}\t${cleaned[tfa]}`)
+ } else {
+ output(`${key}\t${info[key]}`)
+ }
+ })
+ return
+ } else {
+ const table = new Table()
+ Object.keys(cleaned).forEach((k) => table.push({[ansistyles.bright(k)]: cleaned[k]}))
+ output(table.toString())
+ }
+ }
+ })
+}
+
+const writableProfileKeys = qw`
+ email password fullname homepage freenode twitter github`
+
+function set (args) {
+ const conf = config()
+ const prop = (args[0] || '').toLowerCase().trim()
+ let value = args.length > 1 ? args.slice(1).join(' ') : null
+ if (prop !== 'password' && value === null) {
+ return Promise.reject(Error('npm profile set <prop> <value>'))
+ }
+ if (prop === 'password' && value !== null) {
+ return Promise.reject(Error(
+ 'npm profile set password\n' +
+ 'Do not include your current or new passwords on the command line.'))
+ }
+ if (writableProfileKeys.indexOf(prop) === -1) {
+ return Promise.reject(Error(`"${prop}" is not a property we can set. Valid properties are: ` + writableProfileKeys.join(', ')))
+ }
+ return Bluebird.try(() => {
+ if (prop !== 'password') return
+ return readUserInfo.password('Current password: ').then((current) => {
+ return readPasswords().then((newpassword) => {
+ value = {old: current, new: newpassword}
+ })
+ })
+ function readPasswords () {
+ return readUserInfo.password('New password: ').then((password1) => {
+ return readUserInfo.password(' Again: ').then((password2) => {
+ if (password1 !== password2) {
+ log.warn('profile', 'Passwords do not match, please try again.')
+ return readPasswords()
+ }
+ return password1
+ })
+ })
+ }
+ }).then(() => {
+ // FIXME: Work around to not clear everything other than what we're setting
+ return pulseTillDone.withPromise(profile.get(conf).then((user) => {
+ const newUser = {}
+ writableProfileKeys.forEach((k) => { newUser[k] = user[k] })
+ newUser[prop] = value
+ return profile.set(newUser, conf).catch((err) => {
+ if (err.code !== 'EOTP') throw err
+ return readUserInfo.otp('Enter OTP: ').then((otp) => {
+ conf.auth.otp = otp
+ return profile.set(newUser, conf)
+ })
+ }).then((result) => {
+ if (conf.json) {
+ output(JSON.stringify({[prop]: result[prop]}, null, 2))
+ } else if (conf.parseable) {
+ output(prop + '\t' + result[prop])
+ } else {
+ output('Set', prop, 'to', result[prop])
+ }
+ })
+ }))
+ })
+}
+
+function enable2fa (args) {
+ if (args.length > 1) {
+ return Promise.reject(new Error('npm profile enable-2fa [auth-and-writes|auth-only]'))
+ }
+ const mode = args[0] || 'auth-and-writes'
+ if (mode !== 'auth-only' && mode !== 'auth-and-writes') {
+ return Promise.reject(new Error(`Invalid two factor authentication mode "${mode}".\n` +
+ 'Valid modes are:\n' +
+ ' auth-only - Require two-factor authentication only when logging in\n' +
+ ' auth-and-writes - Require two-factor authentication when logging in AND when publishing'))
+ }
+ const conf = config()
+ if (conf.json || conf.parseable) {
+ return Promise.reject(new Error(
+ 'Enabling two-factor authentication is an interactive opperation and ' +
+ (conf.json ? 'JSON' : 'parseable') + 'output mode is not available'))
+ }
+ log.notice('profile', 'Enabling two factor authentication for ' + mode)
+ const info = {
+ tfa: {
+ mode: mode
+ }
+ }
+ return readUserInfo.password().then((password) => {
+ info.tfa.password = password
+ log.info('profile', 'Determine if tfa is pending')
+ return pulseTillDone.withPromise(profile.get(conf)).then((info) => {
+ if (!info.tfa) return
+ if (info.tfa.pending) {
+ log.info('profile', 'Resetting two-factor authentication')
+ return pulseTillDone.withPromise(profile.set({tfa: {password, mode: 'disable'}}, conf))
+ } else {
+ if (conf.auth.otp) return
+ return readUserInfo.otp('Enter OTP: ').then((otp) => {
+ conf.auth.otp = otp
+ })
+ }
+ })
+ }).then(() => {
+ log.info('profile', 'Setting two factor authentication to ' + mode)
+ return pulseTillDone.withPromise(profile.set(info, conf))
+ }).then((challenge) => {
+ if (challenge.tfa === null) {
+ output('Two factor authentication mode changed to: ' + mode)
+ return
+ }
+ if (typeof challenge.tfa !== 'string' || !/^otpauth:[/][/]/.test(challenge.tfa)) {
+ throw new Error('Unknown error enabling two-factor authentication. Expected otpauth URL, got: ' + challenge.tfa)
+ }
+ const otpauth = url.parse(challenge.tfa)
+ const opts = queryString.parse(otpauth.query)
+ return qrcode(challenge.tfa).then((code) => {
+ output('Scan into your authenticator app:\n' + code + '\n Or enter code:', opts.secret)
+ }).then((code) => {
+ return readUserInfo.otp('And an OTP code from your authenticator: ')
+ }).then((otp1) => {
+ log.info('profile', 'Finalizing two factor authentication')
+ return profile.set({tfa: [otp1]}, conf)
+ }).then((result) => {
+ output('TFA successfully enabled. Below are your recovery codes, please print these out.')
+ output('You will need these to recover access to your account if you lose your authentication device.')
+ result.tfa.forEach((c) => output('\t' + c))
+ })
+ })
+}
+
+function disable2fa (args) {
+ const conf = config()
+ return pulseTillDone.withPromise(profile.get(conf)).then((info) => {
+ if (!info.tfa || info.tfa.pending) {
+ output('Two factor authentication not enabled.')
+ return
+ }
+ return readUserInfo.password().then((password) => {
+ return Bluebird.try(() => {
+ if (conf.auth.otp) return
+ return readUserInfo.otp('Enter one-time password from your authenticator: ').then((otp) => {
+ conf.auth.otp = otp
+ })
+ }).then(() => {
+ log.info('profile', 'disabling tfa')
+ return pulseTillDone.withPromise(profile.set({tfa: {password: password, mode: 'disable'}}, conf)).then(() => {
+ if (conf.json) {
+ output(JSON.stringify({tfa: false}, null, 2))
+ } else if (conf.parseable) {
+ output('tfa\tfalse')
+ } else {
+ output('Two factor authentication disabled.')
+ }
+ })
+ })
+ })
+ })
+}
+
+function qrcode (url) {
+ return new Promise((resolve) => qrcodeTerminal.generate(url, resolve))
+}
diff --git a/deps/npm/lib/publish.js b/deps/npm/lib/publish.js
index 5d99bfd089..bf60e1d5a6 100644
--- a/deps/npm/lib/publish.js
+++ b/deps/npm/lib/publish.js
@@ -21,6 +21,7 @@ const readJson = BB.promisify(require('read-package-json'))
const semver = require('semver')
const statAsync = BB.promisify(require('graceful-fs').stat)
const writeStreamAtomic = require('fs-write-stream-atomic')
+const readUserInfo = require('./utils/read-user-info.js')
publish.usage = 'npm publish [<tarball>|<folder>] [--tag <tag>] [--access <public|restricted>]' +
"\n\nPublishes '.' if no argument supplied" +
@@ -199,5 +200,13 @@ function upload (arg, pkg, isRetry, cached) {
throw err
}
})
+ }).catch((err) => {
+ if (err.code !== 'EOTP' && !(err.code === 'E401' && /one-time pass/.test(err.message))) throw err
+ // we prompt on stdout and read answers from stdin, so they need to be ttys.
+ if (!process.stdin.isTTY || !process.stdout.isTTY) throw err
+ return readUserInfo.otp('Enter OTP: ').then((otp) => {
+ npm.config.set('otp', otp)
+ return upload(arg, pkg, isRetry, cached)
+ })
})
}
diff --git a/deps/npm/lib/restart.js b/deps/npm/lib/restart.js
index 601249fd6b..41f9c3a756 100644
--- a/deps/npm/lib/restart.js
+++ b/deps/npm/lib/restart.js
@@ -1 +1 @@
-module.exports = require('./utils/lifecycle.js').cmd('restart')
+module.exports = require('./utils/lifecycle-cmd.js')('restart')
diff --git a/deps/npm/lib/run-script.js b/deps/npm/lib/run-script.js
index 05bc1fe98b..fb7781f551 100644
--- a/deps/npm/lib/run-script.js
+++ b/deps/npm/lib/run-script.js
@@ -166,7 +166,7 @@ function run (pkg, wd, cmd, args, cb) {
}
// when running scripts explicitly, assume that they're trusted.
- return [lifecycle, pkg, c, wd, true]
+ return [lifecycle, pkg, c, wd, { unsafePerm: true }]
}), cb)
}
diff --git a/deps/npm/lib/shrinkwrap.js b/deps/npm/lib/shrinkwrap.js
index a541d868fc..956a693646 100644
--- a/deps/npm/lib/shrinkwrap.js
+++ b/deps/npm/lib/shrinkwrap.js
@@ -101,7 +101,7 @@ function shrinkwrapDeps (deps, top, tree, seen) {
if (!seen) seen = new Set()
if (seen.has(tree)) return
seen.add(tree)
- tree.children.sort(function (aa, bb) { return moduleName(aa).localeCompare(moduleName(bb)) }).forEach(function (child) {
+ sortModules(tree.children).forEach(function (child) {
if (child.fakeChild) {
deps[moduleName(child)] = child.fakeChild
return
@@ -130,7 +130,7 @@ function shrinkwrapDeps (deps, top, tree, seen) {
if (isOnlyOptional(child)) pkginfo.optional = true
if (child.requires.length) {
pkginfo.requires = {}
- child.requires.sort((a, b) => moduleName(a).localeCompare(moduleName(b))).forEach((required) => {
+ sortModules(child.requires).forEach((required) => {
var requested = required.package._requested || getRequested(required) || {}
pkginfo.requires[moduleName(required)] = childVersion(top, required, requested)
})
@@ -142,6 +142,14 @@ function shrinkwrapDeps (deps, top, tree, seen) {
})
}
+function sortModules (modules) {
+ // sort modules with the locale-agnostic Unicode sort
+ var sortedModuleNames = modules.map(moduleName).sort()
+ return modules.sort((a, b) => (
+ sortedModuleNames.indexOf(moduleName(a)) - sortedModuleNames.indexOf(moduleName(b))
+ ))
+}
+
function childVersion (top, child, req) {
if (req.type === 'directory' || req.type === 'file') {
return 'file:' + unixFormatPath(path.relative(top.path, child.package._resolved || req.fetchSpec))
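Note: sortModules() exists because String.prototype.localeCompare() depends on the user's locale, so two machines could write differently ordered shrinkwrap files. The default string sort compares UTF-16 code units and is the same everywhere:

    console.log(['Zlib', 'abbrev'].sort())
    // => [ 'Zlib', 'abbrev' ]   (code-unit order: 'Z' < 'a')
    console.log(['Zlib', 'abbrev'].sort((a, b) => a.localeCompare(b)))
    // => [ 'abbrev', 'Zlib' ]   in most locales
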
diff --git a/deps/npm/lib/start.js b/deps/npm/lib/start.js
index 85d61e78d0..e978536500 100644
--- a/deps/npm/lib/start.js
+++ b/deps/npm/lib/start.js
@@ -1 +1 @@
-module.exports = require('./utils/lifecycle.js').cmd('start')
+module.exports = require('./utils/lifecycle-cmd.js')('start')
diff --git a/deps/npm/lib/stop.js b/deps/npm/lib/stop.js
index e4d02ff281..fd43d08fc1 100644
--- a/deps/npm/lib/stop.js
+++ b/deps/npm/lib/stop.js
@@ -1 +1 @@
-module.exports = require('./utils/lifecycle.js').cmd('stop')
+module.exports = require('./utils/lifecycle-cmd.js')('stop')
diff --git a/deps/npm/lib/test.js b/deps/npm/lib/test.js
index 4ef025c4ba..06138ac00a 100644
--- a/deps/npm/lib/test.js
+++ b/deps/npm/lib/test.js
@@ -1,12 +1,8 @@
module.exports = test
-const testCmd = require('./utils/lifecycle.js').cmd('test')
-const usage = require('./utils/usage')
+const testCmd = require('./utils/lifecycle-cmd.js')('test')
-test.usage = usage(
- 'test',
- 'npm test [-- <args>]'
-)
+test.usage = testCmd.usage
function test (args, cb) {
testCmd(args, function (er) {
diff --git a/deps/npm/lib/token.js b/deps/npm/lib/token.js
new file mode 100644
index 0000000000..a182b633d2
--- /dev/null
+++ b/deps/npm/lib/token.js
@@ -0,0 +1,211 @@
+'use strict'
+const profile = require('npm-profile')
+const npm = require('./npm.js')
+const output = require('./utils/output.js')
+const Table = require('cli-table2')
+const Bluebird = require('bluebird')
+const isCidrV4 = require('is-cidr').isCidrV4
+const isCidrV6 = require('is-cidr').isCidrV6
+const readUserInfo = require('./utils/read-user-info.js')
+const ansistyles = require('ansistyles')
+const log = require('npmlog')
+const pulseTillDone = require('./utils/pulse-till-done.js')
+
+module.exports = token
+
+token.usage =
+ 'npm token list\n' +
+ 'npm token delete <tokenKey>\n' +
+ 'npm token create [--read-only] [--cidr=list]\n'
+
+token.subcommands = ['list', 'delete', 'create']
+
+token.completion = function (opts, cb) {
+ var argv = opts.conf.argv.remain
+
+ switch (argv[2]) {
+ case 'list':
+ case 'delete':
+ case 'create':
+ return cb(null, [])
+ default:
+ return cb(new Error(argv[2] + ' not recognized'))
+ }
+}
+
+function withCb (prom, cb) {
+ prom.then((value) => cb(null, value), cb)
+}
+
+function token (args, cb) {
+ log.gauge.show('token')
+ if (args.length === 0) return withCb(list([]), cb)
+ switch (args[0]) {
+ case 'list':
+ case 'ls':
+ withCb(list(), cb)
+ break
+ case 'delete':
+ case 'rel':
+ case 'remove':
+ case 'rm':
+ withCb(rm(args.slice(1)), cb)
+ break
+ case 'create':
+ withCb(create(args.slice(1)), cb)
+ break
+ default:
+ cb(new Error('Unknown profile command: ' + args[0]))
+ }
+}
+
+function generateTokenIds (tokens, minLength) {
+ const byId = {}
+ tokens.forEach((token) => {
+ token.id = token.key
+ for (let ii = minLength; ii < token.key.length; ++ii) {
+ if (!tokens.some((ot) => ot !== token && ot.key.slice(0, ii) === token.key.slice(0, ii))) {
+ token.id = token.key.slice(0, ii)
+ break
+ }
+ }
+ byId[token.id] = token
+ })
+ return byId
+}
+
+function config () {
+ const conf = {
+ json: npm.config.get('json'),
+ parseable: npm.config.get('parseable'),
+ registry: npm.config.get('registry'),
+ otp: npm.config.get('otp')
+ }
+ conf.auth = npm.config.getCredentialsByURI(conf.registry)
+ if (conf.otp) conf.auth.otp = conf.otp
+ return conf
+}
+
+function list (args) {
+ const conf = config()
+ log.info('token', 'getting list')
+ return pulseTillDone.withPromise(profile.listTokens(conf)).then((tokens) => {
+ if (conf.json) {
+ output(JSON.stringify(tokens, null, 2))
+ return
+ } else if (conf.parseable) {
+ output(['key', 'token', 'created', 'readonly', 'CIDR whitelist'].join('\t'))
+ tokens.forEach((token) => {
+ output([
+ token.key,
+ token.token,
+ token.created,
+ token.readonly ? 'true' : 'false',
+ token.cidr_whitelist ? token.cidr_whitelist.join(',') : ''
+ ].join('\t'))
+ })
+ return
+ }
+ generateTokenIds(tokens, 6)
+ const idWidth = tokens.reduce((acc, token) => Math.max(acc, token.id.length), 0)
+ const table = new Table({
+ head: ['id', 'token', 'created', 'readonly', 'CIDR whitelist'],
+ colWidths: [Math.max(idWidth, 2) + 2, 9, 12, 10]
+ })
+ tokens.forEach((token) => {
+ table.push([
+ token.id,
+ token.token + 'ā€¦',
+ String(token.created).slice(0, 10),
+ token.readonly ? 'yes' : 'no',
+ token.cidr_whitelist ? token.cidr_whitelist.join(', ') : ''
+ ])
+ })
+ output(table.toString())
+ })
+}
+
+function rm (args) {
+ if (args.length === 0) {
+ throw new Error('npm token delete <tokenKey>')
+ }
+ const conf = config()
+ const toRemove = []
+ const progress = log.newItem('removing tokens', toRemove.length)
+ progress.info('token', 'getting existing list')
+ return pulseTillDone.withPromise(profile.listTokens(conf).then((tokens) => {
+ args.forEach((id) => {
+ const matches = tokens.filter((token) => token.key.indexOf(id) === 0)
+ if (matches.length === 1) {
+ toRemove.push(matches[0].key)
+ } else if (matches.length > 1) {
+ throw new Error(`Token ID "${id}" was ambiguous, a new token may have been created since you last ran \`npm-profile token list\`.`)
+ } else {
+ const tokenMatches = tokens.filter((token) => id.indexOf(token.token) === 0)
+ if (tokenMatches === 0) {
+ throw new Error(`Unknown token id or value "${id}".`)
+ }
+ toRemove.push(id)
+ }
+ })
+ return Bluebird.map(toRemove, (key) => {
+ progress.info('token', 'removing', key)
+ profile.removeToken(key, conf).then(() => profile.completeWork(1))
+ })
+ })).then(() => {
+ if (conf.json) {
+ output(JSON.stringify(toRemove))
+ } else if (conf.parseable) {
+ output(toRemove.join('\t'))
+ } else {
+ output('Removed ' + toRemove.length + ' token' + (toRemove.length !== 1 ? 's' : ''))
+ }
+ })
+}
+
+function create (args) {
+ const conf = config()
+ const cidr = npm.config.get('cidr')
+ const readonly = npm.config.get('read-only')
+
+ const validCIDR = validateCIDRList(cidr)
+ return readUserInfo.password().then((password) => {
+ log.info('token', 'creating')
+ return profile.createToken(password, readonly, validCIDR, conf).catch((ex) => {
+ if (ex.code !== 'EOTP') throw ex
+ log.info('token', 'failed because it requires OTP')
+ return readUserInfo.otp('Authenticator provided OTP:').then((otp) => {
+ conf.auth.otp = otp
+ log.info('token', 'creating with OTP')
+ return pulseTillDone.withPromise(profile.createToken(password, readonly, validCIDR, conf))
+ })
+ })
+ }).then((result) => {
+ delete result.key
+ delete result.updated
+ if (conf.json) {
+ output(JSON.stringify(result))
+ } else if (conf.parseable) {
+ Object.keys(result).forEach((k) => output(k + '\t' + result[k]))
+ } else {
+ const table = new Table()
+ Object.keys(result).forEach((k) => table.push({[ansistyles.bright(k)]: String(result[k])}))
+ output(table.toString())
+ }
+ })
+}
+
+function validateCIDR (cidr) {
+ if (isCidrV6(cidr)) {
+ throw new Error('CIDR whitelist can only contain IPv4 addresses, ' + cidr + ' is IPv6')
+ }
+ if (!isCidrV4(cidr)) {
+ throw new Error('CIDR whitelist contains invalid CIDR entry: ' + cidr)
+ }
+}
+
+function validateCIDRList (cidrs) {
+ const list = Array.isArray(cidrs) ? cidrs : cidrs ? cidrs.split(/,\s*/) : []
+ list.forEach(validateCIDR)
+ return list
+}
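Note: generateTokenIds() above shortens each token key to its shortest prefix (at least six characters) that no other key shares, so `npm token delete` can accept abbreviated IDs. The core idea in isolation:

    function shortestUniquePrefix (key, allKeys, minLength) {
      for (let len = minLength; len < key.length; ++len) {
        const prefix = key.slice(0, len)
        if (!allKeys.some((other) => other !== key && other.slice(0, len) === prefix)) {
          return prefix
        }
      }
      return key   // no shorter prefix is unique; fall back to the full key
    }
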
diff --git a/deps/npm/lib/unbuild.js b/deps/npm/lib/unbuild.js
index 9ba5972d8a..78293c9ca2 100644
--- a/deps/npm/lib/unbuild.js
+++ b/deps/npm/lib/unbuild.js
@@ -38,14 +38,14 @@ function unbuild_ (silent) {
if (er) return gentlyRm(folder, false, base, cb)
chain(
[
- [lifecycle, pkg, 'preuninstall', folder, false, true],
- [lifecycle, pkg, 'uninstall', folder, false, true],
+ [lifecycle, pkg, 'preuninstall', folder, { failOk: true }],
+ [lifecycle, pkg, 'uninstall', folder, { failOk: true }],
!silent && function (cb) {
output('unbuild ' + pkg._id)
cb()
},
[rmStuff, pkg, folder],
- [lifecycle, pkg, 'postuninstall', folder, false, true],
+ [lifecycle, pkg, 'postuninstall', folder, { failOk: true }],
[gentlyRm, folder, false, base]
],
cb
@@ -60,7 +60,9 @@ function rmStuff (pkg, folder, cb) {
// otherwise, then bins are in folder/../.bin
var parent = pkg.name[0] === '@' ? path.dirname(path.dirname(folder)) : path.dirname(folder)
var gnm = npm.dir
- var top = gnm === parent
+ // gnm might be an absolute path, parent might be relative
+ // this checks they're the same directory regardless
+ var top = path.relative(gnm, parent) === ''
log.verbose('unbuild rmStuff', pkg._id, 'from', gnm)
if (!top) log.verbose('unbuild rmStuff', 'in', parent)
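Note: the path.relative() trick above works because it resolves both arguments (relative paths are resolved against the current working directory) and returns an empty string when they point at the same place:

    const path = require('path')

    // '' means "same directory", whether the inputs were absolute or relative
    path.relative('/usr/lib/node_modules', '/usr/lib/node_modules')    // ''
    path.relative(process.cwd() + '/node_modules', 'node_modules')     // ''
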
diff --git a/deps/npm/lib/utils/convert-line-endings.js b/deps/npm/lib/utils/convert-line-endings.js
new file mode 100644
index 0000000000..b05d328aac
--- /dev/null
+++ b/deps/npm/lib/utils/convert-line-endings.js
@@ -0,0 +1,49 @@
+'use strict'
+
+const Transform = require('stream').Transform
+const Bluebird = require('bluebird')
+const fs = require('graceful-fs')
+const stat = Bluebird.promisify(fs.stat)
+const chmod = Bluebird.promisify(fs.chmod)
+const fsWriteStreamAtomic = require('fs-write-stream-atomic')
+
+module.exports.dos2Unix = dos2Unix
+
+function dos2Unix (file) {
+ return stat(file).then((stats) => {
+ let previousChunkEndedInCR = false
+ return new Bluebird((resolve, reject) => {
+ fs.createReadStream(file)
+ .on('error', reject)
+ .pipe(new Transform({
+ transform: function (chunk, encoding, done) {
+ let data = chunk.toString()
+ if (previousChunkEndedInCR) {
+ data = '\r' + data
+ }
+ if (data[data.length - 1] === '\r') {
+ data = data.slice(0, -1)
+ previousChunkEndedInCR = true
+ } else {
+ previousChunkEndedInCR = false
+ }
+ done(null, data.replace(/\r\n/g, '\n'))
+ },
+ flush: function (done) {
+ if (previousChunkEndedInCR) {
+ this.push('\r')
+ }
+ done()
+ }
+ }))
+ .on('error', reject)
+ .pipe(fsWriteStreamAtomic(file))
+ .on('error', reject)
+ .on('finish', function () {
+ resolve(chmod(file, stats.mode))
+ })
+ })
+ })
+}
+
+// could add unix2Dos and legacy Mac functions if need be
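
A minimal usage sketch for the new helper, assuming it is required from inside npm's lib/ tree (its dependencies live there); the file path is hypothetical.

    const dos2Unix = require('./utils/convert-line-endings.js').dos2Unix

    dos2Unix('/tmp/some-script.sh')   // hypothetical path to a CRLF file
      .then(() => console.log('line endings normalized, original file mode preserved'))
      .catch((err) => console.error('conversion failed:', err))
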
diff --git a/deps/npm/lib/utils/did-you-mean.js b/deps/npm/lib/utils/did-you-mean.js
new file mode 100644
index 0000000000..8e72dde5fa
--- /dev/null
+++ b/deps/npm/lib/utils/did-you-mean.js
@@ -0,0 +1,20 @@
+var meant = require('meant')
+var output = require('./output.js')
+
+function didYouMean (scmd, commands) {
+ var bestSimilarity = meant(scmd, commands).map(function (str) {
+ return ' ' + str
+ })
+
+ if (bestSimilarity.length === 0) return
+ if (bestSimilarity.length === 1) {
+ output('\nDid you mean this?\n' + bestSimilarity[0])
+ } else {
+ output(
+ ['\nDid you mean one of these?']
+ .concat(bestSimilarity.slice(0, 3)).join('\n')
+ )
+ }
+}
+
+module.exports = didYouMean
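
A usage sketch for the new helper, assuming it is required from inside npm's lib/ tree; the command list is illustrative.

    const didYouMean = require('./utils/did-you-mean.js')

    // prints something like "Did you mean this?\n    install" for a close miss
    didYouMean('instal', ['install', 'init', 'info'])
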
diff --git a/deps/npm/lib/utils/error-handler.js b/deps/npm/lib/utils/error-handler.js
index 52a675bea6..b2fd45a5f3 100644
--- a/deps/npm/lib/utils/error-handler.js
+++ b/deps/npm/lib/utils/error-handler.js
@@ -170,20 +170,12 @@ function errorHandler (er) {
;[
'type',
- 'fstream_path',
- 'fstream_unc_path',
- 'fstream_type',
- 'fstream_class',
- 'fstream_finish_call',
- 'fstream_linkpath',
'stack',
- 'fstream_stack',
'statusCode',
'pkgid'
].forEach(function (k) {
var v = er[k]
if (!v) return
- if (k === 'fstream_stack') v = v.join('\n')
log.verbose(k, v)
})
diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js
index 49aa9124ec..028a18bbb6 100644
--- a/deps/npm/lib/utils/error-message.js
+++ b/deps/npm/lib/utils/error-message.js
@@ -66,8 +66,52 @@ function errorMessage (er) {
])
break
- // TODO(isaacs)
- // Add a special case here for E401 and E403 explaining auth issues?
+ case 'EOTP':
+ short.push(['', 'This operation requires a one-time password from your authenticator.'])
+ detail.push([
+ '',
+ [
+ 'You can provide a one-time password by passing --otp=<code> to the command you ran.',
+ 'If you already provided a one-time password then it is likely that you either typoed',
+ 'it, or it timed out. Please try again.'
+ ].join('\n')
+ ])
+ break
+
+ case 'E401':
+ // npm ERR! code E401
+ // npm ERR! Unable to authenticate, need: Basic
+ if (er.headers && er.headers['www-authenticate']) {
+ const auth = er.headers['www-authenticate']
+ if (auth.indexOf('Bearer') !== -1) {
+ short.push(['', 'Unable to authenticate, your authentication token seems to be invalid.'])
+ detail.push([
+ '',
+ [
+ 'To correct this please try logging in again with:',
+ ' npm login'
+ ].join('\n')
+ ])
+ break
+ } else if (auth.indexOf('Basic') !== -1) {
+ short.push(['', 'Incorrect or missing password.'])
+ detail.push([
+ '',
+ [
+ 'If you were trying to login, change your password, create an',
+ 'authentication token or enable two-factor authentication then',
+ 'that means you likely typed your password in incorrectly.',
+ 'Please try again, or recover your password at:',
+ ' https://www.npmjs.com/forgot',
+ '',
+ 'If you were doing some other operation then your saved credentials are',
+ 'probably out of date. To correct this please try logging in again with:',
+ ' npm login'
+ ].join('\n')
+ ])
+ break
+ }
+ }
case 'E404':
// There's no need to have 404 in the message as well.
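
For context, a sketch of the error shape that would reach the new E401 "Bearer" branch above; the values are illustrative, and only er.code plus the www-authenticate header matter to the check.

    const er = new Error('Unable to authenticate, need: Bearer')
    er.code = 'E401'
    er.headers = { 'www-authenticate': 'Bearer realm="registry"' }

    // the branch keys off this test before suggesting `npm login`
    console.log(er.headers['www-authenticate'].indexOf('Bearer') !== -1)   // true
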
diff --git a/deps/npm/lib/utils/is-hashbang-file.js b/deps/npm/lib/utils/is-hashbang-file.js
new file mode 100644
index 0000000000..f1677381fa
--- /dev/null
+++ b/deps/npm/lib/utils/is-hashbang-file.js
@@ -0,0 +1,19 @@
+'use strict'
+const Bluebird = require('bluebird')
+const fs = require('graceful-fs')
+const open = Bluebird.promisify(fs.open)
+const close = Bluebird.promisify(fs.close)
+
+module.exports = isHashbangFile
+
+function isHashbangFile (file) {
+ return open(file, 'r').then((fileHandle) => {
+ return new Bluebird((resolve, reject) => {
+ fs.read(fileHandle, new Buffer(new Array(2)), 0, 2, 0, function (err, bytesRead, buffer) {
+ close(fileHandle).then(() => {
+ resolve(!err && buffer.toString() === '#!')
+ }).catch(reject)
+ })
+ })
+ })
+}
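
A usage sketch, assuming the module is required from inside npm's lib/ tree; the path is hypothetical.

    const isHashbangFile = require('./utils/is-hashbang-file.js')

    isHashbangFile('/usr/local/bin/some-script')   // hypothetical path
      .then((hasHashbang) => console.log(hasHashbang ? 'starts with #!' : 'no hashbang'))
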
diff --git a/deps/npm/lib/utils/lifecycle-cmd.js b/deps/npm/lib/utils/lifecycle-cmd.js
new file mode 100644
index 0000000000..bb802f45ee
--- /dev/null
+++ b/deps/npm/lib/utils/lifecycle-cmd.js
@@ -0,0 +1,18 @@
+exports = module.exports = cmd
+
+var npm = require('../npm.js')
+var usage = require('./usage.js')
+
+function cmd (stage) {
+ function CMD (args, cb) {
+ npm.commands['run-script']([stage].concat(args), cb)
+ }
+ CMD.usage = usage(stage, 'npm ' + stage + ' [-- <args>]')
+ var installedShallow = require('./completion/installed-shallow.js')
+ CMD.completion = function (opts, cb) {
+ installedShallow(opts, function (d) {
+ return d.scripts && d.scripts[stage]
+ }, cb)
+ }
+ return CMD
+}
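
With this factory extracted, the thin command wrappers (lib/start.js, lib/stop.js, lib/restart.js, lib/test.js) can reduce to one-liners along these lines (a sketch, not the literal file contents):

    // e.g. a start command that simply delegates to `npm run-script start`
    module.exports = require('./utils/lifecycle-cmd.js')('start')

    // `npm start -- --port 8080` then resolves to:
    //   npm.commands['run-script'](['start', '--port', '8080'], cb)
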
diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js
index 412c1c6944..2d3265e0eb 100644
--- a/deps/npm/lib/utils/lifecycle.js
+++ b/deps/npm/lib/utils/lifecycle.js
@@ -1,458 +1,14 @@
-exports = module.exports = lifecycle
-exports.cmd = cmd
-exports.makeEnv = makeEnv
-exports._incorrectWorkingDirectory = _incorrectWorkingDirectory
+exports = module.exports = runLifecycle
-var log = require('npmlog')
-var spawn = require('./spawn')
-var npm = require('../npm.js')
-var path = require('path')
-var fs = require('graceful-fs')
-var chain = require('slide').chain
-var Stream = require('stream').Stream
-var PATH = 'PATH'
-var uidNumber = require('uid-number')
-var umask = require('./umask')
-var usage = require('./usage')
-var output = require('./output.js')
-var which = require('which')
+const lifecycleOpts = require('../config/lifecycle')
+const lifecycle = require('npm-lifecycle')
-// windows calls it's path 'Path' usually, but this is not guaranteed.
-if (process.platform === 'win32') {
- PATH = 'Path'
- Object.keys(process.env).forEach(function (e) {
- if (e.match(/^PATH$/i)) {
- PATH = e
- }
- })
-}
-
-function logid (pkg, stage) {
- return pkg._id + '~' + stage + ':'
-}
-
-function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
- if (typeof cb !== 'function') {
- cb = failOk
- failOk = false
- }
- if (typeof cb !== 'function') {
- cb = unsafe
- unsafe = false
- }
- if (typeof cb !== 'function') {
- cb = wd
- wd = null
- }
-
- while (pkg && pkg._data) pkg = pkg._data
- if (!pkg) return cb(new Error('Invalid package data'))
-
- log.info('lifecycle', logid(pkg, stage), pkg._id)
- if (!pkg.scripts) pkg.scripts = {}
-
- if (npm.config.get('ignore-scripts')) {
- log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-scripts is set to true', pkg._id)
- pkg.scripts = {}
- }
- if (stage === 'prepublish' && npm.config.get('ignore-prepublish')) {
- log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-prepublish is set to true', pkg._id)
- delete pkg.scripts.prepublish
- }
-
- if (!pkg.scripts[stage]) return cb()
-
- validWd(wd || path.resolve(npm.dir, pkg.name), function (er, wd) {
- if (er) return cb(er)
-
- unsafe = unsafe || npm.config.get('unsafe-perm')
-
- if ((wd.indexOf(npm.dir) !== 0 || _incorrectWorkingDirectory(wd, pkg)) &&
- !unsafe && pkg.scripts[stage]) {
- log.warn('lifecycle', logid(pkg, stage), 'cannot run in wd',
- '%s %s (wd=%s)', pkg._id, pkg.scripts[stage], wd
- )
- return cb()
- }
-
- // set the env variables, then run scripts as a child process.
- var env = makeEnv(pkg)
- env.npm_lifecycle_event = stage
- env.npm_node_execpath = env.NODE = env.NODE || process.execPath
- env.npm_execpath = require.main.filename
-
- // 'nobody' typically doesn't have permission to write to /tmp
- // even if it's never used, sh freaks out.
- if (!npm.config.get('unsafe-perm')) env.TMPDIR = wd
-
- lifecycle_(pkg, stage, wd, env, unsafe, failOk, cb)
- })
-}
-
-function _incorrectWorkingDirectory (wd, pkg) {
- return wd.lastIndexOf(pkg.name) !== wd.length - pkg.name.length
-}
-
-function lifecycle_ (pkg, stage, wd, env, unsafe, failOk, cb) {
- var pathArr = []
- var p = wd.split(/[\\\/]node_modules[\\\/]/)
- var acc = path.resolve(p.shift())
-
- p.forEach(function (pp) {
- pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
- acc = path.join(acc, 'node_modules', pp)
- })
- pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
-
- // we also unshift the bundled node-gyp-bin folder so that
- // the bundled one will be used for installing things.
- pathArr.unshift(path.join(__dirname, '..', '..', 'bin', 'node-gyp-bin'))
-
- if (shouldPrependCurrentNodeDirToPATH()) {
- // prefer current node interpreter in child scripts
- pathArr.push(path.dirname(process.execPath))
- }
-
- if (env[PATH]) pathArr.push(env[PATH])
- env[PATH] = pathArr.join(process.platform === 'win32' ? ';' : ':')
-
- var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
-
- if (packageLifecycle) {
- // define this here so it's available to all scripts.
- env.npm_lifecycle_script = pkg.scripts[stage]
- } else {
- log.silly('lifecycle', logid(pkg, stage), 'no script for ' + stage + ', continuing')
- }
-
- function done (er) {
- if (er) {
- if (npm.config.get('force')) {
- log.info('lifecycle', logid(pkg, stage), 'forced, continuing', er)
- er = null
- } else if (failOk) {
- log.warn('lifecycle', logid(pkg, stage), 'continuing anyway', er.message)
- er = null
- }
- }
- cb(er)
- }
-
- chain(
- [
- packageLifecycle && [runPackageLifecycle, pkg, env, wd, unsafe],
- [runHookLifecycle, pkg, env, wd, unsafe]
- ],
- done
- )
-}
-
-function shouldPrependCurrentNodeDirToPATH () {
- var cfgsetting = npm.config.get('scripts-prepend-node-path')
- if (cfgsetting === false) return false
- if (cfgsetting === true) return true
-
- var isDifferentNodeInPath
-
- var isWindows = process.platform === 'win32'
- var foundExecPath
- try {
- foundExecPath = which.sync(path.basename(process.execPath), {pathExt: isWindows ? ';' : ':'})
- // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable.
- isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !==
- fs.realpathSync(foundExecPath).toUpperCase()
- } catch (e) {
- isDifferentNodeInPath = true
- }
-
- if (cfgsetting === 'warn-only') {
- if (isDifferentNodeInPath && !shouldPrependCurrentNodeDirToPATH.hasWarned) {
- if (foundExecPath) {
- log.warn('lifecycle', 'The node binary used for scripts is', foundExecPath, 'but npm is using', process.execPath, 'itself. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
- } else {
- log.warn('lifecycle', 'npm is using', process.execPath, 'but there is no node binary in the current PATH. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
- }
- shouldPrependCurrentNodeDirToPATH.hasWarned = true
- }
-
- return false
- }
-
- return isDifferentNodeInPath
-}
-
-function validWd (d, cb) {
- fs.stat(d, function (er, st) {
- if (er || !st.isDirectory()) {
- var p = path.dirname(d)
- if (p === d) {
- return cb(new Error('Could not find suitable wd'))
- }
- return validWd(p, cb)
- }
- return cb(null, d)
- })
-}
-
-function runPackageLifecycle (pkg, env, wd, unsafe, cb) {
- // run package lifecycle scripts in the package root, or the nearest parent.
- var stage = env.npm_lifecycle_event
- var cmd = env.npm_lifecycle_script
-
- var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
- '\n> ' + cmd + '\n'
- runCmd(note, cmd, pkg, env, stage, wd, unsafe, cb)
-}
-
-var running = false
-var queue = []
-function dequeue () {
- running = false
- if (queue.length) {
- var r = queue.shift()
- runCmd.apply(null, r)
- }
-}
-
-function runCmd (note, cmd, pkg, env, stage, wd, unsafe, cb) {
- if (running) {
- queue.push([note, cmd, pkg, env, stage, wd, unsafe, cb])
- return
- }
-
- running = true
- log.pause()
- var user = unsafe ? null : npm.config.get('user')
- var group = unsafe ? null : npm.config.get('group')
-
- if (log.level !== 'silent') {
- output(note)
- }
- log.verbose('lifecycle', logid(pkg, stage), 'unsafe-perm in lifecycle', unsafe)
-
- if (process.platform === 'win32') {
- unsafe = true
- }
-
- if (unsafe) {
- runCmd_(cmd, pkg, env, wd, stage, unsafe, 0, 0, cb)
- } else {
- uidNumber(user, group, function (er, uid, gid) {
- runCmd_(cmd, pkg, env, wd, stage, unsafe, uid, gid, cb)
- })
- }
-}
-
-function runCmd_ (cmd, pkg, env, wd, stage, unsafe, uid, gid, cb_) {
- function cb (er) {
- cb_.apply(null, arguments)
- log.resume()
- process.nextTick(dequeue)
- }
-
- var conf = {
- cwd: wd,
- env: env,
- stdio: [ 0, 1, 2 ]
- }
-
- if (!unsafe) {
- conf.uid = uid ^ 0
- conf.gid = gid ^ 0
- }
-
- var sh = 'sh'
- var shFlag = '-c'
-
- var customShell = npm.config.get('script-shell')
-
- if (customShell) {
- sh = customShell
- } else if (process.platform === 'win32') {
- sh = process.env.comspec || 'cmd'
- shFlag = '/d /s /c'
- conf.windowsVerbatimArguments = true
- }
-
- log.verbose('lifecycle', logid(pkg, stage), 'PATH:', env[PATH])
- log.verbose('lifecycle', logid(pkg, stage), 'CWD:', wd)
- log.silly('lifecycle', logid(pkg, stage), 'Args:', [shFlag, cmd])
-
- var proc = spawn(sh, [shFlag, cmd], conf)
-
- proc.on('error', procError)
- proc.on('close', function (code, signal) {
- log.silly('lifecycle', logid(pkg, stage), 'Returned: code:', code, ' signal:', signal)
- if (signal) {
- process.kill(process.pid, signal)
- } else if (code) {
- var er = new Error('Exit status ' + code)
- er.errno = code
- }
- procError(er)
- })
- process.once('SIGTERM', procKill)
- process.once('SIGINT', procInterupt)
-
- function procError (er) {
- if (er) {
- log.info('lifecycle', logid(pkg, stage), 'Failed to exec ' + stage + ' script')
- er.message = pkg._id + ' ' + stage + ': `' + cmd + '`\n' +
- er.message
- if (er.code !== 'EPERM') {
- er.code = 'ELIFECYCLE'
- }
- fs.stat(npm.dir, function (statError, d) {
- if (statError && statError.code === 'ENOENT' && npm.dir.split(path.sep).slice(-1)[0] === 'node_modules') {
- log.warn('', 'Local package.json exists, but node_modules missing, did you mean to install?')
- }
- })
- er.pkgid = pkg._id
- er.stage = stage
- er.script = cmd
- er.pkgname = pkg.name
- }
- process.removeListener('SIGTERM', procKill)
- process.removeListener('SIGTERM', procInterupt)
- process.removeListener('SIGINT', procKill)
- return cb(er)
- }
- function procKill () {
- proc.kill()
- }
- function procInterupt () {
- proc.kill('SIGINT')
- proc.on('exit', function () {
- process.exit()
- })
- process.once('SIGINT', procKill)
- }
-}
-
-function runHookLifecycle (pkg, env, wd, unsafe, cb) {
- // check for a hook script, run if present.
- var stage = env.npm_lifecycle_event
- var hook = path.join(npm.dir, '.hooks', stage)
- var cmd = hook
-
- fs.stat(hook, function (er) {
- if (er) return cb()
- var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
- '\n> ' + cmd
- runCmd(note, hook, pkg, env, stage, wd, unsafe, cb)
- })
-}
-
-function makeEnv (data, prefix, env) {
- prefix = prefix || 'npm_package_'
- if (!env) {
- env = {}
- for (var i in process.env) {
- if (!i.match(/^npm_/)) {
- env[i] = process.env[i]
- }
- }
-
- // express and others respect the NODE_ENV value.
- if (npm.config.get('production')) env.NODE_ENV = 'production'
- } else if (!data.hasOwnProperty('_lifecycleEnv')) {
- Object.defineProperty(data, '_lifecycleEnv',
- {
- value: env,
- enumerable: false
- }
- )
- }
-
- for (i in data) {
- if (i.charAt(0) !== '_') {
- var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
- if (i === 'readme') {
- continue
- }
- if (data[i] && typeof data[i] === 'object') {
- try {
- // quick and dirty detection for cyclical structures
- JSON.stringify(data[i])
- makeEnv(data[i], envKey + '_', env)
- } catch (ex) {
- // usually these are package objects.
- // just get the path and basic details.
- var d = data[i]
- makeEnv(
- { name: d.name, version: d.version, path: d.path },
- envKey + '_',
- env
- )
- }
- } else {
- env[envKey] = String(data[i])
- env[envKey] = env[envKey].indexOf('\n') !== -1
- ? JSON.stringify(env[envKey])
- : env[envKey]
- }
- }
+function runLifecycle (pkg, stage, wd, moreOpts, cb) {
+ if (typeof moreOpts === 'function') {
+ cb = moreOpts
+ moreOpts = null
}
- if (prefix !== 'npm_package_') return env
-
- prefix = 'npm_config_'
- var pkgConfig = {}
- var keys = npm.config.keys
- var pkgVerConfig = {}
- var namePref = data.name + ':'
- var verPref = data.name + '@' + data.version + ':'
-
- keys.forEach(function (i) {
- // in some rare cases (e.g. working with nerf darts), there are segmented
- // "private" (underscore-prefixed) config names -- don't export
- if (i.charAt(0) === '_' && i.indexOf('_' + namePref) !== 0 || i.match(/:_/)) {
- return
- }
- var value = npm.config.get(i)
- if (value instanceof Stream || Array.isArray(value)) return
- if (i.match(/umask/)) value = umask.toString(value)
- if (!value) value = ''
- else if (typeof value === 'number') value = '' + value
- else if (typeof value !== 'string') value = JSON.stringify(value)
-
- value = value.indexOf('\n') !== -1
- ? JSON.stringify(value)
- : value
- i = i.replace(/^_+/, '')
- var k
- if (i.indexOf(namePref) === 0) {
- k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, '_')
- pkgConfig[k] = value
- } else if (i.indexOf(verPref) === 0) {
- k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, '_')
- pkgVerConfig[k] = value
- }
- var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
- env[envKey] = value
- })
-
- prefix = 'npm_package_config_'
- ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
- for (var i in conf) {
- var envKey = (prefix + i)
- env[envKey] = conf[i]
- }
- })
-
- return env
-}
-
-function cmd (stage) {
- function CMD (args, cb) {
- npm.commands['run-script']([stage].concat(args), cb)
- }
- CMD.usage = usage(stage, 'npm ' + stage + ' [-- <args>]')
- var installedShallow = require('./completion/installed-shallow.js')
- CMD.completion = function (opts, cb) {
- installedShallow(opts, function (d) {
- return d.scripts && d.scripts[stage]
- }, cb)
- }
- return CMD
+ const opts = lifecycleOpts(moreOpts)
+ lifecycle(pkg, stage, wd, opts).then(cb, cb)
}
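
A usage sketch of the slimmed-down wrapper, assuming it is required from inside npm's lib/ tree; the package data and working directory are illustrative. The options object replaces the old positional unsafe/failOk flags.

    const lifecycle = require('./utils/lifecycle.js')

    const pkg = {                      // illustrative package data
      name: 'example',
      version: '1.0.0',
      _id: 'example@1.0.0',
      scripts: { postinstall: 'node -e "console.log(42)"' }
    }

    lifecycle(pkg, 'postinstall', '/path/to/example', { failOk: true }, function (err) {
      if (err) return console.error('postinstall failed:', err)
      console.log('postinstall finished (or was skipped: no such script)')
    })
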
diff --git a/deps/npm/lib/utils/map-to-registry.js b/deps/npm/lib/utils/map-to-registry.js
index 1f9798c09f..d6e0a5b01f 100644
--- a/deps/npm/lib/utils/map-to-registry.js
+++ b/deps/npm/lib/utils/map-to-registry.js
@@ -2,6 +2,7 @@ var url = require('url')
var log = require('npmlog')
var npa = require('npm-package-arg')
+var config
module.exports = mapToRegistry
@@ -94,6 +95,8 @@ function scopeAuth (uri, registry, auth) {
} else {
log.silly('scopeAuth', uri, "doesn't share host with registry", registry)
}
+ if (!config) config = require('../npm').config
+ if (config.get('otp')) cleaned.otp = config.get('otp')
}
return cleaned
diff --git a/deps/npm/lib/utils/module-name.js b/deps/npm/lib/utils/module-name.js
index 43e0f5fb12..89957b181f 100644
--- a/deps/npm/lib/utils/module-name.js
+++ b/deps/npm/lib/utils/module-name.js
@@ -11,7 +11,7 @@ function pathToPackageName (dir) {
var name = path.relative(path.resolve(dir, '..'), dir)
var scoped = path.relative(path.resolve(dir, '../..'), dir)
if (scoped[0] === '@') return scoped.replace(/\\/g, '/')
- return name
+ return name.trim()
}
module.exports.test.isNotEmpty = isNotEmpty
@@ -22,7 +22,7 @@ function isNotEmpty (str) {
var unknown = 0
function moduleName (tree) {
var pkg = tree.package || tree
- if (isNotEmpty(pkg.name)) return pkg.name
+ if (isNotEmpty(pkg.name) && typeof pkg.name === 'string') return pkg.name.trim()
var pkgName = pathToPackageName(tree.path)
if (pkgName !== '') return pkgName
if (tree._invalidName != null) return tree._invalidName
diff --git a/deps/npm/lib/utils/pulse-till-done.js b/deps/npm/lib/utils/pulse-till-done.js
index 2669241306..b292c2fa56 100644
--- a/deps/npm/lib/utils/pulse-till-done.js
+++ b/deps/npm/lib/utils/pulse-till-done.js
@@ -1,22 +1,38 @@
'use strict'
-var validate = require('aproba')
-var log = require('npmlog')
+const validate = require('aproba')
+const log = require('npmlog')
+const Bluebird = require('bluebird')
-var pulsers = 0
-var pulse
+let pulsers = 0
+let pulse
+
+function pulseStart (prefix) {
+ if (++pulsers > 1) return
+ pulse = setInterval(function () {
+ log.gauge.pulse(prefix)
+ }, 150)
+}
+function pulseStop () {
+ if (--pulsers > 0) return
+ clearInterval(pulse)
+}
module.exports = function (prefix, cb) {
validate('SF', [prefix, cb])
if (!prefix) prefix = 'network'
- if (!pulsers++) {
- pulse = setInterval(function () {
- log.gauge.pulse(prefix)
- }, 250)
- }
+ pulseStart(prefix)
return function () {
- if (!--pulsers) {
- clearInterval(pulse)
- }
+ pulseStop()
cb.apply(null, arguments)
}
}
+module.exports.withPromise = pulseWhile
+
+function pulseWhile (prefix, promise) {
+ if (!promise) {
+ promise = prefix
+ prefix = ''
+ }
+ pulseStart(prefix)
+ return Bluebird.resolve(promise).finally(() => pulseStop())
+}
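
A usage sketch of the new promise-flavoured API, assuming the module is required from inside npm's lib/ tree; fetchSomething() is a placeholder for any async operation such as a registry request.

    const pulseTillDone = require('./utils/pulse-till-done.js')

    // placeholder async operation
    const fetchSomething = () => new Promise((resolve) => setTimeout(() => resolve('ok'), 500))

    // the gauge keeps pulsing until the promise settles, then the result passes through
    pulseTillDone.withPromise(fetchSomething())
      .then((result) => console.log('finished with', result))
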
diff --git a/deps/npm/lib/utils/read-user-info.js b/deps/npm/lib/utils/read-user-info.js
new file mode 100644
index 0000000000..81bb44c98f
--- /dev/null
+++ b/deps/npm/lib/utils/read-user-info.js
@@ -0,0 +1,65 @@
+'use strict'
+const Bluebird = require('bluebird')
+const readAsync = Bluebird.promisify(require('read'))
+const userValidate = require('npm-user-validate')
+const log = require('npmlog')
+
+exports.otp = readOTP
+exports.password = readPassword
+exports.username = readUsername
+exports.email = readEmail
+
+function read (opts) {
+ return Bluebird.try(() => {
+ log.clearProgress()
+ return readAsync(opts)
+ }).finally(() => {
+ log.showProgress()
+ })
+}
+
+function readOTP (msg, otp, isRetry) {
+ if (!msg) msg = 'Enter OTP: '
+ if (isRetry && otp && /^[\d ]+$|^[A-Fa-f0-9]{64,64}$/.test(otp)) return otp.replace(/\s+/g, '')
+
+ return read({prompt: msg, default: otp || ''})
+ .then((otp) => readOTP(msg, otp, true))
+}
+
+function readPassword (msg, password, isRetry) {
+ if (!msg) msg = 'npm password: '
+ if (isRetry && password) return password
+
+ return read({prompt: msg, silent: true, default: password || ''})
+ .then((password) => readPassword(msg, password, true))
+}
+
+function readUsername (msg, username, opts, isRetry) {
+ if (!msg) msg = 'npm username: '
+ if (isRetry && username) {
+ const error = userValidate.username(username)
+ if (error) {
+ opts.log && opts.log.warn(error.message)
+ } else {
+ return Promise.resolve(username.trim())
+ }
+ }
+
+ return read({prompt: msg, default: username || ''})
+ .then((username) => readUsername(msg, username, opts, true))
+}
+
+function readEmail (msg, email, opts, isRetry) {
+ if (!msg) msg = 'email (this IS public): '
+ if (isRetry && email) {
+ const error = userValidate.email(email)
+ if (error) {
+ opts.log && opts.log.warn(error.message)
+ } else {
+ return email.trim()
+ }
+ }
+
+ return read({prompt: msg, default: email || ''})
+ .then((username) => readEmail(msg, username, opts, true))
+}
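
A usage sketch, assuming the module is required from inside npm's lib/ tree; prompts are interactive and the returned values are only logged here for illustration.

    const log = require('npmlog')
    const readUserInfo = require('./utils/read-user-info.js')

    readUserInfo.username('npm username: ', null, { log: log })
      .then((username) => {
        return readUserInfo.password().then((password) => ({ username, password }))
      })
      .then((creds) => console.log('validated credentials for', creds.username))
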
diff --git a/deps/npm/lib/utils/tar.js b/deps/npm/lib/utils/tar.js
deleted file mode 100644
index 12719e37e2..0000000000
--- a/deps/npm/lib/utils/tar.js
+++ /dev/null
@@ -1,454 +0,0 @@
-'use strict'
-
-// commands for packing and unpacking tarballs
-// this file is used by lib/cache.js
-
-var fs = require('graceful-fs')
-var path = require('path')
-var writeFileAtomic = require('write-file-atomic')
-var writeStreamAtomic = require('fs-write-stream-atomic')
-var log = require('npmlog')
-var uidNumber = require('uid-number')
-var readJson = require('read-package-json')
-var tar = require('tar')
-var zlib = require('zlib')
-var fstream = require('fstream')
-var Packer = require('fstream-npm')
-var iferr = require('iferr')
-var inherits = require('inherits')
-var npm = require('../npm.js')
-var rm = require('./gently-rm.js')
-var myUid = process.getuid && process.getuid()
-var myGid = process.getgid && process.getgid()
-var readPackageTree = require('read-package-tree')
-var union = require('lodash.union')
-var moduleName = require('./module-name.js')
-var packageId = require('./package-id.js')
-var pulseTillDone = require('../utils/pulse-till-done.js')
-
-if (process.env.SUDO_UID && myUid === 0) {
- if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID
- if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID
-}
-
-exports.pack = pack
-exports.unpack = unpack
-
-function pack (tarball, folder, pkg, cb) {
- log.verbose('tar pack', [tarball, folder])
-
- log.verbose('tarball', tarball)
- log.verbose('folder', folder)
-
- readJson(path.join(folder, 'package.json'), function (er, pkg) {
- if (er || !pkg.bundleDependencies) {
- pack_(tarball, folder, null, pkg, cb)
- } else {
- // we require this at runtime due to load-order issues, because recursive
- // requires fail if you replace the exports object, and we do, not in deps, but
- // in a dep of it.
- var computeMetadata = require('../install/deps.js').computeMetadata
-
- readPackageTree(folder, pulseTillDone('pack:readTree:' + packageId(pkg), iferr(cb, function (tree) {
- computeMetadata(tree)
- pack_(tarball, folder, tree, pkg, pulseTillDone('pack:' + packageId(pkg), cb))
- })))
- }
- })
-}
-
-function BundledPacker (props) {
- Packer.call(this, props)
-}
-inherits(BundledPacker, Packer)
-
-BundledPacker.prototype.applyIgnores = function (entry, partial, entryObj) {
- if (!entryObj || entryObj.type !== 'Directory') {
- // package.json files can never be ignored.
- if (entry === 'package.json') return true
-
- // readme files should never be ignored.
- if (entry.match(/^readme(\.[^\.]*)$/i)) return true
-
- // license files should never be ignored.
- if (entry.match(/^(license|licence)(\.[^\.]*)?$/i)) return true
-
- // copyright notice files should never be ignored.
- if (entry.match(/^(notice)(\.[^\.]*)?$/i)) return true
-
- // changelogs should never be ignored.
- if (entry.match(/^(changes|changelog|history)(\.[^\.]*)?$/i)) return true
- }
-
- // special rules. see below.
- if (entry === 'node_modules' && this.packageRoot) return true
-
- // package.json main file should never be ignored.
- var mainFile = this.package && this.package.main
- if (mainFile && path.resolve(this.path, entry) === path.resolve(this.path, mainFile)) return true
-
- // some files are *never* allowed under any circumstances
- // (VCS folders, native build cruft, npm cruft, regular cruft)
- if (entry === '.git' ||
- entry === 'CVS' ||
- entry === '.svn' ||
- entry === '.hg' ||
- entry === '.lock-wscript' ||
- entry.match(/^\.wafpickle-[0-9]+$/) ||
- (this.parent && this.parent.packageRoot && this.basename === 'build' &&
- entry === 'config.gypi') ||
- entry === 'npm-debug.log' ||
- entry === '.npmrc' ||
- entry.match(/^\..*\.swp$/) ||
- entry === '.DS_Store' ||
- entry.match(/^\._/) ||
- entry.match(/^.*\.orig$/) ||
- // Package locks are never allowed in tarballs -- use shrinkwrap instead
- entry === 'package-lock.json'
- ) {
- return false
- }
-
- // in a node_modules folder, we only include bundled dependencies
- // also, prevent packages in node_modules from being affected
- // by rules set in the containing package, so that
- // bundles don't get busted.
- // Also, once in a bundle, everything is installed as-is
- // To prevent infinite cycles in the case of cyclic deps that are
- // linked with npm link, even in a bundle, deps are only bundled
- // if they're not already present at a higher level.
- if (this.bundleMagic) {
- // bubbling up. stop here and allow anything the bundled pkg allows
- if (entry.charAt(0) === '@') {
- var firstSlash = entry.indexOf('/')
- // continue to list the packages in this scope
- if (firstSlash === -1) return true
-
- // bubbling up. stop here and allow anything the bundled pkg allows
- if (entry.indexOf('/', firstSlash + 1) !== -1) return true
- // bubbling up. stop here and allow anything the bundled pkg allows
- } else if (entry.indexOf('/') !== -1) {
- return true
- }
-
- // never include the .bin. It's typically full of platform-specific
- // stuff like symlinks and .cmd files anyway.
- if (entry === '.bin') return false
-
- // the package root.
- var p = this.parent
- // the directory before this one.
- var pp = p && p.parent
- // the directory before that (if this is scoped)
- if (pp && pp.basename[0] === '@') pp = pp && pp.parent
-
- // if this entry has already been bundled, and is a symlink,
- // and it is the *same* symlink as this one, then exclude it.
- if (pp && pp.bundleLinks && this.bundleLinks &&
- pp.bundleLinks[entry] &&
- pp.bundleLinks[entry] === this.bundleLinks[entry]) {
- return false
- }
-
- // since it's *not* a symbolic link, if we're *already* in a bundle,
- // then we should include everything.
- if (pp && pp.package && pp.basename === 'node_modules') {
- return true
- }
-
- // only include it at this point if it's a bundleDependency
- return this.isBundled(entry)
- }
- // if (this.bundled) return true
-
- return Packer.prototype.applyIgnores.call(this, entry, partial, entryObj)
-}
-
-function nameMatch (name) { return function (other) { return name === moduleName(other) } }
-
-function pack_ (tarball, folder, tree, pkg, cb) {
- function InstancePacker (props) {
- BundledPacker.call(this, props)
- }
- inherits(InstancePacker, BundledPacker)
- InstancePacker.prototype.isBundled = function (name) {
- var bd = this.package && this.package.bundleDependencies
- if (!bd) return false
-
- if (!Array.isArray(bd)) {
- throw new Error(packageId(this) + '\'s `bundledDependencies` should ' +
- 'be an array')
- }
- if (!tree) return false
-
- if (bd.indexOf(name) !== -1) return true
- var pkg = tree.children.filter(nameMatch(name))[0]
- if (!pkg) return false
- var requiredBy = [].concat(pkg.requiredBy)
- var seen = new Set()
- while (requiredBy.length) {
- var reqPkg = requiredBy.shift()
- if (seen.has(reqPkg)) continue
- seen.add(reqPkg)
- if (!reqPkg) continue
- if (reqPkg.parent === tree && bd.indexOf(moduleName(reqPkg)) !== -1) {
- return true
- }
- requiredBy = union(requiredBy, reqPkg.requiredBy)
- }
- return false
- }
-
- new InstancePacker({ path: folder, type: 'Directory', isDirectory: true })
- .on('error', function (er) {
- if (er) log.error('tar pack', 'Error reading ' + folder)
- return cb(er)
- })
-
- // By default, npm includes some proprietary attributes in the
- // package tarball. This is sane, and allowed by the spec.
- // However, npm *itself* excludes these from its own package,
- // so that it can be more easily bootstrapped using old and
- // non-compliant tar implementations.
- .pipe(tar.Pack({ noProprietary: !npm.config.get('proprietary-attribs') }))
- .on('error', function (er) {
- if (er) log.error('tar.pack', 'tar creation error', tarball)
- cb(er)
- })
- .pipe(zlib.Gzip())
- .on('error', function (er) {
- if (er) log.error('tar.pack', 'gzip error ' + tarball)
- cb(er)
- })
- .pipe(writeStreamAtomic(tarball))
- .on('error', function (er) {
- if (er) log.error('tar.pack', 'Could not write ' + tarball)
- cb(er)
- })
- .on('close', cb)
-}
-
-function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
- log.verbose('tar', 'unpack', tarball)
- log.verbose('tar', 'unpacking to', unpackTarget)
- if (typeof cb !== 'function') {
- cb = gid
- gid = null
- }
- if (typeof cb !== 'function') {
- cb = uid
- uid = null
- }
- if (typeof cb !== 'function') {
- cb = fMode
- fMode = npm.modes.file
- }
- if (typeof cb !== 'function') {
- cb = dMode
- dMode = npm.modes.exec
- }
-
- uidNumber(uid, gid, function (er, uid, gid) {
- if (er) return cb(er)
- unpack_(tarball, unpackTarget, dMode, fMode, uid, gid, cb)
- })
-}
-
-function unpack_ (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
- rm(unpackTarget, function (er) {
- if (er) return cb(er)
- // gzip {tarball} --decompress --stdout \
- // | tar -mvxpf - --strip-components=1 -C {unpackTarget}
- gunzTarPerm(tarball, unpackTarget,
- dMode, fMode,
- uid, gid,
- function (er, folder) {
- if (er) return cb(er)
- readJson(path.resolve(folder, 'package.json'), cb)
- })
- })
-}
-
-function gunzTarPerm (tarball, target, dMode, fMode, uid, gid, cb_) {
- if (!dMode) dMode = npm.modes.exec
- if (!fMode) fMode = npm.modes.file
- log.silly('gunzTarPerm', 'modes', [dMode.toString(8), fMode.toString(8)])
-
- var cbCalled = false
- function cb (er) {
- if (cbCalled) return
- cbCalled = true
- cb_(er, target)
- }
-
- var fst = fs.createReadStream(tarball)
-
- fst.on('open', function (fd) {
- fs.fstat(fd, function (er, st) {
- if (er) return fst.emit('error', er)
- if (st.size === 0) {
- er = new Error('0-byte tarball\n' +
- 'Please run `npm cache clean`')
- fst.emit('error', er)
- }
- })
- })
-
- // figure out who we're supposed to be, if we're not pretending
- // to be a specific user.
- if (npm.config.get('unsafe-perm') && process.platform !== 'win32') {
- uid = myUid
- gid = myGid
- }
-
- function extractEntry (entry) {
- log.silly('gunzTarPerm', 'extractEntry', entry.path)
- // never create things that are user-unreadable,
- // or dirs that are user-un-listable. Only leads to headaches.
- var originalMode = entry.mode = entry.mode || entry.props.mode
- entry.mode = entry.mode | (entry.type === 'Directory' ? dMode : fMode)
- entry.mode = entry.mode & (~npm.modes.umask)
- entry.props.mode = entry.mode
- if (originalMode !== entry.mode) {
- log.silly('gunzTarPerm', 'modified mode',
- [entry.path, originalMode, entry.mode])
- }
-
- // if there's a specific owner uid/gid that we want, then set that
- if (process.platform !== 'win32' &&
- typeof uid === 'number' &&
- typeof gid === 'number') {
- entry.props.uid = entry.uid = uid
- entry.props.gid = entry.gid = gid
- }
- }
-
- var extractOpts = { type: 'Directory', path: target, strip: 1 }
-
- if (process.platform !== 'win32' &&
- typeof uid === 'number' &&
- typeof gid === 'number') {
- extractOpts.uid = uid
- extractOpts.gid = gid
- }
-
- var sawIgnores = {}
- extractOpts.filter = function () {
- // symbolic links are not allowed in packages.
- if (this.type.match(/^.*Link$/)) {
- log.warn('excluding symbolic link',
- this.path.substr(target.length + 1) +
- ' -> ' + this.linkpath)
- return false
- }
-
- // Note: This mirrors logic in the fs read operations that are
- // employed during tarball creation, in the fstream-npm module.
- // It is duplicated here to handle tarballs that are created
- // using other means, such as system tar or git archive.
- if (this.type === 'File') {
- var base = path.basename(this.path)
- if (base === '.npmignore') {
- sawIgnores[ this.path ] = true
- } else if (base === '.gitignore') {
- var npmignore = this.path.replace(/\.gitignore$/, '.npmignore')
- if (sawIgnores[npmignore]) {
- // Skip this one, already seen.
- return false
- } else {
- // Rename, may be clobbered later.
- this.path = npmignore
- this._path = npmignore
- }
- }
- }
-
- return true
- }
-
- fst
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'error reading ' + tarball)
- cb(er)
- })
- .on('data', function OD (c) {
- // detect what it is.
- // Then, depending on that, we'll figure out whether it's
- // a single-file module, gzipped tarball, or naked tarball.
- // gzipped files all start with 1f8b08
- if (c[0] === 0x1F &&
- c[1] === 0x8B &&
- c[2] === 0x08) {
- fst
- .pipe(zlib.Unzip())
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'unzip error ' + tarball)
- cb(er)
- })
- .pipe(tar.Extract(extractOpts))
- .on('entry', extractEntry)
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'untar error ' + tarball)
- cb(er)
- })
- .on('close', cb)
- } else if (hasTarHeader(c)) {
- // naked tar
- fst
- .pipe(tar.Extract(extractOpts))
- .on('entry', extractEntry)
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'untar error ' + tarball)
- cb(er)
- })
- .on('close', cb)
- } else {
- // naked js file
- var jsOpts = { path: path.resolve(target, 'index.js') }
-
- if (process.platform !== 'win32' &&
- typeof uid === 'number' &&
- typeof gid === 'number') {
- jsOpts.uid = uid
- jsOpts.gid = gid
- }
-
- fst
- .pipe(fstream.Writer(jsOpts))
- .on('error', function (er) {
- if (er) log.error('tar.unpack', 'copy error ' + tarball)
- cb(er)
- })
- .on('close', function () {
- var j = path.resolve(target, 'package.json')
- readJson(j, function (er, d) {
- if (er) {
- log.error('not a package', tarball)
- return cb(er)
- }
- writeFileAtomic(j, JSON.stringify(d) + '\n', cb)
- })
- })
- }
-
- // now un-hook, and re-emit the chunk
- fst.removeListener('data', OD)
- fst.emit('data', c)
- })
-}
-
-function hasTarHeader (c) {
- return c[257] === 0x75 && // tar archives have 7573746172 at position
- c[258] === 0x73 && // 257 and 003030 or 202000 at position 262
- c[259] === 0x74 &&
- c[260] === 0x61 &&
- c[261] === 0x72 &&
-
- ((c[262] === 0x00 &&
- c[263] === 0x30 &&
- c[264] === 0x30) ||
-
- (c[262] === 0x20 &&
- c[263] === 0x20 &&
- c[264] === 0x00))
-}
diff --git a/deps/npm/lib/utils/unsupported.js b/deps/npm/lib/utils/unsupported.js
index 91f494f4be..13535515e4 100644
--- a/deps/npm/lib/utils/unsupported.js
+++ b/deps/npm/lib/utils/unsupported.js
@@ -1,13 +1,19 @@
'use strict'
var semver = require('semver')
-var supportedNode = '>= 4'
-var knownBroken = '>=0.1 <=0.7'
+var supportedNode = [
+ {ver: '4', min: '4.7.0'},
+ {ver: '6', min: '6.0.0'},
+ {ver: '7', min: '7.0.0'},
+ {ver: '8', min: '8.0.0'}
+]
+var knownBroken = '<4.7.0'
var checkVersion = exports.checkVersion = function (version) {
var versionNoPrerelease = version.replace(/-.*$/, '')
return {
+ version: versionNoPrerelease,
broken: semver.satisfies(versionNoPrerelease, knownBroken),
- unsupported: !semver.satisfies(versionNoPrerelease, supportedNode)
+ unsupported: !semver.satisfies(versionNoPrerelease, supportedNode.map(function (n) { return '^' + n.min }).join('||'))
}
}
@@ -15,8 +21,18 @@ exports.checkForBrokenNode = function () {
var nodejs = checkVersion(process.version)
if (nodejs.broken) {
console.error('ERROR: npm is known not to run on Node.js ' + process.version)
+ supportedNode.forEach(function (rel) {
+ if (semver.satisfies(nodejs.version, rel.ver)) {
+ console.error('Node.js ' + rel.ver + " is supported but the specific version you're running has")
+ console.error(`a bug known to break npm. Please update to at least ${rel.min} to use this`)
+ console.error('version of npm. You can find the latest release of Node.js at https://nodejs.org/')
+ process.exit(1)
+ }
+ })
+ var supportedMajors = supportedNode.map(function (n) { return n.ver }).join(', ')
console.error("You'll need to upgrade to a newer version in order to use this")
- console.error('version of npm. You can find the latest version at https://nodejs.org/')
+ console.error('version of npm. Supported versions are ' + supportedMajors + '. You can find the')
+ console.error('latest version at https://nodejs.org/')
process.exit(1)
}
}
@@ -25,9 +41,11 @@ exports.checkForUnsupportedNode = function () {
var nodejs = checkVersion(process.version)
if (nodejs.unsupported) {
var log = require('npmlog')
+ var supportedMajors = supportedNode.map(function (n) { return n.ver }).join(', ')
log.warn('npm', 'npm does not support Node.js ' + process.version)
log.warn('npm', 'You should probably upgrade to a newer version of node as we')
log.warn('npm', "can't make any promises that npm will work with this version.")
+ log.warn('npm', 'Supported releases of Node.js are the latest release of ' + supportedMajors + '.')
log.warn('npm', 'You can find the latest version at https://nodejs.org/')
}
}
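
For clarity, a small sketch of how the supported-version table above collapses into a single semver range for the check; the versions passed to satisfies() are illustrative.

    const semver = require('semver')

    const supportedNode = [
      {ver: '4', min: '4.7.0'},
      {ver: '6', min: '6.0.0'},
      {ver: '7', min: '7.0.0'},
      {ver: '8', min: '8.0.0'}
    ]
    const range = supportedNode.map(function (n) { return '^' + n.min }).join('||')

    console.log(range)                               // ^4.7.0||^6.0.0||^7.0.0||^8.0.0
    console.log(semver.satisfies('4.6.1', range))    // false: falls in the known-broken range
    console.log(semver.satisfies('8.5.0', range))    // true
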
diff --git a/deps/npm/lib/version.js b/deps/npm/lib/version.js
index c52b5158a0..edcd664f2a 100644
--- a/deps/npm/lib/version.js
+++ b/deps/npm/lib/version.js
@@ -278,14 +278,22 @@ function checkGit (localData, cb) {
})
}
+module.exports.buildCommitArgs = buildCommitArgs
+function buildCommitArgs (args) {
+ args = args || [ 'commit' ]
+ if (!npm.config.get('commit-hooks')) args.push('-n')
+ return args
+}
+
function _commit (version, localData, cb) {
const options = { env: process.env }
const message = npm.config.get('message').replace(/%s/g, version)
const sign = npm.config.get('sign-git-tag')
+ const commitArgs = buildCommitArgs([ 'commit', '-m', message ])
const flagForTag = sign ? '-sm' : '-am'
stagePackageFiles(localData, options).then(() => {
- return git.exec([ 'commit', '-m', message ], options)
+ return git.exec(commitArgs, options)
}).then(() => {
if (!localData.existingTag) {
return git.exec([