summaryrefslogtreecommitdiff
path: root/deps/npm/lib
diff options
context:
space:
mode:
authorKat Marchán <kzm@sykosomatic.org>2017-05-09 14:46:02 -0700
committerAnna Henningsen <anna@addaleax.net>2017-05-23 19:39:43 +0200
commitc0d858f8bb8ba5212548da2fba6a7bc02db0462b (patch)
tree99f043ec5aec3f5150a2aed0f62597234b158140 /deps/npm/lib
parent994617370e8e66f3ea9488fec32fd912e7902396 (diff)
downloadandroid-node-v8-c0d858f8bb8ba5212548da2fba6a7bc02db0462b.tar.gz
android-node-v8-c0d858f8bb8ba5212548da2fba6a7bc02db0462b.tar.bz2
android-node-v8-c0d858f8bb8ba5212548da2fba6a7bc02db0462b.zip
deps: upgrade npm beta to 5.0.0-beta.56
PR-URL: https://github.com/nodejs/node/pull/12936 Reviewed-By: Anna Henningsen <anna@addaleax.net>
Diffstat (limited to 'deps/npm/lib')
-rw-r--r--deps/npm/lib/build.js2
-rw-r--r--deps/npm/lib/cache.js380
-rw-r--r--deps/npm/lib/cache/add-local-tarball.js175
-rw-r--r--deps/npm/lib/cache/add-local.js153
-rw-r--r--deps/npm/lib/cache/add-named.js289
-rw-r--r--deps/npm/lib/cache/add-remote-git.js496
-rw-r--r--deps/npm/lib/cache/add-remote-tarball.js132
-rw-r--r--deps/npm/lib/cache/cached-package-root.js14
-rw-r--r--deps/npm/lib/cache/caching-client.js220
-rw-r--r--deps/npm/lib/cache/get-stat.js6
-rw-r--r--deps/npm/lib/config/defaults.js18
-rw-r--r--deps/npm/lib/config/pacote.js175
-rw-r--r--deps/npm/lib/dedupe.js20
-rw-r--r--deps/npm/lib/deprecate.js4
-rw-r--r--deps/npm/lib/doctor.js86
-rw-r--r--deps/npm/lib/doctor/check-files-permission.js20
-rw-r--r--deps/npm/lib/doctor/check-ping.js5
-rw-r--r--deps/npm/lib/doctor/checksum-cached-files.js62
-rw-r--r--deps/npm/lib/doctor/get-latest-npm-version.js5
-rw-r--r--deps/npm/lib/doctor/verify-cached-files.js19
-rw-r--r--deps/npm/lib/explore.js2
-rw-r--r--deps/npm/lib/fetch-package-metadata.js391
-rw-r--r--deps/npm/lib/help.js6
-rw-r--r--deps/npm/lib/install.js302
-rw-r--r--deps/npm/lib/install/action/extract.js163
-rw-r--r--deps/npm/lib/install/action/fetch.js31
-rw-r--r--deps/npm/lib/install/action/finalize.js131
-rw-r--r--deps/npm/lib/install/action/global-install.js2
-rw-r--r--deps/npm/lib/install/action/refresh-package-json.js38
-rw-r--r--deps/npm/lib/install/action/update-linked.js16
-rw-r--r--deps/npm/lib/install/actions.js202
-rw-r--r--deps/npm/lib/install/copy-tree.js32
-rw-r--r--deps/npm/lib/install/decompose-actions.js11
-rw-r--r--deps/npm/lib/install/deps.js475
-rw-r--r--deps/npm/lib/install/diff-trees.js32
-rw-r--r--deps/npm/lib/install/filter-invalid-actions.js36
-rw-r--r--deps/npm/lib/install/get-requested.js12
-rw-r--r--deps/npm/lib/install/inflate-bundled.js3
-rw-r--r--deps/npm/lib/install/inflate-shrinkwrap.js244
-rw-r--r--deps/npm/lib/install/is-registry-specifier.js6
-rw-r--r--deps/npm/lib/install/node.js27
-rw-r--r--deps/npm/lib/install/read-shrinkwrap.js70
-rw-r--r--deps/npm/lib/install/realize-shrinkwrap-specifier.js37
-rw-r--r--deps/npm/lib/install/save.js147
-rw-r--r--deps/npm/lib/install/update-package-json.js13
-rw-r--r--deps/npm/lib/ls.js39
-rw-r--r--deps/npm/lib/npm.js44
-rw-r--r--deps/npm/lib/outdated.js19
-rw-r--r--deps/npm/lib/pack.js134
-rw-r--r--deps/npm/lib/ping.js4
-rw-r--r--deps/npm/lib/prune.js8
-rw-r--r--deps/npm/lib/publish.js217
-rw-r--r--deps/npm/lib/search/all-package-metadata.js4
-rw-r--r--deps/npm/lib/shrinkwrap.js264
-rw-r--r--deps/npm/lib/uninstall.js7
-rw-r--r--deps/npm/lib/unpublish.js28
-rw-r--r--deps/npm/lib/utils/depr-check.js32
-rw-r--r--deps/npm/lib/utils/error-handler.js76
-rw-r--r--deps/npm/lib/utils/error-message.js43
-rw-r--r--deps/npm/lib/utils/gently-rm.js15
-rw-r--r--deps/npm/lib/utils/get-publish-config.js4
-rw-r--r--deps/npm/lib/utils/lifecycle.js4
-rw-r--r--deps/npm/lib/utils/map-to-registry.js2
-rw-r--r--deps/npm/lib/utils/package-integrity.js21
-rw-r--r--deps/npm/lib/utils/perf.js27
-rw-r--r--deps/npm/lib/utils/rename.js9
-rw-r--r--deps/npm/lib/utils/tar.js34
-rw-r--r--deps/npm/lib/version.js112
-rw-r--r--deps/npm/lib/view.js7
69 files changed, 2310 insertions, 3554 deletions
diff --git a/deps/npm/lib/build.js b/deps/npm/lib/build.js
index e6f600799d..5dd243a795 100644
--- a/deps/npm/lib/build.js
+++ b/deps/npm/lib/build.js
@@ -211,7 +211,7 @@ function linkBins (pkg, folder, parent, gtop, cb) {
var out = npm.config.get('parseable')
? dest + '::' + src + ':BINFILE'
: dest + ' -> ' + src
- output(out)
+ if (!npm.config.get('json') && !npm.config.get('parseable')) output(out)
cb()
})
}
diff --git a/deps/npm/lib/cache.js b/deps/npm/lib/cache.js
index 5ad07dfdcd..af1ac57e71 100644
--- a/deps/npm/lib/cache.js
+++ b/deps/npm/lib/cache.js
@@ -1,367 +1,135 @@
-// XXX lib/utils/tar.js and this file need to be rewritten.
-
-// URL-to-cache folder mapping:
-// : -> !
-// @ -> _
-// http://registry.npmjs.org/foo/version -> cache/http!/...
-//
-
-/*
-fetching a URL:
-1. Check for URL in inflight URLs. If present, add cb, and return.
-2. Acquire lock at {cache}/{sha(url)}.lock
- retries = {cache-lock-retries, def=10}
- stale = {cache-lock-stale, def=60000}
- wait = {cache-lock-wait, def=10000}
-3. if lock can't be acquired, then fail
-4. fetch url, clear lock, call cbs
-
-cache folders:
-1. urls: http!/server.com/path/to/thing
-2. c:\path\to\thing: file!/c!/path/to/thing
-3. /path/to/thing: file!/path/to/thing
-4. git@ private: git_github.com!npm/npm
-5. git://public: git!/github.com/npm/npm
-6. git+blah:// git-blah!/server.com/foo/bar
-
-adding a folder:
-1. tar into tmp/random/package.tgz
-2. untar into tmp/random/contents/package, stripping one dir piece
-3. tar tmp/random/contents/package to cache/n/v/package.tgz
-4. untar cache/n/v/package.tgz into cache/n/v/package
-5. rm tmp/random
-
-Adding a url:
-1. fetch to tmp/random/package.tgz
-2. goto folder(2)
-
-adding a name@version:
-1. registry.get(name/version)
-2. if response isn't 304, add url(dist.tarball)
-
-adding a name@range:
-1. registry.get(name)
-2. Find a version that satisfies
-3. add name@version
-
-adding a local tarball:
-1. untar to tmp/random/{blah}
-2. goto folder(2)
-
-adding a namespaced package:
-1. lookup registry for @namespace
-2. namespace_registry.get('name')
-3. add url(namespace/latest.tarball)
-*/
-
-exports = module.exports = cache
-
-cache.unpack = unpack
-cache.clean = clean
-cache.read = read
-
-var npm = require('./npm.js')
-var fs = require('graceful-fs')
-var writeFileAtomic = require('write-file-atomic')
-var assert = require('assert')
-var rm = require('./utils/gently-rm.js')
-var readJson = require('read-package-json')
-var log = require('npmlog')
-var path = require('path')
-var asyncMap = require('slide').asyncMap
-var tar = require('./utils/tar.js')
-var fileCompletion = require('./utils/completion/file-completion.js')
-var deprCheck = require('./utils/depr-check.js')
-var addNamed = require('./cache/add-named.js')
-var addLocal = require('./cache/add-local.js')
-var addRemoteTarball = require('./cache/add-remote-tarball.js')
-var addRemoteGit = require('./cache/add-remote-git.js')
-var inflight = require('inflight')
-var realizePackageSpecifier = require('realize-package-specifier')
-var npa = require('npm-package-arg')
-var getStat = require('./cache/get-stat.js')
-var cachedPackageRoot = require('./cache/cached-package-root.js')
-var mapToRegistry = require('./utils/map-to-registry.js')
-var output = require('./utils/output.js')
+'use strict'
+
+const BB = require('bluebird')
+
+const assert = require('assert')
+const cacache = require('cacache')
+const log = require('npmlog')
+const npa = require('npm-package-arg')
+const npm = require('./npm.js')
+const output = require('./utils/output.js')
+const pacote = require('pacote')
+const pacoteOpts = require('./config/pacote')
+const path = require('path')
+const rm = BB.promisify(require('./utils/gently-rm.js'))
+const unbuild = BB.promisify(npm.commands.unbuild)
cache.usage = 'npm cache add <tarball file>' +
'\nnpm cache add <folder>' +
'\nnpm cache add <tarball url>' +
'\nnpm cache add <git url>' +
'\nnpm cache add <name>@<version>' +
- '\nnpm cache ls [<path>]' +
- '\nnpm cache clean [<pkg>[@<version>]]'
+ '\nnpm cache clean' +
+ '\nnpm cache verify'
cache.completion = function (opts, cb) {
var argv = opts.conf.argv.remain
if (argv.length === 2) {
- return cb(null, ['add', 'ls', 'clean'])
+ return cb(null, ['add', 'clean'])
}
+ // TODO - eventually...
switch (argv[2]) {
case 'clean':
- case 'ls':
- // cache and ls are easy, because the completion is
- // what ls_ returns anyway.
- // just get the partial words, minus the last path part
- var p = path.dirname(opts.partialWords.slice(3).join('/'))
- if (p === '.') p = ''
- return ls_(p, 2, cb)
case 'add':
- // Same semantics as install and publish.
- return npm.commands.install.completion(opts, cb)
+ return cb(null, [])
}
}
+exports = module.exports = cache
function cache (args, cb) {
- var cmd = args.shift()
+ const cmd = args.shift()
+ let result
switch (cmd) {
- case 'rm': case 'clear': case 'clean': return clean(args, cb)
- case 'list': case 'sl': case 'ls': return ls(args, cb)
- case 'add': return add(args, npm.prefix, cb)
+ case 'rm': case 'clear': case 'clean':
+ result = clean(args)
+ break
+ case 'add':
+ result = add(args, npm.prefix)
+ break
+ case 'verify': case 'check':
+ result = verify()
+ break
default: return cb('Usage: ' + cache.usage)
}
-}
-
-// if the pkg and ver are in the cache, then
-// just do a readJson and return.
-// if they're not, then fetch them from the registry.
-function read (name, ver, forceBypass, cb) {
- assert(typeof name === 'string', 'must include name of module to install')
- assert(typeof cb === 'function', 'must include callback')
-
- if (forceBypass === undefined || forceBypass === null) forceBypass = true
-
- var root = cachedPackageRoot({name: name, version: ver})
- function c (er, data) {
- if (er) log.verbose('cache', 'addNamed error for', name + '@' + ver, er)
- if (data) deprCheck(data)
-
- return cb(er, data)
- }
-
- if (forceBypass && npm.config.get('force')) {
- log.verbose('using force', 'skipping cache')
- return addNamed(name, ver, null, c)
- }
-
- readJson(path.join(root, 'package', 'package.json'), function (er, data) {
- if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er)
-
- if (data) {
- if (!data.name) return cb(new Error('No name provided'))
- if (!data.version) return cb(new Error('No version provided'))
- }
-
- if (er) return addNamed(name, ver, null, c)
- else c(er, data)
- })
-}
-
-function normalize (args) {
- var normalized = ''
- if (args.length > 0) {
- var a = npa(args[0])
- if (a.name) normalized = a.name
- if (a.rawSpec) normalized = [normalized, a.rawSpec].join('/')
- if (args.length > 1) normalized = [normalized].concat(args.slice(1)).join('/')
- }
-
- if (normalized.substr(-1) === '/') {
- normalized = normalized.substr(0, normalized.length - 1)
- }
- normalized = path.normalize(normalized)
- log.silly('ls', 'normalized', normalized)
-
- return normalized
-}
-
-// npm cache ls [<path>]
-function ls (args, cb) {
- var prefix = npm.config.get('cache')
- if (prefix.indexOf(process.env.HOME) === 0) {
- prefix = '~' + prefix.substr(process.env.HOME.length)
+ if (!result || !result.then) {
+ throw new Error(`npm cache ${cmd} handler did not return a Promise`)
}
- ls_(normalize(args), npm.config.get('depth'), function (er, files) {
- output(files.map(function (f) {
- return path.join(prefix, f)
- }).join('\n').trim())
- cb(er, files)
- })
+ result.then(() => cb(), cb)
}
-// Calls cb with list of cached pkgs matching show.
-function ls_ (req, depth, cb) {
- return fileCompletion(npm.cache, req, depth, cb)
-}
-
-// npm cache clean [<path>]
-function clean (args, cb) {
- assert(typeof cb === 'function', 'must include callback')
-
+// npm cache clean [pkg]*
+cache.clean = clean
+function clean (args) {
if (!args) args = []
-
- var f = path.join(npm.cache, normalize(args))
- if (f === npm.cache) {
- fs.readdir(npm.cache, function (er, files) {
- if (er) return cb()
- asyncMap(
- files.filter(function (f) {
- return npm.config.get('force') || f !== '-'
- }).map(function (f) {
- return path.join(npm.cache, f)
- }),
- rm,
- cb
- )
- })
- } else {
- rm(f, cb)
+ if (args.length) {
+ return BB.reject(new Error('npm cache clear does not accept arguments'))
}
+ const cachePath = path.join(npm.cache, '_cacache')
+ if (!npm.config.get('force')) {
+ return BB.reject(new Error("As of npm@5, the npm cache self-heals from corruption issues and data extracted from the cache is guaranteed to be valid. If you want to make sure everything is consistent, use 'npm cache verify' instead.\n\nIf you're sure you want to delete the entire cache, rerun this command with --force."))
+ }
+ // TODO - remove specific packages or package versions
+ return rm(cachePath)
}
// npm cache add <tarball-url>
// npm cache add <pkg> <ver>
// npm cache add <tarball>
// npm cache add <folder>
-cache.add = function (pkg, ver, where, scrub, cb) {
+cache.add = function (pkg, ver, where, scrub) {
assert(typeof pkg === 'string', 'must include name of package to install')
- assert(typeof cb === 'function', 'must include callback')
-
if (scrub) {
- return clean([], function (er) {
- if (er) return cb(er)
- add([pkg, ver], where, cb)
+ return clean([]).then(() => {
+ return add([pkg, ver], where)
})
}
- return add([pkg, ver], where, cb)
+ return add([pkg, ver], where)
}
-var adding = 0
-function add (args, where, cb) {
- // this is hot code. almost everything passes through here.
- // the args can be any of:
- // ['url']
- // ['pkg', 'version']
- // ['pkg@version']
- // ['pkg', 'url']
- // This is tricky, because urls can contain @
- // Also, in some cases we get [name, null] rather
- // that just a single argument.
-
+function add (args, where) {
var usage = 'Usage:\n' +
' npm cache add <tarball-url>\n' +
' npm cache add <pkg>@<ver>\n' +
' npm cache add <tarball>\n' +
' npm cache add <folder>\n'
var spec
-
log.silly('cache add', 'args', args)
-
if (args[1] === undefined) args[1] = null
-
// at this point the args length must ==2
if (args[1] !== null) {
spec = args[0] + '@' + args[1]
} else if (args.length === 2) {
spec = args[0]
}
-
log.verbose('cache add', 'spec', spec)
-
- if (!spec) return cb(usage)
-
- adding++
- cb = afterAdd(cb)
-
- realizePackageSpecifier(spec, where, function (err, p) {
- if (err) return cb(err)
-
- log.silly('cache add', 'parsed spec', p)
-
- switch (p.type) {
- case 'local':
- case 'directory':
- addLocal(p, null, cb)
- break
- case 'remote':
- // get auth, if possible
- mapToRegistry(p.raw, npm.config, function (err, uri, auth) {
- if (err) return cb(err)
-
- addRemoteTarball(p.spec, { name: p.name }, null, auth, cb)
- })
- break
- case 'git':
- case 'hosted':
- addRemoteGit(p.rawSpec, cb)
- break
- default:
- if (p.name) return addNamed(p.name, p.spec, null, cb)
-
- cb(new Error("couldn't figure out how to install " + spec))
- }
- })
+ if (!spec) return BB.reject(new Error(usage))
+ log.silly('cache add', 'parsed spec', spec)
+ return pacote.prefetch(spec, pacoteOpts({where}))
}
-function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
- if (typeof cb !== 'function') {
- cb = gid
- gid = null
- }
- if (typeof cb !== 'function') {
- cb = uid
- uid = null
- }
- if (typeof cb !== 'function') {
- cb = fMode
- fMode = null
- }
- if (typeof cb !== 'function') {
- cb = dMode
- dMode = null
+cache.verify = verify
+function verify () {
+ const cache = path.join(npm.config.get('cache'), '_cacache')
+ let prefix = cache
+ if (prefix.indexOf(process.env.HOME) === 0) {
+ prefix = '~' + prefix.substr(process.env.HOME.length)
}
-
- read(pkg, ver, false, function (er) {
- if (er) {
- log.error('unpack', 'Could not read data for %s', pkg + '@' + ver)
- return cb(er)
- }
- npm.commands.unbuild([unpackTarget], true, function (er) {
- if (er) return cb(er)
- tar.unpack(
- path.join(cachedPackageRoot({ name: pkg, version: ver }), 'package.tgz'),
- unpackTarget,
- dMode, fMode,
- uid, gid,
- cb
- )
- })
+ return cacache.verify(cache).then((stats) => {
+ output(`Cache verified and compressed (${prefix}):`)
+ output(`Content verified: ${stats.verifiedContent} (${stats.keptSize} bytes)`)
+ stats.badContentCount && output(`Corrupted content removed: ${stats.badContentCount}`)
+ stats.reclaimedCount && output(`Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
+ stats.missingContent && output(`Missing content: ${stats.missingContent}`)
+ output(`Index entries: ${stats.totalEntries}`)
+ output(`Finished in ${stats.runTime.total / 1000}s`)
})
}
-function afterAdd (cb) {
- return function (er, data) {
- adding--
-
- if (er || !data || !data.name || !data.version) return cb(er, data)
- log.silly('cache', 'afterAdd', data.name + '@' + data.version)
-
- // Save the resolved, shasum, etc. into the data so that the next
- // time we load from this cached data, we have all the same info.
- // Ignore if it fails.
- var pj = path.join(cachedPackageRoot(data), 'package', 'package.json')
-
- var done = inflight(pj, cb)
- if (!done) return log.verbose('afterAdd', pj, 'already in flight; not writing')
- log.verbose('afterAdd', pj, 'not in flight; writing')
-
- getStat(function (er, cs) {
- if (er) return done(er)
- writeFileAtomic(pj, JSON.stringify(data), { chown: cs }, function (er) {
- if (!er) log.verbose('afterAdd', pj, 'written')
- return done(null, data)
- })
- })
- }
+cache.unpack = unpack
+function unpack (pkg, ver, unpackTarget, dmode, fmode, uid, gid) {
+ return unbuild([unpackTarget], true).then(() => {
+ const opts = pacoteOpts({dmode, fmode, uid, gid, offline: true})
+ return pacote.extract(npa.resolve(pkg, ver), unpackTarget, opts)
+ })
}
diff --git a/deps/npm/lib/cache/add-local-tarball.js b/deps/npm/lib/cache/add-local-tarball.js
deleted file mode 100644
index d0952b64ef..0000000000
--- a/deps/npm/lib/cache/add-local-tarball.js
+++ /dev/null
@@ -1,175 +0,0 @@
-var mkdir = require('mkdirp')
-var assert = require('assert')
-var fs = require('graceful-fs')
-var writeFileAtomic = require('write-file-atomic')
-var path = require('path')
-var sha = require('sha')
-var npm = require('../npm.js')
-var log = require('npmlog')
-var tar = require('../utils/tar.js')
-var pathIsInside = require('path-is-inside')
-var getCacheStat = require('./get-stat.js')
-var cachedPackageRoot = require('./cached-package-root.js')
-var chownr = require('chownr')
-var inflight = require('inflight')
-var once = require('once')
-var writeStreamAtomic = require('fs-write-stream-atomic')
-var tempFilename = require('../utils/temp-filename.js')
-var rimraf = require('rimraf')
-var packageId = require('../utils/package-id.js')
-
-module.exports = addLocalTarball
-
-function addLocalTarball (p, pkgData, shasum, cb) {
- assert(typeof p === 'string', 'must have path')
- assert(typeof cb === 'function', 'must have callback')
-
- if (!pkgData) pkgData = {}
-
- // If we don't have a shasum yet, compute it.
- if (!shasum) {
- return sha.get(p, function (er, shasum) {
- if (er) return cb(er)
- log.silly('addLocalTarball', 'shasum (computed)', shasum)
- addLocalTarball(p, pkgData, shasum, cb)
- })
- }
-
- if (pathIsInside(p, npm.cache)) {
- if (path.basename(p) !== 'package.tgz') {
- return cb(new Error('Not a valid cache tarball name: ' + p))
- }
- log.verbose('addLocalTarball', 'adding from inside cache', p)
- return addPlacedTarball(p, pkgData, shasum, cb)
- }
-
- addTmpTarball(p, pkgData, shasum, function (er, data) {
- if (data) {
- data._resolved = p
- data._shasum = data._shasum || shasum
- }
- return cb(er, data)
- })
-}
-
-function addPlacedTarball (p, pkgData, shasum, cb) {
- assert(pkgData, 'should have package data by now')
- assert(typeof cb === 'function', 'cb function required')
-
- getCacheStat(function (er, cs) {
- if (er) return cb(er)
- return addPlacedTarball_(p, pkgData, cs.uid, cs.gid, shasum, cb)
- })
-}
-
-function addPlacedTarball_ (p, pkgData, uid, gid, resolvedSum, cb) {
- var folder = path.join(cachedPackageRoot(pkgData), 'package')
-
- // First, make sure we have the shasum, if we don't already.
- if (!resolvedSum) {
- sha.get(p, function (er, shasum) {
- if (er) return cb(er)
- addPlacedTarball_(p, pkgData, uid, gid, shasum, cb)
- })
- return
- }
-
- mkdir(folder, function (er) {
- if (er) return cb(er)
- var pj = path.join(folder, 'package.json')
- var json = JSON.stringify(pkgData, null, 2)
- writeFileAtomic(pj, json, function (er) {
- cb(er, pkgData)
- })
- })
-}
-
-function addTmpTarball (tgz, pkgData, shasum, cb) {
- assert(typeof cb === 'function', 'must have callback function')
- assert(shasum, 'must have shasum by now')
-
- cb = inflight('addTmpTarball:' + tgz, cb)
- if (!cb) return log.verbose('addTmpTarball', tgz, 'already in flight; not adding')
- log.verbose('addTmpTarball', tgz, 'not in flight; adding')
-
- // we already have the package info, so just move into place
- if (pkgData && pkgData.name && pkgData.version) {
- log.verbose(
- 'addTmpTarball',
- 'already have metadata; skipping unpack for',
- packageId(pkgData)
- )
- return addTmpTarball_(tgz, pkgData, shasum, cb)
- }
-
- // This is a tarball we probably downloaded from the internet. The shasum's
- // already been checked, but we haven't ever had a peek inside, so we unpack
- // it here just to make sure it is what it says it is.
- //
- // NOTE: we might not have any clue what we think it is, for example if the
- // user just did `npm install ./foo.tgz`
-
- var target = tempFilename('unpack')
- getCacheStat(function (er, cs) {
- if (er) return cb(er)
-
- log.verbose('addTmpTarball', 'validating metadata from', tgz)
- tar.unpack(tgz, target, null, null, cs.uid, cs.gid, function (unpackEr, data) {
- // cleanup the extracted package and move on with the metadata
- rimraf(target, function () {
- if (unpackEr) return cb(unpackEr)
- // check that this is what we expected.
- if (!data.name) {
- return cb(new Error('No name provided'))
- } else if (pkgData.name && data.name !== pkgData.name) {
- return cb(new Error('Invalid Package: expected ' + pkgData.name +
- ' but found ' + data.name))
- }
-
- if (!data.version) {
- return cb(new Error('No version provided'))
- } else if (pkgData.version && data.version !== pkgData.version) {
- return cb(new Error('Invalid Package: expected ' +
- packageId(pkgData) +
- ' but found ' + packageId(data)))
- }
-
- addTmpTarball_(tgz, data, shasum, cb)
- })
- })
- })
-}
-
-function addTmpTarball_ (tgz, data, shasum, cb) {
- assert(typeof cb === 'function', 'must have callback function')
- cb = once(cb)
-
- assert(data.name, 'should have package name by now')
- assert(data.version, 'should have package version by now')
-
- var root = cachedPackageRoot(data)
- var pkg = path.resolve(root, 'package')
- var target = path.resolve(root, 'package.tgz')
- getCacheStat(function (er, cs) {
- if (er) return cb(er)
- mkdir(pkg, function (er, created) {
- // chown starting from the first dir created by mkdirp,
- // or the root dir, if none had to be created, so that
- // we know that we get all the children.
- function chown () {
- chownr(created || root, cs.uid, cs.gid, done)
- }
-
- if (er) return cb(er)
- var read = fs.createReadStream(tgz)
- var write = writeStreamAtomic(target, { mode: npm.modes.file })
- var fin = cs.uid && cs.gid ? chown : done
- read.on('error', cb).pipe(write).on('error', cb).on('close', fin)
- })
- })
-
- function done () {
- data._shasum = data._shasum || shasum
- cb(null, data)
- }
-}
diff --git a/deps/npm/lib/cache/add-local.js b/deps/npm/lib/cache/add-local.js
deleted file mode 100644
index 221505625e..0000000000
--- a/deps/npm/lib/cache/add-local.js
+++ /dev/null
@@ -1,153 +0,0 @@
-var assert = require('assert')
-var path = require('path')
-var mkdir = require('mkdirp')
-var chownr = require('chownr')
-var pathIsInside = require('path-is-inside')
-var readJson = require('read-package-json')
-var log = require('npmlog')
-var npm = require('../npm.js')
-var tar = require('../utils/tar.js')
-var deprCheck = require('../utils/depr-check.js')
-var prepublishWarning = require('../utils/warn-deprecated.js')('prepublish-on-install')
-var getCacheStat = require('./get-stat.js')
-var cachedPackageRoot = require('./cached-package-root.js')
-var addLocalTarball = require('./add-local-tarball.js')
-var sha = require('sha')
-var inflight = require('inflight')
-var lifecycle = require('../utils/lifecycle.js')
-var iferr = require('iferr')
-var chain = require('slide').chain
-
-module.exports = addLocal
-
-function addLocal (p, pkgData, cb_) {
- assert(typeof p === 'object', 'must have spec info')
- assert(typeof cb_ === 'function', 'must have callback')
-
- pkgData = pkgData || {}
-
- function cb (er, data) {
- if (er) {
- log.error('addLocal', 'Could not install %s', p.spec)
- return cb_(er)
- }
- if (data && !data._fromHosted) {
- data._from = path.relative(npm.prefix, p.spec) || '.'
- var resolved = path.relative(npm.prefix, p.spec)
- if (resolved) data._resolved = 'file:' + resolved
- }
- return cb_(er, data)
- }
-
- if (p.type === 'directory') {
- addLocalDirectory(p.spec, pkgData, null, cb)
- } else {
- addLocalTarball(p.spec, pkgData, null, cb)
- }
-}
-
-// At this point, if shasum is set, it's something that we've already
-// read and checked. Just stashing it in the data at this point.
-function addLocalDirectory (p, pkgData, shasum, cb) {
- assert(pkgData, 'must pass package data')
- assert(typeof cb === 'function', 'must have callback')
-
- // if it's a folder, then read the package.json,
- // tar it to the proper place, and add the cache tar
- if (pathIsInside(p, npm.cache)) {
- return cb(new Error(
- 'Adding a cache directory to the cache will make the world implode.'
- ))
- }
-
- readJson(path.join(p, 'package.json'), false, function (er, data) {
- if (er) return cb(er)
-
- if (!data.name) {
- return cb(new Error('No name provided in package.json'))
- } else if (pkgData.name && pkgData.name !== data.name) {
- return cb(new Error(
- 'Invalid package: expected ' + pkgData.name + ' but found ' + data.name
- ))
- }
-
- if (!data.version) {
- return cb(new Error('No version provided in package.json'))
- } else if (pkgData.version && pkgData.version !== data.version) {
- return cb(new Error(
- 'Invalid package: expected ' + pkgData.name + '@' + pkgData.version +
- ' but found ' + data.name + '@' + data.version
- ))
- }
-
- deprCheck(data)
-
- // pack to {cache}/name/ver/package.tgz
- var root = cachedPackageRoot(data)
- var tgz = path.resolve(root, 'package.tgz')
- var pj = path.resolve(root, 'package/package.json')
-
- var wrapped = inflight(tgz, next)
- if (!wrapped) return log.verbose('addLocalDirectory', tgz, 'already in flight; waiting')
- log.verbose('addLocalDirectory', tgz, 'not in flight; packing')
-
- getCacheStat(function (er, cs) {
- mkdir(path.dirname(pj), function (er, made) {
- if (er) return wrapped(er)
- var doPrePublish = !pathIsInside(p, npm.tmp)
- if (doPrePublish) {
- // TODO: for `npm@5`, change the behavior and remove this warning.
- // see https://github.com/npm/npm/issues/10074 for details
- if (data && data.scripts && data.scripts.prepublish) {
- prepublishWarning([
- 'As of npm@5, `prepublish` scripts will run only for `npm publish`.',
- '(In npm@4 and previous versions, it also runs for `npm install`.)',
- 'See the deprecation note in `npm help scripts` for more information.'
- ])
- }
-
- chain(
- [
- [lifecycle, data, 'prepublish', p],
- [lifecycle, data, 'prepare', p]
- ],
- iferr(wrapped, thenPack)
- )
- } else {
- thenPack()
- }
- function thenPack () {
- tar.pack(tgz, p, data, function (er) {
- if (er) {
- log.error('addLocalDirectory', 'Could not pack', p, 'to', tgz)
- return wrapped(er)
- }
-
- if (!cs || isNaN(cs.uid) || isNaN(cs.gid)) return wrapped()
-
- chownr(made || tgz, cs.uid, cs.gid, function (er) {
- if (er && er.code === 'ENOENT') return wrapped()
- wrapped(er)
- })
- })
- }
- })
- })
-
- function next (er) {
- if (er) return cb(er)
- // if we have the shasum already, just add it
- if (shasum) {
- return addLocalTarball(tgz, data, shasum, cb)
- } else {
- sha.get(tgz, function (er, shasum) {
- if (er) {
- return cb(er)
- }
- data._shasum = shasum
- return addLocalTarball(tgz, data, shasum, cb)
- })
- }
- }
- })
-}
diff --git a/deps/npm/lib/cache/add-named.js b/deps/npm/lib/cache/add-named.js
deleted file mode 100644
index 3a48b42e7f..0000000000
--- a/deps/npm/lib/cache/add-named.js
+++ /dev/null
@@ -1,289 +0,0 @@
-var path = require('path')
-var assert = require('assert')
-var fs = require('graceful-fs')
-var http = require('http')
-var log = require('npmlog')
-var semver = require('semver')
-var readJson = require('read-package-json')
-var url = require('url')
-var npm = require('../npm.js')
-var deprCheck = require('../utils/depr-check.js')
-var inflight = require('inflight')
-var addRemoteTarball = require('./add-remote-tarball.js')
-var cachedPackageRoot = require('./cached-package-root.js')
-var mapToRegistry = require('../utils/map-to-registry.js')
-var pulseTillDone = require('../utils/pulse-till-done.js')
-var packageId = require('../utils/package-id.js')
-var pickManifestFromRegistryMetadata = require('../utils/pick-manifest-from-registry-metadata.js')
-
-module.exports = addNamed
-
-function getOnceFromRegistry (name, from, next, done) {
- function fixName (err, data, json, resp) {
- // this is only necessary until npm/npm-registry-client#80 is fixed
- if (err && err.pkgid && err.pkgid !== name) {
- err.message = err.message.replace(
- new RegExp(': ' + err.pkgid.replace(/(\W)/g, '\\$1') + '$'),
- ': ' + name
- )
- err.pkgid = name
- }
- next(err, data, json, resp)
- }
-
- mapToRegistry(name, npm.config, function (er, uri, auth) {
- if (er) return done(er)
-
- var key = 'registry:' + uri
- next = inflight(key, next)
- if (!next) return log.verbose(from, key, 'already in flight; waiting')
- else log.verbose(from, key, 'not in flight; fetching')
-
- npm.registry.get(uri, { auth: auth }, pulseTillDone('fetchRegistry', fixName))
- })
-}
-
-function addNamed (name, version, data, cb_) {
- assert(typeof name === 'string', 'must have module name')
- assert(typeof cb_ === 'function', 'must have callback')
-
- var key = name + '@' + version
- log.silly('addNamed', key)
-
- function cb (er, data) {
- if (data && !data._fromHosted) data._from = key
- cb_(er, data)
- }
-
- if (semver.valid(version, true)) {
- log.verbose('addNamed', JSON.stringify(version), 'is a plain semver version for', name)
- addNameVersion(name, version, data, cb)
- } else if (semver.validRange(version, true)) {
- log.verbose('addNamed', JSON.stringify(version), 'is a valid semver range for', name)
- addNameRange(name, version, data, cb)
- } else {
- log.verbose('addNamed', JSON.stringify(version), 'is being treated as a dist-tag for', name)
- addNameTag(name, version, data, cb)
- }
-}
-
-function addNameTag (name, tag, data, cb) {
- log.info('addNameTag', [name, tag])
- var explicit = true
- if (!tag) {
- explicit = false
- tag = npm.config.get('tag')
- }
-
- getOnceFromRegistry(name, 'addNameTag', next, cb)
-
- function next (er, data, json, resp) {
- if (!er) er = errorResponse(name, resp)
- if (er) return cb(er)
-
- log.silly('addNameTag', 'next cb for', name, 'with tag', tag)
-
- engineFilter(data)
- if (data['dist-tags'] && data['dist-tags'][tag] &&
- data.versions[data['dist-tags'][tag]]) {
- var ver = data['dist-tags'][tag]
- return addNamed(name, ver, data.versions[ver], cb)
- }
- if (!explicit && Object.keys(data.versions).length) {
- return addNamed(name, '*', data, cb)
- }
-
- er = installTargetsError(tag, data)
- return cb(er)
- }
-}
-
-function engineFilter (data) {
- var npmv = npm.version
- var nodev = npm.config.get('node-version')
- var strict = npm.config.get('engine-strict')
-
- if (!nodev || npm.config.get('force')) return data
-
- Object.keys(data.versions || {}).forEach(function (v) {
- var eng = data.versions[v].engines
- if (!eng) return
- if (!strict) return
- if (eng.node && !semver.satisfies(nodev, eng.node, true) ||
- eng.npm && !semver.satisfies(npmv, eng.npm, true)) {
- delete data.versions[v]
- }
- })
-}
-
-function addNameVersion (name, v, data, cb) {
- var ver = semver.valid(v, true)
- if (!ver) return cb(new Error('Invalid version: ' + v))
-
- var response
-
- if (data) {
- response = null
- return next()
- }
-
- getOnceFromRegistry(name, 'addNameVersion', setData, cb)
-
- function setData (er, d, json, resp) {
- if (!er) {
- er = errorResponse(name, resp)
- }
- if (er) return cb(er)
- data = d && d.versions[ver]
- if (!data) {
- er = new Error('version not found: ' + name + '@' + ver)
- er.package = name
- er.statusCode = 404
- return cb(er)
- }
- response = resp
- next()
- }
-
- function next () {
- deprCheck(data)
- var dist = data.dist
-
- if (!dist) return cb(new Error('No dist in ' + packageId(data) + ' package'))
-
- if (!dist.tarball) {
- return cb(new Error(
- 'No dist.tarball in ' + packageId(data) + ' package'
- ))
- }
-
- if ((response && response.statusCode !== 304) || npm.config.get('force')) {
- return fetchit()
- }
-
- // we got cached data, so let's see if we have a tarball.
- var pkgroot = cachedPackageRoot({ name: name, version: ver })
- var pkgtgz = path.join(pkgroot, 'package.tgz')
- var pkgjson = path.join(pkgroot, 'package', 'package.json')
- fs.stat(pkgtgz, function (er) {
- if (!er) {
- readJson(pkgjson, function (er, data) {
- if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er)
-
- if (data) {
- if (!data.name) return cb(new Error('No name provided'))
- if (!data.version) return cb(new Error('No version provided'))
-
- // check the SHA of the package we have, to ensure it wasn't installed
- // from somewhere other than the registry (eg, a fork)
- if (data._shasum && dist.shasum && data._shasum !== dist.shasum) {
- return fetchit()
- }
- }
-
- if (er) return fetchit()
- else return cb(null, data)
- })
- } else return fetchit()
- })
-
- function fetchit () {
- mapToRegistry(name, npm.config, function (er, _, auth, ruri) {
- if (er) return cb(er)
-
- // Use the same protocol as the registry. https registry --> https
- // tarballs, but only if they're the same hostname, or else detached
- // tarballs may not work.
- var tb = url.parse(dist.tarball)
- var rp = url.parse(ruri)
- if (tb.hostname === rp.hostname && tb.protocol !== rp.protocol) {
- tb.protocol = rp.protocol
- // If a different port is associated with the other protocol
- // we need to update that as well
- if (rp.port !== tb.port) {
- tb.port = rp.port
- delete tb.host
- }
- delete tb.href
- }
- tb = url.format(tb)
-
- // Only add non-shasum'ed packages if --forced. Only ancient things
- // would lack this for good reasons nowadays.
- if (!dist.shasum && !npm.config.get('force')) {
- return cb(new Error('package lacks shasum: ' + packageId(data)))
- }
-
- addRemoteTarball(tb, data, dist.shasum, auth, cb)
- })
- }
- }
-}
-
-function addNameRange (name, range, data, cb) {
- range = semver.validRange(range, true)
- if (range === null) {
- return cb(new Error(
- 'Invalid version range: ' + range
- ))
- }
-
- log.silly('addNameRange', { name: name, range: range, hasData: !!data })
-
- if (data) return next()
-
- getOnceFromRegistry(name, 'addNameRange', setData, cb)
-
- function setData (er, d, json, resp) {
- if (!er) {
- er = errorResponse(name, resp)
- }
- if (er) return cb(er)
- data = d
- next()
- }
-
- function next () {
- log.silly(
- 'addNameRange',
- 'number 2', { name: name, range: range, hasData: !!data }
- )
- engineFilter(data)
-
- log.silly('addNameRange', 'versions'
- , [data.name, Object.keys(data.versions || {})])
-
- var versions = Object.keys(data.versions).filter(function (v) { return semver.valid(v) })
- var picked = pickManifestFromRegistryMetadata(range, npm.config.get('tag'), versions, data)
- if (picked) return addNamed(name, picked.resolvedTo, picked.manifest, cb)
- return cb(installTargetsError(range, data))
- }
-}
-
-function installTargetsError (requested, data) {
- var targets = Object.keys(data['dist-tags']).filter(function (f) {
- return (data.versions || {}).hasOwnProperty(f)
- }).concat(Object.keys(data.versions || {}))
-
- requested = data.name + (requested ? "@'" + requested + "'" : '')
-
- targets = targets.length
- ? 'Valid install targets:\n' + targets.join(', ') + '\n'
- : 'No valid targets found.\n' +
- 'Perhaps not compatible with your version of node?'
-
- var er = new Error('No compatible version found: ' + requested + '\n' + targets)
- er.code = 'ETARGET'
- return er
-}
-
-function errorResponse (name, response) {
- var er
- if (response.statusCode >= 400) {
- er = new Error(http.STATUS_CODES[response.statusCode])
- er.statusCode = response.statusCode
- er.code = 'E' + er.statusCode
- er.pkgid = name
- }
- return er
-}
diff --git a/deps/npm/lib/cache/add-remote-git.js b/deps/npm/lib/cache/add-remote-git.js
deleted file mode 100644
index 0da1554f80..0000000000
--- a/deps/npm/lib/cache/add-remote-git.js
+++ /dev/null
@@ -1,496 +0,0 @@
-var assert = require('assert')
-var fs = require('graceful-fs')
-var path = require('path')
-var url = require('url')
-
-var chownr = require('chownr')
-var dezalgo = require('dezalgo')
-var hostedFromURL = require('hosted-git-info').fromUrl
-var inflight = require('inflight')
-var log = require('npmlog')
-var mkdir = require('mkdirp')
-var normalizeGitUrl = require('normalize-git-url')
-var npa = require('npm-package-arg')
-var realizePackageSpecifier = require('realize-package-specifier')
-var uniqueFilename = require('unique-filename')
-
-var addLocal = require('./add-local.js')
-var correctMkdir = require('../utils/correct-mkdir.js')
-var git = require('../utils/git.js')
-var npm = require('../npm.js')
-var rm = require('../utils/gently-rm.js')
-var tempFilename = require('../utils/temp-filename.js')
-
-var remotes = path.resolve(npm.config.get('cache'), '_git-remotes')
-var templates = path.join(remotes, '_templates')
-
-var VALID_VARIABLES = [
- 'GIT_ASKPASS',
- 'GIT_EXEC_PATH',
- 'GIT_PROXY_COMMAND',
- 'GIT_SSH',
- 'GIT_SSH_COMMAND',
- 'GIT_SSL_CAINFO',
- 'GIT_SSL_NO_VERIFY'
-]
-
-module.exports = addRemoteGit
-function addRemoteGit (uri, _cb) {
- assert(typeof uri === 'string', 'must have git URL')
- assert(typeof _cb === 'function', 'must have callback')
- var cb = dezalgo(_cb)
-
- log.verbose('addRemoteGit', 'caching', uri)
-
- // the URL comes in exactly as it was passed on the command line, or as
- // normalized by normalize-package-data / read-package-json / read-installed,
- // so figure out what to do with it using hosted-git-info
- var parsed = hostedFromURL(uri)
- if (parsed) {
- // normalize GitHub syntax to org/repo (for now)
- var from
- if (parsed.type === 'github' && parsed.getDefaultRepresentation() === 'shortcut') {
- from = parsed.path()
- } else {
- from = parsed.toString()
- }
-
- log.verbose('addRemoteGit', from, 'is a repository hosted by', parsed.type)
-
- // prefer explicit URLs to pushing everything through shortcuts
- if (parsed.getDefaultRepresentation() !== 'shortcut') {
- return tryClone(from, parsed.toString(), false, cb)
- }
-
- // try git:, then git+ssh:, then git+https: before failing
- tryGitProto(from, parsed, cb)
- } else {
- // verify that this is a Git URL before continuing
- parsed = npa(uri)
- if (parsed.type !== 'git') {
- return cb(new Error(uri + 'is not a Git or GitHub URL'))
- }
-
- tryClone(parsed.rawSpec, uri, false, cb)
- }
-}
-
-function tryGitProto (from, hostedInfo, cb) {
- var gitURL = hostedInfo.git()
- if (!gitURL) return tryHTTPS(from, hostedInfo, cb)
-
- log.silly('tryGitProto', 'attempting to clone', gitURL)
- tryClone(from, gitURL, true, function (er) {
- if (er) return tryHTTPS(from, hostedInfo, cb)
-
- cb.apply(this, arguments)
- })
-}
-
-function tryHTTPS (from, hostedInfo, cb) {
- var httpsURL = hostedInfo.https()
- if (!httpsURL) {
- return cb(new Error(from + ' can not be cloned via Git, SSH, or HTTPS'))
- }
-
- log.silly('tryHTTPS', 'attempting to clone', httpsURL)
- tryClone(from, httpsURL, true, function (er) {
- if (er) return trySSH(from, hostedInfo, cb)
-
- cb.apply(this, arguments)
- })
-}
-
-function trySSH (from, hostedInfo, cb) {
- var sshURL = hostedInfo.ssh()
- if (!sshURL) return tryHTTPS(from, hostedInfo, cb)
-
- log.silly('trySSH', 'attempting to clone', sshURL)
- tryClone(from, sshURL, false, cb)
-}
-
-function tryClone (from, combinedURL, silent, cb) {
- log.silly('tryClone', 'cloning', from, 'via', combinedURL)
-
- var normalized = normalizeGitUrl(combinedURL)
- var cloneURL = normalized.url
- var treeish = normalized.branch
-
- // ensure that similarly-named remotes don't collide
- var cachedRemote = uniqueFilename(remotes, combinedURL.replace(/[^a-zA-Z0-9]+/g, '-'), cloneURL)
- var repoID = path.relative(remotes, cachedRemote)
- cachedRemote = path.join(remotes, repoID)
-
- cb = inflight(repoID, cb)
- if (!cb) {
- return log.verbose('tryClone', repoID, 'already in flight; waiting')
- }
- log.verbose('tryClone', repoID, 'not in flight; caching')
-
- // initialize the remotes cache with the correct perms
- getGitDir(function (er) {
- if (er) return cb(er)
- fs.stat(cachedRemote, function (er, s) {
- if (er) return mirrorRemote(from, cloneURL, treeish, cachedRemote, silent, finish)
- if (!s.isDirectory()) return resetRemote(from, cloneURL, treeish, cachedRemote, finish)
-
- validateExistingRemote(from, cloneURL, treeish, cachedRemote, finish)
- })
-
- // always set permissions on the cached remote
- function finish (er, data) {
- if (er) return cb(er, data)
- addModeRecursive(cachedRemote, npm.modes.file, function (er) {
- return cb(er, data)
- })
- }
- })
-}
-
-// don't try too hard to hold on to a remote
-function resetRemote (from, cloneURL, treeish, cachedRemote, cb) {
- log.info('resetRemote', 'resetting', cachedRemote, 'for', from)
- rm(cachedRemote, function (er) {
- if (er) return cb(er)
- mirrorRemote(from, cloneURL, treeish, cachedRemote, false, cb)
- })
-}
-
-// reuse a cached remote when possible, but nuke it if it's in an
-// inconsistent state
-function validateExistingRemote (from, cloneURL, treeish, cachedRemote, cb) {
- git.whichAndExec(
- ['config', '--get', 'remote.origin.url'],
- { cwd: cachedRemote, env: gitEnv() },
- function (er, stdout, stderr) {
- var originURL
- if (stdout) {
- originURL = stdout.trim()
- log.silly('validateExistingRemote', from, 'remote.origin.url:', originURL)
- }
-
- if (stderr) stderr = stderr.trim()
- if (stderr || er) {
- log.warn('addRemoteGit', from, 'resetting remote', cachedRemote, 'because of error:', stderr || er)
- return resetRemote(from, cloneURL, treeish, cachedRemote, cb)
- } else if (cloneURL !== originURL) {
- log.warn(
- 'addRemoteGit',
- from,
- 'pre-existing cached repo', cachedRemote, 'points to', originURL, 'and not', cloneURL
- )
- return resetRemote(from, cloneURL, treeish, cachedRemote, cb)
- }
-
- log.verbose('validateExistingRemote', from, 'is updating existing cached remote', cachedRemote)
- updateRemote(from, cloneURL, treeish, cachedRemote, cb)
- }
- )
-}
-
-// make a complete bare mirror of the remote repo
-// NOTE: npm uses a blank template directory to prevent weird inconsistencies
-// https://github.com/npm/npm/issues/5867
-function mirrorRemote (from, cloneURL, treeish, cachedRemote, silent, cb) {
- mkdir(cachedRemote, function (er) {
- if (er) return cb(er)
-
- var args = [
- 'clone',
- '--template=' + templates,
- '--mirror',
- cloneURL, cachedRemote
- ]
- git.whichAndExec(
- ['clone', '--template=' + templates, '--mirror', cloneURL, cachedRemote],
- { cwd: cachedRemote, env: gitEnv() },
- function (er, stdout, stderr) {
- if (er) {
- var combined = (stdout + '\n' + stderr).trim()
- var command = 'git ' + args.join(' ') + ':'
- if (silent) {
- log.verbose(command, combined)
- } else {
- log.error(command, combined)
- }
- return cb(er)
- }
- log.verbose('mirrorRemote', from, 'git clone ' + cloneURL, stdout.trim())
- setPermissions(from, cloneURL, treeish, cachedRemote, cb)
- }
- )
- })
-}
-
-function setPermissions (from, cloneURL, treeish, cachedRemote, cb) {
- if (process.platform === 'win32') {
- log.verbose('setPermissions', from, 'skipping chownr on Windows')
- resolveHead(from, cloneURL, treeish, cachedRemote, cb)
- } else {
- getGitDir(function (er, cs) {
- if (er) {
- log.error('setPermissions', from, 'could not get cache stat')
- return cb(er)
- }
-
- chownr(cachedRemote, cs.uid, cs.gid, function (er) {
- if (er) {
- log.error(
- 'setPermissions',
- 'Failed to change git repository ownership under npm cache for',
- cachedRemote
- )
- return cb(er)
- }
-
- log.verbose('setPermissions', from, 'set permissions on', cachedRemote)
- resolveHead(from, cloneURL, treeish, cachedRemote, cb)
- })
- })
- }
-}
-
-// always fetch the origin, even right after mirroring, because this way
-// permissions will get set correctly
-function updateRemote (from, cloneURL, treeish, cachedRemote, cb) {
- git.whichAndExec(
- ['fetch', '-a', 'origin'],
- { cwd: cachedRemote, env: gitEnv() },
- function (er, stdout, stderr) {
- if (er) {
- var combined = (stdout + '\n' + stderr).trim()
- log.error('git fetch -a origin (' + cloneURL + ')', combined)
- return cb(er)
- }
- log.verbose('updateRemote', 'git fetch -a origin (' + cloneURL + ')', stdout.trim())
-
- setPermissions(from, cloneURL, treeish, cachedRemote, cb)
- }
- )
-}
-
-// branches and tags are both symbolic labels that can be attached to different
-// commits, so resolve the commit-ish to the current actual treeish the label
-// corresponds to
-//
-// important for shrinkwrap
-function resolveHead (from, cloneURL, treeish, cachedRemote, cb) {
- log.verbose('resolveHead', from, 'original treeish:', treeish)
- var args = ['rev-list', '-n1', treeish]
- git.whichAndExec(
- args,
- { cwd: cachedRemote, env: gitEnv() },
- function (er, stdout, stderr) {
- if (er) {
- log.error('git ' + args.join(' ') + ':', stderr)
- return cb(er)
- }
-
- var resolvedTreeish = stdout.trim()
- log.silly('resolveHead', from, 'resolved treeish:', resolvedTreeish)
-
- var resolvedURL = getResolved(cloneURL, resolvedTreeish)
- if (!resolvedURL) {
- return cb(new Error(
- 'unable to clone ' + from + ' because git clone string ' +
- cloneURL + ' is in a form npm can\'t handle'
- ))
- }
- log.verbose('resolveHead', from, 'resolved Git URL:', resolvedURL)
-
- // generate a unique filename
- var tmpdir = path.join(tempFilename('git-cache'), resolvedTreeish)
- log.silly('resolveHead', 'Git working directory:', tmpdir)
-
- mkdir(tmpdir, function (er) {
- if (er) return cb(er)
-
- cloneResolved(from, resolvedURL, resolvedTreeish, cachedRemote, tmpdir, cb)
- })
- }
- )
-}
-
-// make a clone from the mirrored cache so we have a temporary directory in
-// which we can check out the resolved treeish
-function cloneResolved (from, resolvedURL, resolvedTreeish, cachedRemote, tmpdir, cb) {
- var args = ['clone', cachedRemote, tmpdir]
- git.whichAndExec(
- args,
- { cwd: cachedRemote, env: gitEnv() },
- function (er, stdout, stderr) {
- stdout = (stdout + '\n' + stderr).trim()
- if (er) {
- log.error('git ' + args.join(' ') + ':', stderr)
- return cb(er)
- }
- log.verbose('cloneResolved', from, 'clone', stdout)
-
- checkoutTreeish(from, resolvedURL, resolvedTreeish, tmpdir, cb)
- }
- )
-}
-
-// there is no safe way to do a one-step clone to a treeish that isn't
-// guaranteed to be a branch, so explicitly check out the treeish once it's
-// cloned
-function checkoutTreeish (from, resolvedURL, resolvedTreeish, tmpdir, cb) {
- var args = ['checkout', resolvedTreeish]
- git.whichAndExec(
- args,
- { cwd: tmpdir, env: gitEnv() },
- function (er, stdout, stderr) {
- stdout = (stdout + '\n' + stderr).trim()
- if (er) {
- log.error('git ' + args.join(' ') + ':', stderr)
- return cb(er)
- }
- log.verbose('checkoutTreeish', from, 'checkout', stdout)
-
- updateSubmodules(from, resolvedURL, tmpdir, cb)
- }
- )
-}
-
-function updateSubmodules (from, resolvedURL, tmpdir, cb) {
- var args = ['submodule', '-q', 'update', '--init', '--recursive']
- git.whichAndExec(
- args,
- { cwd: tmpdir, env: gitEnv() },
- function (er, stdout, stderr) {
- stdout = (stdout + '\n' + stderr).trim()
- if (er) {
- log.error('git ' + args.join(' ') + ':', stderr)
- return cb(er)
- }
- log.verbose('updateSubmodules', from, 'submodule update', stdout)
-
- // convince addLocal that the checkout is a local dependency
- realizePackageSpecifier(tmpdir, function (er, spec) {
- if (er) {
- log.error('addRemoteGit', 'Failed to map', tmpdir, 'to a package specifier')
- return cb(er)
- }
-
- // ensure pack logic is applied
- // https://github.com/npm/npm/issues/6400
- addLocal(spec, null, function (er, data) {
- if (data) {
- if (npm.config.get('save-exact')) {
- log.verbose('addRemoteGit', 'data._from:', resolvedURL, '(save-exact)')
- data._from = resolvedURL
- } else {
- log.verbose('addRemoteGit', 'data._from:', from)
- data._from = from
- }
-
- log.verbose('addRemoteGit', 'data._resolved:', resolvedURL)
- data._resolved = resolvedURL
- }
-
- cb(er, data)
- })
- })
- }
- )
-}
-
-function getGitDir (cb) {
- correctMkdir(remotes, function (er, stats) {
- if (er) return cb(er)
-
- // We don't need global templates when cloning. Use an empty directory for
- // the templates, creating it (and setting its permissions) if necessary.
- mkdir(templates, function (er) {
- if (er) return cb(er)
-
- // Ensure that both the template and remotes directories have the correct
- // permissions.
- fs.chown(templates, stats.uid, stats.gid, function (er) {
- cb(er, stats)
- })
- })
- })
-}
-
-var gitEnv_
-function gitEnv () {
- // git responds to env vars in some weird ways in post-receive hooks
- // so don't carry those along.
- if (gitEnv_) return gitEnv_
-
- // allow users to override npm's insistence on not prompting for
- // passphrases, but default to just failing when credentials
- // aren't available
- gitEnv_ = { GIT_ASKPASS: 'echo' }
-
- for (var k in process.env) {
- if (!~VALID_VARIABLES.indexOf(k) && k.match(/^GIT/)) continue
- gitEnv_[k] = process.env[k]
- }
- return gitEnv_
-}
-
-addRemoteGit.getResolved = getResolved
-function getResolved (uri, treeish) {
- // normalize hosted-git-info clone URLs back into regular URLs
- // this will only work on URLs that hosted-git-info recognizes
- // https://github.com/npm/npm/issues/7961
- var rehydrated = hostedFromURL(uri)
- if (rehydrated) uri = rehydrated.toString()
-
- var parsed = url.parse(uri)
-
- // Checks for known protocols:
- // http:, https:, ssh:, and git:, with optional git+ prefix.
- if (!parsed.protocol ||
- !parsed.protocol.match(/^(((git\+)?(https?|ssh|file))|git|file):$/)) {
- uri = 'git+ssh://' + uri
- }
-
- if (!/^git[+:]/.test(uri)) {
- uri = 'git+' + uri
- }
-
- // Not all URIs are actually URIs, so use regex for the treeish.
- return uri.replace(/(?:#.*)?$/, '#' + treeish)
-}
-
-// similar to chmodr except it add permissions rather than overwriting them
-// adapted from https://github.com/isaacs/chmodr/blob/master/chmodr.js
-function addModeRecursive (cachedRemote, mode, cb) {
- fs.readdir(cachedRemote, function (er, children) {
- // Any error other than ENOTDIR means it's not readable, or doesn't exist.
- // Give up.
- if (er && er.code !== 'ENOTDIR') return cb(er)
- if (er || !children.length) return addMode(cachedRemote, mode, cb)
-
- var len = children.length
- var errState = null
- children.forEach(function (child) {
- addModeRecursive(path.resolve(cachedRemote, child), mode, then)
- })
-
- function then (er) {
- if (errState) return undefined
- if (er) return cb(errState = er)
- if (--len === 0) return addMode(cachedRemote, dirMode(mode), cb)
- }
- })
-}
-
-function addMode (cachedRemote, mode, cb) {
- fs.stat(cachedRemote, function (er, stats) {
- if (er) return cb(er)
- mode = stats.mode | mode
- fs.chmod(cachedRemote, mode, cb)
- })
-}
-
-// taken from https://github.com/isaacs/chmodr/blob/master/chmodr.js
-function dirMode (mode) {
- if (mode & parseInt('0400', 8)) mode |= parseInt('0100', 8)
- if (mode & parseInt('040', 8)) mode |= parseInt('010', 8)
- if (mode & parseInt('04', 8)) mode |= parseInt('01', 8)
- return mode
-}
diff --git a/deps/npm/lib/cache/add-remote-tarball.js b/deps/npm/lib/cache/add-remote-tarball.js
deleted file mode 100644
index 90296c111f..0000000000
--- a/deps/npm/lib/cache/add-remote-tarball.js
+++ /dev/null
@@ -1,132 +0,0 @@
-var mkdir = require('mkdirp')
-var assert = require('assert')
-var log = require('npmlog')
-var path = require('path')
-var sha = require('sha')
-var retry = require('retry')
-var writeStreamAtomic = require('fs-write-stream-atomic')
-var PassThrough = require('readable-stream').PassThrough
-var npm = require('../npm.js')
-var inflight = require('inflight')
-var addLocalTarball = require('./add-local-tarball.js')
-var cacheFile = require('npm-cache-filename')
-var rimraf = require('rimraf')
-var pulseTillDone = require('../utils/pulse-till-done.js')
-
-module.exports = addRemoteTarball
-
-function addRemoteTarball (u, pkgData, shasum, auth, cb_) {
- assert(typeof u === 'string', 'must have module URL')
- assert(typeof cb_ === 'function', 'must have callback')
-
- function cb (er, data) {
- if (data) {
- data._from = u
- data._resolved = u
- data._shasum = data._shasum || shasum
- }
- cb_(er, data)
- }
-
- cb_ = inflight(u, cb_)
- if (!cb_) return log.verbose('addRemoteTarball', u, 'already in flight; waiting')
- log.verbose('addRemoteTarball', u, 'not in flight; adding')
-
- // XXX Fetch direct to cache location, store tarballs under
- // ${cache}/registry.npmjs.org/pkg/-/pkg-1.2.3.tgz
- var tmp = cacheFile(npm.tmp, u)
-
- function next (er, resp, shasum) {
- if (er) return cb(er)
- addLocalTarball(tmp, pkgData, shasum, cleanup)
- }
- function cleanup (er, data) {
- if (er) return cb(er)
- rimraf(tmp, function () {
- cb(er, data)
- })
- }
-
- log.verbose('addRemoteTarball', [u, shasum])
- mkdir(path.dirname(tmp), function (er) {
- if (er) return cb(er)
- addRemoteTarball_(u, tmp, shasum, auth, next)
- })
-}
-
-function addRemoteTarball_ (u, tmp, shasum, auth, cb) {
- // Tuned to spread 3 attempts over about a minute.
- // See formula at <https://github.com/tim-kos/node-retry>.
- var operation = retry.operation({
- retries: npm.config.get('fetch-retries'),
- factor: npm.config.get('fetch-retry-factor'),
- minTimeout: npm.config.get('fetch-retry-mintimeout'),
- maxTimeout: npm.config.get('fetch-retry-maxtimeout')
- })
-
- operation.attempt(function (currentAttempt) {
- log.info(
- 'retry',
- 'fetch attempt', currentAttempt,
- 'at', (new Date()).toLocaleTimeString()
- )
- fetchAndShaCheck(u, tmp, shasum, auth, function (er, response, shasum) {
- // Only retry on 408, 5xx or no `response`.
- var sc = response && response.statusCode
- var statusRetry = !sc || (sc === 408 || sc >= 500)
- if (er && statusRetry && operation.retry(er)) {
- log.warn('retry', 'will retry, error on last attempt: ' + er)
- return
- }
- cb(er, response, shasum)
- })
- })
-}
-
-function fetchAndShaCheck (u, tmp, shasum, auth, cb) {
- cb = pulseTillDone('fetchTarball', cb)
- npm.registry.fetch(u, { auth: auth }, function (er, response) {
- if (er) {
- log.error('fetch failed', u)
- return cb(er, response)
- }
-
- var tarball = writeStreamAtomic(tmp, { mode: npm.modes.file })
- tarball.on('error', function (er) {
- cb(er)
- tarball.destroy()
- })
-
- tarball.on('finish', function () {
- if (!shasum) {
- // Well, we weren't given a shasum, so at least sha what we have
- // in case we want to compare it to something else later
- return sha.get(tmp, function (er, shasum) {
- log.silly('fetchAndShaCheck', 'shasum', shasum)
- cb(er, response, shasum)
- })
- }
-
- // validate that the url we just downloaded matches the expected shasum.
- log.silly('fetchAndShaCheck', 'shasum', shasum)
- sha.check(tmp, shasum, function (er) {
- if (er && er.message) {
- // add original filename for better debuggability
- er.message = er.message + '\n' + 'From: ' + u
- }
- return cb(er, response, shasum)
- })
- })
-
- // 0.8 http streams have a bug, where if they're paused with data in
- // their buffers when the socket closes, they call `end` before emptying
- // those buffers, which results in the entire pipeline ending and thus
- // the point that applied backpressure never being able to trigger a
- // `resume`.
- // We work around this by piping into a pass through stream that has
- // unlimited buffering. The pass through stream is from readable-stream
- // and is thus a current streams3 implementation that is free of these
- // bugs even on 0.8.
- response.pipe(PassThrough({highWaterMark: Infinity})).pipe(tarball)
- })
-}
diff --git a/deps/npm/lib/cache/cached-package-root.js b/deps/npm/lib/cache/cached-package-root.js
deleted file mode 100644
index b47fac6c9e..0000000000
--- a/deps/npm/lib/cache/cached-package-root.js
+++ /dev/null
@@ -1,14 +0,0 @@
-var assert = require('assert')
-var resolve = require('path').resolve
-
-var npm = require('../npm.js')
-
-module.exports = getCacheRoot
-
-function getCacheRoot (data) {
- assert(data, 'must pass package metadata')
- assert(data.name, 'package metadata must include name')
- assert(data.version, 'package metadata must include version')
-
- return resolve(npm.cache, data.name, data.version)
-}
diff --git a/deps/npm/lib/cache/caching-client.js b/deps/npm/lib/cache/caching-client.js
deleted file mode 100644
index 61fb83b311..0000000000
--- a/deps/npm/lib/cache/caching-client.js
+++ /dev/null
@@ -1,220 +0,0 @@
-module.exports = CachingRegistryClient
-
-var path = require('path')
-var fs = require('graceful-fs')
-var url = require('url')
-var assert = require('assert')
-var inherits = require('util').inherits
-
-var RegistryClient = require('npm-registry-client')
-var npm = require('../npm.js')
-var log = require('npmlog')
-var getCacheStat = require('./get-stat.js')
-var cacheFile = require('npm-cache-filename')
-var mkdirp = require('mkdirp')
-var rimraf = require('rimraf')
-var chownr = require('chownr')
-var writeFile = require('write-file-atomic')
-var parseJSON = require('../utils/parse-json')
-
-function CachingRegistryClient (config) {
- RegistryClient.call(this, adaptConfig(config))
-
- this._mapToCache = cacheFile(config.get('cache'))
-
- // swizzle in our custom cache invalidation logic
- this._request = this.request
- this.request = this._invalidatingRequest
- this.get = get
-}
-inherits(CachingRegistryClient, RegistryClient)
-
-CachingRegistryClient.prototype._invalidatingRequest = function (uri, params, cb) {
- var client = this
- this._request(uri, params, function () {
- var args = arguments
-
- var method = params.method
- if (method !== 'HEAD' && method !== 'GET') {
- var invalidated = client._mapToCache(uri)
- // invalidate cache
- //
- // This is irrelevant for commands that do etag / last-modified caching,
- // but ls and view also have a timed cache, so this keeps the user from
- // thinking that it didn't work when it did.
- // Note that failure is an acceptable option here, since the only
- // result will be a stale cache for some helper commands.
- log.verbose('request', 'invalidating', invalidated, 'on', method)
- return rimraf(invalidated, function () {
- cb.apply(undefined, args)
- })
- }
-
- cb.apply(undefined, args)
- })
-}
-
-function get (uri, params, cb) {
- assert(typeof uri === 'string', 'must pass registry URI to get')
- assert(params && typeof params === 'object', 'must pass params to get')
- assert(typeof cb === 'function', 'must pass callback to get')
-
- var parsed = url.parse(uri)
- assert(
- parsed.protocol === 'http:' || parsed.protocol === 'https:',
- 'must have a URL that starts with http: or https:'
- )
-
- var cacheBase = cacheFile(npm.config.get('cache'))(uri)
- var cachePath = path.join(cacheBase, '.cache.json')
-
- // If the GET is part of a write operation (PUT or DELETE), then
- // skip past the cache entirely, but still save the results.
- if (uri.match(/\?write=true$/)) {
- log.verbose('get', 'GET as part of write; not caching result')
- return get_.call(this, uri, cachePath, params, cb)
- }
-
- if (params.skipCache) {
- return get_.call(this, uri, cachePath, params, cb)
- }
-
- var client = this
- fs.stat(cachePath, function (er, stat) {
- if (!er) {
- fs.readFile(cachePath, function (er, data) {
- data = parseJSON.noExceptions(data)
-
- params.stat = stat
- params.data = data
-
- get_.call(client, uri, cachePath, params, cb)
- })
- } else {
- get_.call(client, uri, cachePath, params, cb)
- }
- })
-}
-
-function get_ (uri, cachePath, params, cb) {
- var staleOk = params.staleOk === undefined ? false : params.staleOk
- var timeout = params.timeout === undefined ? -1 : params.timeout
- var data = params.data
- var stat = params.stat
- var etag
- var lastModified
-
- timeout = Math.min(timeout, npm.config.get('cache-max') || 0)
- timeout = Math.max(timeout, npm.config.get('cache-min') || -Infinity)
- if (process.env.COMP_CWORD !== undefined &&
- process.env.COMP_LINE !== undefined &&
- process.env.COMP_POINT !== undefined) {
- timeout = Math.max(timeout, 60000)
- }
-
- if (data) {
- if (data._etag) etag = data._etag
- if (data._lastModified) lastModified = data._lastModified
-
- data._cached = true
-
- if (stat && timeout && timeout > 0) {
- if ((Date.now() - stat.mtime.getTime()) / 1000 < timeout) {
- log.verbose('get', uri, 'not expired, no request')
- delete data._etag
- delete data._lastModified
- return cb(null, data, JSON.stringify(data), { statusCode: 304 })
- }
-
- if (staleOk) {
- log.verbose('get', uri, 'staleOk, background update')
- delete data._etag
- delete data._lastModified
- process.nextTick(
- cb.bind(null, null, data, JSON.stringify(data), { statusCode: 304 })
- )
- cb = function () {}
- }
- }
- }
-
- var options = {
- etag: etag,
- lastModified: lastModified,
- follow: params.follow,
- auth: params.auth
- }
- this.request(uri, options, function (er, remoteData, raw, response) {
- // if we get an error talking to the registry, but we have it
- // from the cache, then just pretend we got it.
- if (er && cachePath && data && !data.error) {
- er = null
- response = { statusCode: 304 }
- }
-
- if (response) {
- log.silly('get', 'cb', [response.statusCode, response.headers])
- if (response.statusCode === 304 && (etag || lastModified)) {
- remoteData = data
- log.verbose(etag ? 'etag' : 'lastModified', uri + ' from cache')
- }
- }
-
- data = remoteData
- if (!data) er = er || new Error('failed to fetch from registry: ' + uri)
-
- if (er) return cb(er, data, raw, response)
-
- saveToCache(cachePath, data, saved)
-
- // just give the write the old college try. if it fails, whatever.
- function saved () {
- delete data._etag
- delete data._lastModified
- cb(er, data, raw, response)
- }
-
- function saveToCache (cachePath, data, saved) {
- log.verbose('get', 'saving', data.name, 'to', cachePath)
- getCacheStat(function (er, st) {
- mkdirp(path.dirname(cachePath), function (er, made) {
- if (er) return saved()
-
- writeFile(cachePath, JSON.stringify(data), function (er) {
- if (er) return saved()
-
- chownr(made || cachePath, st.uid, st.gid, saved)
- })
- })
- })
- }
- })
-}
-
-function adaptConfig (config) {
- return {
- proxy: {
- http: config.get('proxy'),
- https: config.get('https-proxy'),
- localAddress: config.get('local-address')
- },
- ssl: {
- certificate: config.get('cert'),
- key: config.get('key'),
- ca: config.get('ca'),
- strict: config.get('strict-ssl')
- },
- retry: {
- retries: config.get('fetch-retries'),
- factor: config.get('fetch-retry-factor'),
- minTimeout: config.get('fetch-retry-mintimeout'),
- maxTimeout: config.get('fetch-retry-maxtimeout')
- },
- userAgent: config.get('user-agent'),
- log: log,
- defaultTag: config.get('tag'),
- couchToken: config.get('_token'),
- maxSockets: config.get('maxsockets'),
- scope: npm.projectScope
- }
-}
diff --git a/deps/npm/lib/cache/get-stat.js b/deps/npm/lib/cache/get-stat.js
deleted file mode 100644
index 6ea797a4da..0000000000
--- a/deps/npm/lib/cache/get-stat.js
+++ /dev/null
@@ -1,6 +0,0 @@
-var npm = require('../npm.js')
-var correctMkdir = require('../utils/correct-mkdir.js')
-
-module.exports = function getCacheStat (cb) {
- correctMkdir(npm.cache, cb)
-}
diff --git a/deps/npm/lib/config/defaults.js b/deps/npm/lib/config/defaults.js
index 96499d1718..517d82ae1e 100644
--- a/deps/npm/lib/config/defaults.js
+++ b/deps/npm/lib/config/defaults.js
@@ -106,6 +106,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
defaults = {
access: null,
+ 'allow-same-version': false,
'always-auth': false,
also: null,
'auth-type': 'legacy',
@@ -152,6 +153,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
'ham-it-up': false,
heading: 'npm',
'if-present': false,
+ 'ignore-prepublish': false,
'ignore-scripts': false,
'init-module': path.resolve(home, '.npm-init.js'),
'init-author-name': '',
@@ -164,7 +166,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
'legacy-bundling': false,
link: false,
'local-address': undefined,
- loglevel: 'warn',
+ loglevel: 'notice',
logstream: process.stderr,
'logs-max': 10,
long: false,
@@ -172,10 +174,13 @@ Object.defineProperty(exports, 'defaults', {get: function () {
message: '%s',
'metrics-registry': null,
'node-version': process.version,
+ 'offline': false,
'onload-script': false,
only: null,
optional: true,
parseable: false,
+ 'prefer-offline': false,
+ 'prefer-online': false,
prefix: globalPrefix,
production: process.env.NODE_ENV === 'production',
'progress': !process.env.TRAVIS && !process.env.CI,
@@ -189,7 +194,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
'rebuild-bundle': true,
registry: 'https://registry.npmjs.org/',
rollback: true,
- save: false,
+ save: true,
'save-bundle': false,
'save-dev': false,
'save-exact': false,
@@ -210,6 +215,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
'strict-ssl': true,
tag: 'latest',
'tag-version-prefix': 'v',
+ timing: false,
tmp: temp,
unicode: hasUnicode(),
'unsafe-perm': process.platform === 'win32' ||
@@ -233,6 +239,7 @@ Object.defineProperty(exports, 'defaults', {get: function () {
exports.types = {
access: [null, 'restricted', 'public'],
+ 'allow-same-version': Boolean,
'always-auth': Boolean,
also: [null, 'dev', 'development'],
'auth-type': ['legacy', 'sso', 'saml', 'oauth'],
@@ -270,6 +277,7 @@ exports.types = {
'ham-it-up': Boolean,
'heading': String,
'if-present': Boolean,
+ 'ignore-prepublish': Boolean,
'ignore-scripts': Boolean,
'init-module': path,
'init-author-name': String,
@@ -284,7 +292,7 @@ exports.types = {
// local-address must be listed as an IP for a local network interface
// must be IPv4 due to node bug
'local-address': getLocalAddresses(),
- loglevel: ['silent', 'error', 'warn', 'http', 'info', 'verbose', 'silly'],
+ loglevel: ['silent', 'error', 'warn', 'notice', 'http', 'timing', 'info', 'verbose', 'silly'],
logstream: Stream,
'logs-max': Number,
long: Boolean,
@@ -292,10 +300,13 @@ exports.types = {
message: String,
'metrics-registry': [null, String],
'node-version': [null, semver],
+ offline: Boolean,
'onload-script': [null, String],
only: [null, 'dev', 'development', 'prod', 'production'],
optional: Boolean,
parseable: Boolean,
+ 'prefer-offline': Boolean,
+ 'prefer-online': Boolean,
prefix: path,
production: Boolean,
progress: Boolean,
@@ -324,6 +335,7 @@ exports.types = {
'sso-type': [null, 'oauth', 'saml'],
'strict-ssl': Boolean,
tag: String,
+ timing: Boolean,
tmp: path,
unicode: Boolean,
'unsafe-perm': Boolean,
diff --git a/deps/npm/lib/config/pacote.js b/deps/npm/lib/config/pacote.js
new file mode 100644
index 0000000000..13b7b53f52
--- /dev/null
+++ b/deps/npm/lib/config/pacote.js
@@ -0,0 +1,175 @@
+'use strict'
+
+const BB = require('bluebird')
+
+const cp = require('child_process')
+const npm = require('../npm')
+const log = require('npmlog')
+const packToStream = require('../utils/tar').packToStream
+const path = require('path')
+const pipe = BB.promisify(require('mississippi').pipe)
+const readJson = BB.promisify(require('read-package-json'))
+const PassThrough = require('stream').PassThrough
+
+let effectiveOwner
+
+module.exports = pacoteOpts
+function pacoteOpts (moreOpts) {
+ const ownerStats = calculateOwner()
+ const opts = {
+ cache: path.join(npm.config.get('cache'), '_cacache'),
+ defaultTag: npm.config.get('tag'),
+ dirPacker: prepareAndPack,
+ hashAlgorithm: 'sha1',
+ localAddress: npm.config.get('local-address'),
+ log: log,
+ maxAge: npm.config.get('cache-min'),
+ maxSockets: npm.config.get('maxsockets'),
+ offline: npm.config.get('offline'),
+ preferOffline: npm.config.get('prefer-offline') || npm.config.get('cache-min') > 9999,
+ preferOnline: npm.config.get('prefer-online') || npm.config.get('cache-max') <= 0,
+ projectScope: npm.projectScope,
+ proxy: npm.config.get('https-proxy') || npm.config.get('proxy'),
+ refer: npm.registry.refer,
+ registry: npm.config.get('registry'),
+ retry: {
+ retries: npm.config.get('fetch-retries'),
+ factor: npm.config.get('fetch-retry-factor'),
+ minTimeout: npm.config.get('fetch-retry-mintimeout'),
+ maxTimeout: npm.config.get('fetch-retry-maxtimeout')
+ },
+ scope: npm.config.get('scope'),
+ strictSSL: npm.config.get('strict-ssl'),
+ userAgent: npm.config.get('user-agent')
+ }
+
+ if (ownerStats.uid || ownerStats.gid) {
+ Object.assign(opts, ownerStats, {
+ cacheUid: ownerStats.uid,
+ cacheGid: ownerStats.gid
+ })
+ }
+
+ npm.config.keys.forEach(function (k) {
+ if (k[0] === '/' && k.match(/.*:_authToken$/)) {
+ if (!opts.auth) { opts.auth = {} }
+ opts.auth[k.replace(/:_authToken$/, '')] = {
+ token: npm.config.get(k)
+ }
+ }
+ if (k[0] === '@') {
+ if (!opts.scopeTargets) { opts.scopeTargets = {} }
+ opts.scopeTargets[k.replace(/:registry$/, '')] = npm.config.get(k)
+ }
+ })
+
+ Object.keys(moreOpts || {}).forEach((k) => {
+ opts[k] = moreOpts[k]
+ })
+
+ return opts
+}
+
+function calculateOwner () {
+ if (!effectiveOwner) {
+ effectiveOwner = { uid: 0, gid: 0 }
+
+ // Pretty much only on windows
+ if (!process.getuid) {
+ return effectiveOwner
+ }
+
+ effectiveOwner.uid = +process.getuid()
+ effectiveOwner.gid = +process.getgid()
+
+ if (effectiveOwner.uid === 0) {
+ if (process.env.SUDO_UID) effectiveOwner.uid = +process.env.SUDO_UID
+ if (process.env.SUDO_GID) effectiveOwner.gid = +process.env.SUDO_GID
+ }
+ }
+
+ return effectiveOwner
+}
+
+const PASSTHROUGH_OPTS = [
+ 'always-auth',
+ 'auth-type',
+ 'ca',
+ 'cafile',
+ 'cert',
+ 'git',
+ 'local-address',
+ 'maxsockets',
+ 'offline',
+ 'prefer-offline',
+ 'prefer-online',
+ 'proxy',
+ 'https-proxy',
+ 'registry',
+ 'send-metrics',
+ 'sso-poll-frequency',
+ 'sso-type',
+ 'strict-ssl'
+]
+
+function prepareAndPack (manifest, dir) {
+ const stream = new PassThrough()
+ readJson(path.join(dir, 'package.json')).then((pkg) => {
+ if (pkg.scripts && pkg.scripts.prepare) {
+ log.verbose('prepareGitDep', `${manifest._spec}: installing devDeps and running prepare script.`)
+ const cliArgs = PASSTHROUGH_OPTS.reduce((acc, opt) => {
+ if (npm.config.get(opt, 'cli') != null) {
+ acc.push(`--${opt}=${npm.config.get(opt)}`)
+ }
+ return acc
+ }, [])
+ const child = cp.spawn(process.env.NODE || process.execPath, [
+ require.main.filename,
+ 'install',
+ '--ignore-prepublish',
+ '--no-progress',
+ '--no-save'
+ ].concat(cliArgs), {
+ cwd: dir,
+ env: process.env
+ })
+ let errData = []
+ let errDataLen = 0
+ let outData = []
+ let outDataLen = 0
+ child.stdout.on('data', (data) => {
+ outData.push(data)
+ outDataLen += data.length
+ log.gauge.pulse('preparing git package')
+ })
+ child.stderr.on('data', (data) => {
+ errData.push(data)
+ errDataLen += data.length
+ log.gauge.pulse('preparing git package')
+ })
+ return BB.fromNode((cb) => {
+ child.on('error', cb)
+ child.on('exit', (code, signal) => {
+ if (code > 0) {
+ const err = new Error(`${signal}: npm exited with code ${code} while attempting to build ${manifest._requested}. Clone the repository manually and run 'npm install' in it for more information.`)
+ err.code = code
+ err.signal = signal
+ cb(err)
+ } else {
+ cb()
+ }
+ })
+ }).then(() => {
+ if (outDataLen > 0) log.silly('prepareGitDep', '1>', Buffer.concat(outData, outDataLen).toString())
+ if (errDataLen > 0) log.silly('prepareGitDep', '2>', Buffer.concat(errData, errDataLen).toString())
+ }, (err) => {
+ if (outDataLen > 0) log.error('prepareGitDep', '1>', Buffer.concat(outData, outDataLen).toString())
+ if (errDataLen > 0) log.error('prepareGitDep', '2>', Buffer.concat(errData, errDataLen).toString())
+ throw err
+ })
+ }
+ }).then(() => {
+ return pipe(packToStream(manifest, dir), stream)
+ }).catch((err) => stream.emit('error', err))
+ return stream
+}
diff --git a/deps/npm/lib/dedupe.js b/deps/npm/lib/dedupe.js
index d1bd374e9b..ded309c21a 100644
--- a/deps/npm/lib/dedupe.js
+++ b/deps/npm/lib/dedupe.js
@@ -13,13 +13,13 @@ var earliestInstallable = require('./install/deps.js').earliestInstallable
var checkPermissions = require('./install/check-permissions.js')
var decomposeActions = require('./install/decompose-actions.js')
var loadExtraneous = require('./install/deps.js').loadExtraneous
-var filterInvalidActions = require('./install/filter-invalid-actions.js')
-var recalculateMetadata = require('./install/deps.js').recalculateMetadata
+var computeMetadata = require('./install/deps.js').computeMetadata
var sortActions = require('./install/diff-trees.js').sortActions
var moduleName = require('./utils/module-name.js')
var packageId = require('./utils/package-id.js')
var childPath = require('./utils/child-path.js')
var usage = require('./utils/usage')
+var getRequested = require('./install/get-requested.js')
module.exports = dedupe
module.exports.Deduper = Deduper
@@ -36,6 +36,7 @@ function dedupe (args, cb) {
var dryrun = false
if (npm.command.match(/^find/)) dryrun = true
if (npm.config.get('dry-run')) dryrun = true
+ if (dryrun && !npm.config.get('json')) npm.config.set('parseable', true)
new Deduper(where, dryrun).run(cb)
}
@@ -64,10 +65,16 @@ Deduper.prototype.loadIdealTree = function (cb) {
} ],
[this, this.finishTracker, 'loadAllDepsIntoIdealTree'],
- [this, function (next) { recalculateMetadata(this.idealTree, log, next) }]
+ [this, andComputeMetadata(this.idealTree)]
], cb)
}
+function andComputeMetadata (tree) {
+ return function (next) {
+ next(null, computeMetadata(tree))
+ }
+}
+
Deduper.prototype.generateActionsToTake = function (cb) {
validate('F', arguments)
log.silly('dedupe', 'generateActionsToTake')
@@ -81,7 +88,6 @@ Deduper.prototype.generateActionsToTake = function (cb) {
next()
}],
[this, this.finishTracker, 'sort-actions'],
- [filterInvalidActions, this.where, this.differences],
[checkPermissions, this.differences],
[decomposeActions, this.differences, this.todo]
], cb)
@@ -129,18 +135,18 @@ function hoistChildren_ (tree, diff, seen, next) {
seen[tree.path] = true
asyncMap(tree.children, function (child, done) {
if (!tree.parent) return hoistChildren_(child, diff, seen, done)
- var better = findRequirement(tree.parent, moduleName(child), child.package._requested || npa(packageId(child)))
+ var better = findRequirement(tree.parent, moduleName(child), getRequested(child) || npa(packageId(child)))
if (better) {
return chain([
[remove, child, diff],
- [recalculateMetadata, tree, log]
+ [andComputeMetadata(tree)]
], done)
}
var hoistTo = earliestInstallable(tree, tree.parent, child.package)
if (hoistTo) {
move(child, hoistTo, diff)
chain([
- [recalculateMetadata, hoistTo, log],
+ [andComputeMetadata(hoistTo)],
[hoistChildren_, child, diff, seen],
[ function (next) {
moveRemainingChildren(child, diff)
diff --git a/deps/npm/lib/deprecate.js b/deps/npm/lib/deprecate.js
index dff61973ed..15ae58e014 100644
--- a/deps/npm/lib/deprecate.js
+++ b/deps/npm/lib/deprecate.js
@@ -39,13 +39,13 @@ function deprecate (args, cb) {
// npa makes the default spec "latest", but for deprecation
// "*" is the appropriate default.
- if (p.rawSpec === '') p.spec = '*'
+ var spec = p.rawSpec === '' ? '*' : p.fetchSpec
mapToRegistry(p.name, npm.config, function (er, uri, auth) {
if (er) return cb(er)
var params = {
- version: p.spec,
+ version: spec,
message: msg,
auth: auth
}
diff --git a/deps/npm/lib/doctor.js b/deps/npm/lib/doctor.js
index 18453491e4..357ba91a5a 100644
--- a/deps/npm/lib/doctor.js
+++ b/deps/npm/lib/doctor.js
@@ -1,22 +1,27 @@
-var path = require('path')
-var chain = require('slide').chain
-var table = require('text-table')
-var color = require('ansicolors')
-var styles = require('ansistyles')
-var semver = require('semver')
-var npm = require('./npm.js')
-var log = require('npmlog')
-var ansiTrim = require('./utils/ansi-trim.js')
-var output = require('./utils/output.js')
-var defaultRegistry = require('./config/defaults.js').defaults.registry
-var checkPing = require('./doctor/check-ping.js')
-var getGitPath = require('./doctor/get-git-path.js')
-var checksumCachedFiles = require('./doctor/checksum-cached-files.js')
-var checkFilesPermission = require('./doctor/check-files-permission.js')
-var getLatestNodejsVersion = require('./doctor/get-latest-nodejs-version.js')
-var getLatestNpmVersion = require('./doctor/get-latest-npm-version')
-var globalNodeModules = path.join(npm.config.globalPrefix, 'lib', 'node_modules')
-var localNodeModules = path.join(npm.config.localPrefix, 'node_modules')
+'use strict'
+
+const ansiTrim = require('./utils/ansi-trim')
+const chain = require('slide').chain
+const color = require('ansicolors')
+const defaultRegistry = require('./config/defaults').defaults.registry
+const log = require('npmlog')
+const npm = require('./npm')
+const output = require('./utils/output')
+const path = require('path')
+const semver = require('semver')
+const styles = require('ansistyles')
+const table = require('text-table')
+
+// steps
+const checkFilesPermission = require('./doctor/check-files-permission')
+const checkPing = require('./doctor/check-ping')
+const getGitPath = require('./doctor/get-git-path')
+const getLatestNodejsVersion = require('./doctor/get-latest-nodejs-version')
+const getLatestNpmVersion = require('./doctor/get-latest-npm-version')
+const verifyCachedFiles = require('./doctor/verify-cached-files')
+
+const globalNodeModules = path.join(npm.config.globalPrefix, 'lib', 'node_modules')
+const localNodeModules = path.join(npm.config.localPrefix, 'node_modules')
module.exports = doctor
@@ -29,7 +34,7 @@ function doctor (args, silent, cb) {
silent = false
}
- var actionsToRun = [
+ const actionsToRun = [
[checkPing],
[getLatestNpmVersion],
[getLatestNodejsVersion, args['node-url']],
@@ -37,15 +42,15 @@ function doctor (args, silent, cb) {
[checkFilesPermission, npm.cache, 6],
[checkFilesPermission, globalNodeModules, 4],
[checkFilesPermission, localNodeModules, 6],
- [checksumCachedFiles]
+ [verifyCachedFiles, path.join(npm.cache, '_cacache')]
]
log.info('doctor', 'Running checkup')
chain(actionsToRun, function (stderr, stdout) {
if (stderr && stderr.message !== 'not found: git') return cb(stderr)
- var outHead = ['Check', 'Value', 'Recommendation']
- var list = makePretty(stdout)
- var outBody = list
+ const list = makePretty(stdout)
+ let outHead = ['Check', 'Value', 'Recommendation']
+ let outBody = list
if (npm.color) {
outHead = outHead.map(function (item) {
@@ -60,8 +65,8 @@ function doctor (args, silent, cb) {
})
}
- var outTable = [outHead].concat(outBody)
- var tableOpts = {
+ const outTable = [outHead].concat(outBody)
+ const tableOpts = {
stringLength: function (s) { return ansiTrim(s).length }
}
@@ -72,18 +77,18 @@ function doctor (args, silent, cb) {
}
function makePretty (p) {
- var ping = p[0] ? 'ok' : 'notOk'
- var npmLTS = p[1]
- var nodeLTS = p[2].replace('v', '')
- var whichGit = p[3] || 'not installed'
- var readbleCaches = p[4] ? 'ok' : 'notOk'
- var executableGlobalModules = p[5] ? 'ok' : 'notOk'
- var executableLocalModules = p[6] ? 'ok' : 'notOk'
- var checksumCachedFiles = p[7] ? 'ok' : 'notOk'
- var npmV = npm.version
- var nodeV = process.version.replace('v', '')
- var registry = npm.config.get('registry')
- var list = [
+ const ping = p[1]
+ const npmLTS = p[2]
+ const nodeLTS = p[3].replace('v', '')
+ const whichGit = p[4] || 'not installed'
+ const readbleCaches = p[5] ? 'ok' : 'notOk'
+ const executableGlobalModules = p[6] ? 'ok' : 'notOk'
+ const executableLocalModules = p[7] ? 'ok' : 'notOk'
+ const cacheStatus = p[8] ? `verified ${p[8].verifiedContent} tarballs` : 'notOk'
+ const npmV = npm.version
+ const nodeV = process.version.replace('v', '')
+ const registry = npm.config.get('registry')
+ const list = [
['npm ping', ping],
['npm -v', 'v' + npmV],
['node -v', 'v' + nodeV],
@@ -92,10 +97,10 @@ function makePretty (p) {
['Perms check on cached files', readbleCaches],
['Perms check on global node_modules', executableGlobalModules],
['Perms check on local node_modules', executableLocalModules],
- ['Checksum cached files', checksumCachedFiles]
+ ['Verify cache contents', cacheStatus]
]
- if (ping !== 'ok') list[0][2] = 'Check your internet connection'
+ if (p[0] !== 200) list[0][2] = 'Check your internet connection'
if (!semver.satisfies(npmV, '>=' + npmLTS)) list[1][2] = 'Use npm v' + npmLTS
if (!semver.satisfies(nodeV, '>=' + nodeLTS)) list[2][2] = 'Use node v' + nodeLTS
if (registry !== defaultRegistry) list[3][2] = 'Try `npm config set registry ' + defaultRegistry + '`'
@@ -103,7 +108,6 @@ function makePretty (p) {
if (readbleCaches !== 'ok') list[5][2] = 'Check the permissions of your files in ' + npm.config.get('cache')
if (executableGlobalModules !== 'ok') list[6][2] = globalNodeModules + ' must be readable and writable by the current user.'
if (executableLocalModules !== 'ok') list[7][2] = localNodeModules + ' must be readable and writable by the current user.'
- if (checksumCachedFiles !== 'ok') list[8][2] = 'You have some broken packages in your cache.'
return list
}
diff --git a/deps/npm/lib/doctor/check-files-permission.js b/deps/npm/lib/doctor/check-files-permission.js
index 74393596f6..50014fd232 100644
--- a/deps/npm/lib/doctor/check-files-permission.js
+++ b/deps/npm/lib/doctor/check-files-permission.js
@@ -9,12 +9,12 @@ var fileCompletion = require('../utils/completion/file-completion.js')
function checkFilesPermission (root, mask, cb) {
if (process.platform === 'win32') return cb(null, true)
getUid(npm.config.get('user'), npm.config.get('group'), function (e, uid, gid) {
+ var tracker = log.newItem('checkFilePermissions', 1)
if (e) {
tracker.finish()
tracker.warn('checkFilePermissions', 'Error looking up user and group:', e)
return cb(e)
}
- var tracker = log.newItem('checkFilePermissions', 1)
tracker.info('checkFilePermissions', 'Building file list of ' + root)
fileCompletion(root, '.', Infinity, function (e, files) {
if (e) {
@@ -38,14 +38,16 @@ function checkFilesPermission (root, mask, cb) {
tracker.completeWork(1)
if (e) return next(e)
if (!stat.isFile()) return next()
- var mode = stat.mode
- var isGroup = stat.gid ? stat.gid === gid : true
- var isUser = stat.uid ? stat.uid === uid : true
- if ((mode & parseInt('000' + mask, 8))) return next()
- if ((isGroup && mode & parseInt('00' + mask + '0', 8))) return next()
- if ((isUser && mode & parseInt('0' + mask + '00', 8))) return next()
- tracker.error('checkFilePermissions', 'Missing permissions on (' + isGroup + ', ' + isUser + ', ' + mode + ')', file)
- return next(new Error('Missing permissions for ' + file))
+ // 6 = fs.constants.R_OK | fs.constants.W_OK
+ // constants aren't available on v4
+ fs.access(file, 6, (err) => {
+ if (err) {
+ tracker.error('checkFilePermissions', `Missing permissions on ${file}`)
+ return next(new Error('Missing permissions for ' + file))
+ } else {
+ return next()
+ }
+ })
})
}
})
diff --git a/deps/npm/lib/doctor/check-ping.js b/deps/npm/lib/doctor/check-ping.js
index 99e4ea2ba2..29a25c2fb7 100644
--- a/deps/npm/lib/doctor/check-ping.js
+++ b/deps/npm/lib/doctor/check-ping.js
@@ -4,9 +4,10 @@ var ping = require('../ping.js')
function checkPing (cb) {
var tracker = log.newItem('checkPing', 1)
tracker.info('checkPing', 'Pinging registry')
- ping({}, true, function (err, pong) {
+ ping({}, true, function (err, pong, data, res) {
+ if (err) { return cb(err) }
tracker.finish()
- cb(err, pong)
+ cb(null, [res.statusCode, res.statusMessage])
})
}
diff --git a/deps/npm/lib/doctor/checksum-cached-files.js b/deps/npm/lib/doctor/checksum-cached-files.js
deleted file mode 100644
index d50c326852..0000000000
--- a/deps/npm/lib/doctor/checksum-cached-files.js
+++ /dev/null
@@ -1,62 +0,0 @@
-var crypto = require('crypto')
-var fs = require('fs')
-var path = require('path')
-var chain = require('slide').chain
-var log = require('npmlog')
-var npm = require('../npm')
-var fileCompletion = require('../utils/completion/file-completion.js')
-
-function checksum (str) {
- return crypto
- .createHash('sha1')
- .update(str, 'utf8')
- .digest('hex')
-}
-
-function checksumCachedFiles (cb) {
- var tracker = log.newItem('checksumCachedFiles', 1)
- tracker.info('checksumCachedFiles', 'Building file list of ' + npm.cache)
- fileCompletion(npm.cache, '.', Infinity, function (e, files) {
- if (e) {
- tracker.finish()
- return cb(e)
- }
- tracker.addWork(files.length)
- tracker.completeWork(1)
- chain(files.map(andChecksumFile), function (er) {
- tracker.finish()
- cb(null, !er)
- })
- function andChecksumFile (f) {
- return [function (next) { process.nextTick(function () { checksumFile(f, next) }) }]
- }
- function checksumFile (f, next) {
- var file = path.join(npm.cache, f)
- tracker.silly('checksumFile', f)
- if (!/.tgz$/.test(file)) {
- tracker.completeWork(1)
- return next()
- }
- fs.readFile(file, function (err, tgz) {
- tracker.completeWork(1)
- if (err) return next(err)
- try {
- var pkgJSON = fs.readFileSync(path.join(path.dirname(file), 'package/package.json'))
- } catch (e) {
- return next() // no package.json in cche is ok
- }
- try {
- var pkg = JSON.parse(pkgJSON)
- var shasum = (pkg.dist && pkg.dist.shasum) || pkg._shasum
- var actual = checksum(tgz)
- if (actual !== shasum) return next(new Error('Checksum mismatch on ' + file + ', expected: ' + shasum + ', got: ' + shasum))
- return next()
- } catch (e) {
- return next(new Error('Error parsing JSON in ' + file + ': ' + e))
- }
- })
- }
- })
-}
-
-module.exports = checksumCachedFiles
diff --git a/deps/npm/lib/doctor/get-latest-npm-version.js b/deps/npm/lib/doctor/get-latest-npm-version.js
index aba9b773cd..5a096ab895 100644
--- a/deps/npm/lib/doctor/get-latest-npm-version.js
+++ b/deps/npm/lib/doctor/get-latest-npm-version.js
@@ -4,9 +4,10 @@ var fetchPackageMetadata = require('../fetch-package-metadata')
function getLatestNpmVersion (cb) {
var tracker = log.newItem('getLatestNpmVersion', 1)
tracker.info('getLatestNpmVersion', 'Getting npm package information')
- fetchPackageMetadata('npm@latest', '.', {fullMetadata: true}, function (e, d) {
+ fetchPackageMetadata('npm@latest', '.', {}, function (err, d) {
tracker.finish()
- cb(e, d.version)
+ if (err) { return cb(err) }
+ cb(null, d.version)
})
}
diff --git a/deps/npm/lib/doctor/verify-cached-files.js b/deps/npm/lib/doctor/verify-cached-files.js
new file mode 100644
index 0000000000..f00dbd6256
--- /dev/null
+++ b/deps/npm/lib/doctor/verify-cached-files.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const cacache = require('cacache')
+const log = require('npmlog')
+
+module.exports = verifyCachedFiles
+function verifyCachedFiles (cache, cb) {
+ log.info('verifyCachedFiles', `Verifying cache at ${cache}`)
+ cacache.verify(cache).then((stats) => {
+  log.info('verifyCachedFiles', `Verification complete. Stats: ${JSON.stringify(stats, null, 2)}`)
+ if (stats.reclaimedCount || stats.badContentCount || stats.missingContent) {
+ stats.badContentCount && log.warn('verifyCachedFiles', `Corrupted content removed: ${stats.badContentCount}`)
+ stats.reclaimedCount && log.warn('verifyCachedFiles', `Content garbage-collected: ${stats.reclaimedCount} (${stats.reclaimedSize} bytes)`)
+ stats.missingContent && log.warn('verifyCachedFiles', `Missing content: ${stats.missingContent}`)
+ log.warn('verifyCachedFiles', 'Cache issues have been fixed')
+ }
+ return stats
+ }).then((s) => cb(null, s), cb)
+}
diff --git a/deps/npm/lib/explore.js b/deps/npm/lib/explore.js
index 5640d5f157..826a527fa7 100644
--- a/deps/npm/lib/explore.js
+++ b/deps/npm/lib/explore.js
@@ -2,7 +2,7 @@
// open a subshell to the package folder.
module.exports = explore
-explore.usage = 'npm explore <pkg> [ -- <cmd>]'
+explore.usage = 'npm explore <pkg> [ -- <command>]'
explore.completion = require('./utils/completion/installed-shallow.js')
var npm = require('./npm.js')
diff --git a/deps/npm/lib/fetch-package-metadata.js b/deps/npm/lib/fetch-package-metadata.js
index ae22004e5b..45d6acbfae 100644
--- a/deps/npm/lib/fetch-package-metadata.js
+++ b/deps/npm/lib/fetch-package-metadata.js
@@ -1,47 +1,37 @@
'use strict'
-var fs = require('graceful-fs')
-var path = require('path')
-var zlib = require('zlib')
-var log = require('npmlog')
-var realizePackageSpecifier = require('realize-package-specifier')
-var tar = require('tar')
-var once = require('once')
-var semver = require('semver')
-var readPackageTree = require('read-package-tree')
-var readPackageJson = require('read-package-json')
-var iferr = require('iferr')
-var rimraf = require('rimraf')
-var clone = require('lodash.clonedeep')
-var validate = require('aproba')
-var unpipe = require('unpipe')
-var normalizePackageData = require('normalize-package-data')
-var limit = require('call-limit')
-
-var npm = require('./npm.js')
-var mapToRegistry = require('./utils/map-to-registry.js')
-var cache = require('./cache.js')
-var cachedPackageRoot = require('./cache/cached-package-root.js')
-var tempFilename = require('./utils/temp-filename.js')
-var getCacheStat = require('./cache/get-stat.js')
-var unpack = require('./utils/tar.js').unpack
-var pulseTillDone = require('./utils/pulse-till-done.js')
-var parseJSON = require('./utils/parse-json.js')
-var pickManifestFromRegistryMetadata = require('./utils/pick-manifest-from-registry-metadata.js')
+const deprCheck = require('./utils/depr-check')
+const path = require('path')
+const log = require('npmlog')
+const readPackageTree = require('read-package-tree')
+const rimraf = require('rimraf')
+const validate = require('aproba')
+const npa = require('npm-package-arg')
+const npm = require('./npm')
+const npmlog = require('npmlog')
+const limit = require('call-limit')
+const tempFilename = require('./utils/temp-filename')
+const pacote = require('pacote')
+const pacoteOpts = require('./config/pacote')
+const isWindows = require('./utils/is-windows.js')
function andLogAndFinish (spec, tracker, done) {
- validate('SF', [spec, done])
- return function (er, pkg) {
+ validate('SOF|SZF|OOF|OZF', [spec, tracker, done])
+ return (er, pkg) => {
if (er) {
- log.silly('fetchPackageMetaData', 'error for ' + spec, er)
+ log.silly('fetchPackageMetaData', 'error for ' + String(spec), er.message)
if (tracker) tracker.finish()
}
return done(er, pkg)
}
}
-module.exports = limit(fetchPackageMetadata, npm.limit.fetch)
+const CACHE = require('lru-cache')({
+ max: 300 * 1024 * 1024,
+ length: (p) => p._contentLength
+})
+module.exports = limit(fetchPackageMetadata, npm.limit.fetch)
function fetchPackageMetadata (spec, where, opts, done) {
validate('SSOF|SSFZ|OSOF|OSFZ', [spec, where, opts, done])
@@ -50,305 +40,74 @@ function fetchPackageMetadata (spec, where, opts, done) {
opts = {}
}
var tracker = opts.tracker
+ const logAndFinish = andLogAndFinish(spec, tracker, done)
+
if (typeof spec === 'object') {
var dep = spec
- spec = dep.raw
- }
- var logAndFinish = andLogAndFinish(spec, tracker, done)
- if (!dep) {
- log.silly('fetchPackageMetaData', spec)
- return realizePackageSpecifier(spec, where, iferr(logAndFinish, function (dep) {
- fetchPackageMetadata(dep, where, {tracker: tracker}, done)
- }))
- }
- if (dep.type === 'version' || dep.type === 'range' || dep.type === 'tag') {
- fetchNamedPackageData(dep, opts, addRequestedAndFinish)
- } else if (dep.type === 'directory') {
- fetchDirectoryPackageData(dep, where, addRequestedAndFinish)
} else {
- fetchOtherPackageData(spec, dep, where, addRequestedAndFinish)
- }
- function addRequestedAndFinish (er, pkg) {
- if (pkg) annotateMetadata(pkg, dep, spec, where)
- logAndFinish(er, pkg)
+ dep = npa(spec)
}
-}
-
-var annotateMetadata = module.exports.annotateMetadata = function (pkg, requested, spec, where) {
- validate('OOSS', arguments)
- pkg._requested = requested
- pkg._spec = spec
- pkg._where = where
- if (!pkg._args) pkg._args = []
- pkg._args.push([requested, where])
- // non-npm registries can and will return unnormalized data, plus
- // even the npm registry may have package data normalized with older
- // normalization rules. This ensures we get package data in a consistent,
- // stable format.
- try {
- normalizePackageData(pkg)
- } catch (ex) {
- // don't care
+ if (!isWindows && dep.type === 'directory' && /^[a-zA-Z]:/.test(dep.fetchSpec)) {
+ var err = new Error(`Can't install from windows path on a non-windows system: ${dep.fetchSpec.replace(/[/]/g, '\\')}`)
+ err.code = 'EWINDOWSPATH'
+ return logAndFinish(err)
}
-}
-
-function fetchOtherPackageData (spec, dep, where, next) {
- validate('SOSF', arguments)
- log.silly('fetchOtherPackageData', spec)
- cache.add(spec, null, where, false, iferr(next, function (pkg) {
- var result = clone(pkg)
- result._inCache = true
- next(null, result)
- }))
-}
-
-function fetchDirectoryPackageData (dep, where, next) {
- validate('OSF', arguments)
- log.silly('fetchDirectoryPackageData', dep.name || dep.rawSpec)
- readPackageJson(path.join(dep.spec, 'package.json'), false, next)
-}
-var regCache = {}
-
-function fetchNamedPackageData (dep, opts, next) {
- validate('OOF', arguments)
- log.silly('fetchNamedPackageData', dep.name || dep.rawSpec)
- mapToRegistry(dep.name || dep.rawSpec, npm.config, iferr(next, function (url, auth) {
- if (regCache[url]) {
- pickVersionFromRegistryDocument(clone(regCache[url]))
- } else {
- var fullMetadata = opts.fullMetadata == null ? true : opts.fullMetadata
- npm.registry.get(url, {auth: auth, fullMetadata: fullMetadata}, pulseTillDone('fetchMetadata', iferr(next, pickVersionFromRegistryDocument)))
- }
- function thenAddMetadata (pkg) {
- pkg._from = dep.raw
- pkg._resolved = pkg.dist.tarball
- pkg._shasum = pkg.dist.shasum
-
- next(null, pkg)
- }
- function pickVersionFromRegistryDocument (pkg) {
- if (!regCache[url]) regCache[url] = pkg
- var versions = Object.keys(pkg.versions)
-
- var invalidVersions = versions.filter(function (v) { return !semver.valid(v) })
- if (invalidVersions.length > 0) {
- log.warn('pickVersion', 'The package %s has invalid semver-version(s): %s. This usually only happens for unofficial private registries. ' +
- 'You should delete or re-publish the invalid versions.', pkg.name, invalidVersions.join(', '))
- }
-
- versions = versions.filter(function (v) { return semver.valid(v) })
-
- if (dep.type === 'tag') {
- var tagVersion = pkg['dist-tags'][dep.spec]
- if (pkg.versions[tagVersion]) return thenAddMetadata(pkg.versions[tagVersion])
+ pacote.manifest(dep, pacoteOpts({
+ annotate: true,
+ fullMetadata: opts.fullMetadata,
+ log: tracker || npmlog,
+ memoize: CACHE,
+ where: where
+ })).then(
+ (pkg) => logAndFinish(null, deprCheck(pkg)),
+ (err) => {
+ if (dep.type !== 'directory') return logAndFinish(err)
+ if (err.code === 'ENOTDIR') {
+ var enolocal = new Error(`Could not install "${path.relative(process.cwd(), dep.fetchSpec)}" as it is not a directory and is not a file with a name ending in .tgz, .tar.gz or .tar`)
+ enolocal.code = 'ENOLOCAL'
+ if (err.stack) enolocal.stack = err.stack
+ return logAndFinish(enolocal)
+ } else if (err.code === 'ENOPACKAGEJSON') {
+ var enopackage = new Error(`Could not install from "${path.relative(process.cwd(), dep.fetchSpec)}" as it does not contain a package.json file.`)
+ enopackage.code = 'ENOLOCAL'
+ if (err.stack) enopackage.stack = err.stack
+ return logAndFinish(enopackage)
} else {
- var picked = pickManifestFromRegistryMetadata(dep.spec, npm.config.get('tag'), versions, pkg)
- if (picked) return thenAddMetadata(picked.manifest)
+ return logAndFinish(err)
}
-
- // We didn't manage to find a compatible version
- // If this package was requested from cache, force hitting the network
- if (pkg._cached) {
- log.silly('fetchNamedPackageData', 'No valid target from cache, forcing network')
- return npm.registry.get(url, {
- auth: auth,
- skipCache: true
- }, pulseTillDone('fetchMetadata', iferr(next, pickVersionFromRegistryDocument)))
- }
-
- // And failing that, we error out
- var targets = versions.length
- ? 'Valid install targets:\n' + versions.join(', ') + '\n'
- : 'No valid targets found.'
- var er = new Error('No compatible version found: ' +
- dep.raw + '\n' + targets)
- er.code = 'ETARGET'
- return next(er)
}
- }))
+ )
}
-function retryWithCached (pkg, asserter, next) {
- if (!pkg._inCache) {
- cache.add(pkg._spec, null, pkg._where, false, iferr(next, function (newpkg) {
- Object.keys(newpkg).forEach(function (key) {
- if (key[0] !== '_') return
- pkg[key] = newpkg[key]
- })
- pkg._inCache = true
- return asserter(pkg, next)
- }))
- }
- return !pkg._inCache
-}
-
-module.exports.addShrinkwrap = function addShrinkwrap (pkg, next) {
- validate('OF', arguments)
- if (pkg._shrinkwrap !== undefined) return next(null, pkg)
- if (pkg._hasShrinkwrap === false) {
- pkg._shrinkwrap = null
- return next(null, pkg)
- }
- if (retryWithCached(pkg, addShrinkwrap, next)) return
- pkg._shrinkwrap = null
- // FIXME: cache the shrinkwrap directly
- var pkgname = pkg.name
- var ver = pkg.version
- var tarball = path.join(cachedPackageRoot({name: pkgname, version: ver}), 'package.tgz')
- untarStream(tarball, function (er, untar) {
- if (er) {
- if (er.code === 'ENOTTARBALL') {
- pkg._shrinkwrap = null
- return next()
- } else {
- return next(er)
- }
- }
- if (er) return next(er)
- var foundShrinkwrap = false
- untar.on('entry', function (entry) {
- if (!/^(?:[^\/]+[\/])npm-shrinkwrap.json$/.test(entry.path)) return
- log.silly('addShrinkwrap', 'Found shrinkwrap in ' + pkgname + ' ' + entry.path)
- foundShrinkwrap = true
- var shrinkwrap = ''
- entry.on('data', function (chunk) {
- shrinkwrap += chunk
- })
- entry.on('end', function () {
- untar.close()
- log.silly('addShrinkwrap', 'Completed reading shrinkwrap in ' + pkgname)
- try {
- pkg._shrinkwrap = parseJSON(shrinkwrap)
- } catch (ex) {
- var er = new Error('Error parsing ' + pkgname + '@' + ver + "'s npm-shrinkwrap.json: " + ex.message)
- er.type = 'ESHRINKWRAP'
- return next(er)
- }
- next(null, pkg)
- })
- entry.resume()
- })
- untar.on('end', function () {
- if (!foundShrinkwrap) {
- pkg._shrinkwrap = null
- next(null, pkg)
- }
- })
- })
-}
-
-module.exports.addBundled = function addBundled (pkg, next) {
+module.exports.addBundled = addBundled
+function addBundled (pkg, next) {
validate('OF', arguments)
if (pkg._bundled !== undefined) return next(null, pkg)
- if (!pkg.bundleDependencies) return next(null, pkg)
- if (retryWithCached(pkg, addBundled, next)) return
- pkg._bundled = null
- var pkgname = pkg.name
- var ver = pkg.version
- var tarball = path.join(cachedPackageRoot({name: pkgname, version: ver}), 'package.tgz')
- var target = tempFilename('unpack')
- getCacheStat(iferr(next, function (cs) {
- log.verbose('addBundled', 'extract', tarball)
- unpack(tarball, target, null, null, cs.uid, cs.gid, iferr(next, function () {
- log.silly('addBundled', 'read tarball')
- readPackageTree(target, function (er, tree) {
- log.silly('cleanup', 'remove extracted module')
- rimraf(target, function () {
- if (tree) {
- pkg._bundled = tree.children
- }
- next(null, pkg)
- })
- })
- }))
- }))
-}
-
-// FIXME: hasGzipHeader / hasTarHeader / untarStream duplicate a lot
-// of code from lib/utils/tar.js– these should be brought together.
-
-function hasGzipHeader (c) {
- return c[0] === 0x1F && c[1] === 0x8B && c[2] === 0x08
-}
-
-function hasTarHeader (c) {
- return c[257] === 0x75 && // tar archives have 7573746172 at position
- c[258] === 0x73 && // 257 and 003030 or 202000 at position 262
- c[259] === 0x74 &&
- c[260] === 0x61 &&
- c[261] === 0x72 &&
- ((c[262] === 0x00 &&
- c[263] === 0x30 &&
- c[264] === 0x30) ||
-
- (c[262] === 0x20 &&
- c[263] === 0x20 &&
- c[264] === 0x00))
-}
-
-function untarStream (tarball, cb) {
- validate('SF', arguments)
- cb = once(cb)
-
- var stream
- var file = stream = fs.createReadStream(tarball)
- var tounpipe = [file]
- file.on('error', function (er) {
- er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
- er.code = 'EREADFILE'
- cb(er)
- })
- file.on('data', function OD (c) {
- if (hasGzipHeader(c)) {
- doGunzip()
- } else if (hasTarHeader(c)) {
- doUntar()
- } else {
- if (file.close) file.close()
- if (file.destroy) file.destroy()
- var er = new Error('Non-gzip/tarball ' + tarball)
- er.code = 'ENOTTARBALL'
- return cb(er)
- }
- file.removeListener('data', OD)
- file.emit('data', c)
- cb(null, stream)
- })
-
- function doGunzip () {
- var gunzip = stream.pipe(zlib.createGunzip())
- gunzip.on('error', function (er) {
- er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
- er.code = 'EGUNZIP'
- cb(er)
- })
- tounpipe.push(gunzip)
- stream = gunzip
- doUntar()
- }
-
- function doUntar () {
- var untar = stream.pipe(tar.Parse())
- untar.on('error', function (er) {
- er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
- er.code = 'EUNTAR'
- cb(er)
+ if (!pkg.bundleDependencies && pkg._requested.type !== 'directory') return next(null, pkg)
+ const requested = pkg._requested || npa(pkg._from)
+ if (requested.type === 'directory') {
+ pkg._bundled = null
+ return readPackageTree(pkg._requested.fetchSpec, function (er, tree) {
+ if (tree) pkg._bundled = tree.children
+ return next(null, pkg)
})
- tounpipe.push(untar)
- stream = untar
- addClose()
}
-
- function addClose () {
- stream.close = function () {
- tounpipe.forEach(function (stream) {
- unpipe(stream)
+ pkg._bundled = null
+ const target = tempFilename('unpack')
+ const opts = pacoteOpts({integrity: pkg._integrity})
+ pacote.extract(pkg._resolved || pkg._requested || npa.resolve(pkg.name, pkg.version), target, opts).then(() => {
+ log.silly('addBundled', 'read tarball')
+ readPackageTree(target, (err, tree) => {
+ if (err) { return next(err) }
+ log.silly('cleanup', 'remove extracted module')
+ rimraf(target, function () {
+ if (tree) {
+ pkg._bundled = tree.children
+ }
+ next(null, pkg)
})
-
- if (file.close) file.close()
- if (file.destroy) file.destroy()
- }
- }
+ })
+ }, next)
}
diff --git a/deps/npm/lib/help.js b/deps/npm/lib/help.js
index cfac917999..9763d5fccd 100644
--- a/deps/npm/lib/help.js
+++ b/deps/npm/lib/help.js
@@ -38,7 +38,7 @@ function help (args, cb) {
return npmUsage(valid, cb)
}
- // npm <cmd> -h: show command usage
+ // npm <command> -h: show command usage
if (npm.config.get('usage') &&
npm.commands[section] &&
npm.commands[section].usage) {
@@ -169,7 +169,7 @@ function npmUsage (valid, cb) {
npm.config.get('long') ? usages()
: ' ' + wrap(commands),
'',
- 'npm <cmd> -h quick help on <cmd>',
+ 'npm <command> -h quick help on <command>',
'npm -l display full usage info',
'npm help <term> search for help on <term>',
'npm help npm involved overview',
@@ -185,7 +185,7 @@ function npmUsage (valid, cb) {
}
function usages () {
- // return a string of <cmd>: <usage>
+ // return a string of <command>: <usage>
var maxLen = 0
return Object.keys(npm.commands).filter(function (c) {
return c === npm.deref(c)
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
index 43ba436630..c567f624f9 100644
--- a/deps/npm/lib/install.js
+++ b/deps/npm/lib/install.js
@@ -111,7 +111,6 @@ var npm = require('./npm.js')
var locker = require('./utils/locker.js')
var lock = locker.lock
var unlock = locker.unlock
-var ls = require('./ls.js')
var parseJSON = require('./utils/parse-json.js')
var output = require('./utils/output.js')
var saveMetrics = require('./utils/metrics.js').save
@@ -119,7 +118,8 @@ var saveMetrics = require('./utils/metrics.js').save
// install specific libraries
var copyTree = require('./install/copy-tree.js')
var readShrinkwrap = require('./install/read-shrinkwrap.js')
-var recalculateMetadata = require('./install/deps.js').recalculateMetadata
+var computeMetadata = require('./install/deps.js').computeMetadata
+var prefetchDeps = require('./install/deps.js').prefetchDeps
var loadDeps = require('./install/deps.js').loadDeps
var loadDevDeps = require('./install/deps.js').loadDevDeps
var getAllMetadata = require('./install/deps.js').getAllMetadata
@@ -128,7 +128,6 @@ var loadExtraneous = require('./install/deps.js').loadExtraneous
var diffTrees = require('./install/diff-trees.js')
var checkPermissions = require('./install/check-permissions.js')
var decomposeActions = require('./install/decompose-actions.js')
-var filterInvalidActions = require('./install/filter-invalid-actions.js')
var validateTree = require('./install/validate-tree.js')
var validateArgs = require('./install/validate-args.js')
var saveRequested = require('./install/save.js').saveRequested
@@ -141,7 +140,8 @@ var removeObsoleteDep = require('./install/deps.js').removeObsoleteDep
var packageId = require('./utils/package-id.js')
var moduleName = require('./utils/module-name.js')
var errorMessage = require('./utils/error-message.js')
-var andIgnoreErrors = require('./install/and-ignore-errors.js')
+var removeDeps = require('./install/deps.js').removeDeps
+var isExtraneous = require('./install/is-extraneous.js')
function unlockCB (lockPath, name, cb) {
validate('SSF', arguments)
@@ -214,6 +214,7 @@ function Installer (where, dryrun, args) {
this.rollback = npm.config.get('rollback')
this.link = npm.config.get('link')
this.global = this.where === path.resolve(npm.globalDir, '..')
+ this.started = Date.now()
}
Installer.prototype = {}
@@ -279,32 +280,24 @@ Installer.prototype.run = function (_cb) {
[this, this.commit, staging, this.todo],
[this, this.runPostinstallTopLevelLifecycles],
- [this, this.finishTracker, 'runTopLevelLifecycles'])
- if (getSaveType(this.args)) {
+ [this, this.finishTracker, 'runTopLevelLifecycles']
+ )
+ if (getSaveType()) {
postInstallSteps.push(
+ // this is necessary as we don't fill in `dependencies` and `devDependencies` in deps loaded from shrinkwrap
+ // until after we extract them
+ [this, (next) => { computeMetadata(this.idealTree); next() }],
[this, this.saveToDependencies])
}
}
postInstallSteps.push(
+ [this, this.printWarnings],
[this, this.printInstalled])
var self = this
chain(installSteps, function (installEr) {
if (installEr) self.failing = true
chain(postInstallSteps, function (postInstallEr) {
- if (self.idealTree) {
- self.idealTree.warnings.forEach(function (warning) {
- if (warning.code === 'EPACKAGEJSON' && self.global) return
- if (warning.code === 'ENOTDIR') return
- var output = errorMessage(warning)
- output.summary.forEach(function (logline) {
- log.warn.apply(log, logline)
- })
- output.detail.forEach(function (logline) {
- log.verbose.apply(log, logline)
- })
- })
- }
if (installEr && postInstallEr) {
var msg = errorMessage(postInstallEr)
msg.summary.forEach(function (logline) {
@@ -331,17 +324,15 @@ Installer.prototype.newTracker = function (tracker, name, size) {
validate('OS', [tracker, name])
if (size) validate('N', [size])
this.progress[name] = tracker.newGroup(name, size)
- var self = this
return function (next) {
- self.progress[name].silly(name, 'Starting')
+ process.emit('time', 'stage:' + name)
next()
}
}
Installer.prototype.finishTracker = function (name, cb) {
validate('SF', arguments)
- this.progress[name].silly(name, 'Finishing')
- this.progress[name].finish()
+ process.emit('timeEnd', 'stage:' + name)
cb()
}
@@ -354,39 +345,58 @@ Installer.prototype.loadCurrentTree = function (cb) {
} else {
todo.push([this, this.readLocalPackageData])
}
- todo.push(
- [this, this.normalizeTree, log.newGroup('normalizeTree')])
+ todo.push([this, this.normalizeCurrentTree])
chain(todo, cb)
}
+var createNode = require('./install/node.js').create
+var flatNameFromTree = require('./install/flatten-tree.js').flatNameFromTree
+Installer.prototype.normalizeCurrentTree = function (cb) {
+ this.currentTree.isTop = true
+ normalizeTree(this.currentTree)
+ return cb()
+
+ function normalizeTree (tree) {
+ createNode(tree)
+ tree.location = flatNameFromTree(tree)
+ tree.children.forEach(normalizeTree)
+ }
+}
+
Installer.prototype.loadIdealTree = function (cb) {
validate('F', arguments)
log.silly('install', 'loadIdealTree')
chain([
- [this.newTracker(this.progress.loadIdealTree, 'cloneCurrentTree')],
+ [this.newTracker(this.progress.loadIdealTree, 'loadIdealTree:cloneCurrentTree')],
[this, this.cloneCurrentTreeToIdealTree],
- [this, this.finishTracker, 'cloneCurrentTree'],
+ [this, this.finishTracker, 'loadIdealTree:cloneCurrentTree'],
- [this.newTracker(this.progress.loadIdealTree, 'loadShrinkwrap')],
+ [this.newTracker(this.progress.loadIdealTree, 'loadIdealTree:loadShrinkwrap')],
[this, this.loadShrinkwrap],
- [this, this.finishTracker, 'loadShrinkwrap'],
+ [this, this.finishTracker, 'loadIdealTree:loadShrinkwrap'],
- [this.newTracker(this.progress.loadIdealTree, 'loadAllDepsIntoIdealTree', 10)],
+ [this.newTracker(this.progress.loadIdealTree, 'loadIdealTree:loadAllDepsIntoIdealTree', 10)],
[this, this.loadAllDepsIntoIdealTree],
- [this, this.finishTracker, 'loadAllDepsIntoIdealTree'],
-
- // TODO: Remove this (should no longer be necessary, instead counter productive)
- [this, function (next) { recalculateMetadata(this.idealTree, log, next) }]
+ [this, this.finishTracker, 'loadIdealTree:loadAllDepsIntoIdealTree'],
+ [this, function (next) { computeMetadata(this.idealTree); next() }],
+ [this, this.pruneIdealTree]
], cb)
}
+Installer.prototype.pruneIdealTree = function (cb) {
+ var toPrune = this.idealTree.children
+ .filter((n) => !n.fromShrinkwrap && isExtraneous(n))
+ .map((n) => ({name: moduleName(n)}))
+ return removeDeps(toPrune, this.idealTree, null, log.newGroup('pruneDeps'), cb)
+}
+
Installer.prototype.loadAllDepsIntoIdealTree = function (cb) {
validate('F', arguments)
log.silly('install', 'loadAllDepsIntoIdealTree')
- var saveDeps = getSaveType(this.args)
+ var saveDeps = getSaveType()
- var cg = this.progress.loadAllDepsIntoIdealTree
+ var cg = this.progress['loadIdealTree:loadAllDepsIntoIdealTree']
var installNewModules = !!this.args.length
var steps = []
@@ -394,6 +404,14 @@ Installer.prototype.loadAllDepsIntoIdealTree = function (cb) {
steps.push([validateArgs, this.idealTree, this.args])
steps.push([loadRequestedDeps, this.args, this.idealTree, saveDeps, cg.newGroup('loadRequestedDeps')])
} else {
+ const depsToPreload = Object.assign({},
+ this.dev ? this.idealTree.package.devDependencies : {},
+ this.prod ? this.idealTree.package.dependencies : {}
+ )
+ if (this.prod || this.dev) {
+ steps.push(
+ [prefetchDeps, this.idealTree, depsToPreload, cg.newGroup('prefetchDeps')])
+ }
if (this.prod) {
steps.push(
[loadDeps, this.idealTree, cg.newGroup('loadDeps')])
@@ -416,7 +434,6 @@ Installer.prototype.generateActionsToTake = function (cb) {
[validateTree, this.idealTree, cg.newGroup('validateTree')],
[diffTrees, this.currentTree, this.idealTree, this.differences, cg.newGroup('diffTrees')],
[this, this.computeLinked],
- [filterInvalidActions, this.where, this.differences],
[checkPermissions, this.differences],
[decomposeActions, this.differences, this.todo]
], cb)
@@ -476,15 +493,14 @@ Installer.prototype.executeActions = function (cb) {
steps.push(
[doSerialActions, 'global-install', staging, todo, trackLifecycle.newGroup('global-install')],
- [doParallelActions, 'fetch', staging, todo, cg.newGroup('fetch', 10)],
[lock, node_modules, '.staging'],
[rimraf, staging],
- [mkdirp, staging],
- [doParallelActions, 'extract', staging, todo, cg.newGroup('extract', 10)],
- [doParallelActions, 'preinstall', staging, todo, trackLifecycle.newGroup('preinstall')],
+ [doParallelActions, 'extract', staging, todo, cg.newGroup('extract', 100)],
[doReverseSerialActions, 'remove', staging, todo, cg.newGroup('remove')],
[doSerialActions, 'move', staging, todo, cg.newGroup('move')],
[doSerialActions, 'finalize', staging, todo, cg.newGroup('finalize')],
+ [doParallelActions, 'refresh-package-json', staging, todo, cg.newGroup('refresh-package-json')],
+ [doParallelActions, 'preinstall', staging, todo, trackLifecycle.newGroup('preinstall')],
[doSerialActions, 'build', staging, todo, trackLifecycle.newGroup('build')],
[doSerialActions, 'global-link', staging, todo, trackLifecycle.newGroup('global-link')],
[doParallelActions, 'update-linked', staging, todo, trackLifecycle.newGroup('update-linked')],
@@ -619,33 +635,27 @@ Installer.prototype.readLocalPackageData = function (cb) {
Installer.prototype.cloneCurrentTreeToIdealTree = function (cb) {
validate('F', arguments)
log.silly('install', 'cloneCurrentTreeToIdealTree')
- this.idealTree = copyTree(this.currentTree)
+ this.idealTree = copyTree(this.currentTree, (child) => {
+ // Filter out any children we didn't install ourselves. They need to be
+ // reinstalled in order for things to be correct.
+ return child.isTop || isLink(child) || (
+ child.package &&
+ child.package._resolved &&
+ (child.package._integrity || child.package._shasum)
+ )
+ })
this.idealTree.warnings = []
cb()
}
+function isLink (child) {
+ return child.isLink || (child.parent && isLink(child.parent))
+}
+
Installer.prototype.loadShrinkwrap = function (cb) {
validate('F', arguments)
log.silly('install', 'loadShrinkwrap')
- var installNewModules = !!this.args.length
- if (installNewModules) {
- readShrinkwrap(this.idealTree, cb)
- } else {
- readShrinkwrap.andInflate(this.idealTree, cb)
- }
-}
-
-Installer.prototype.normalizeTree = function (log, cb) {
- validate('OF', arguments)
- log.silly('install', 'normalizeTree')
- recalculateMetadata(this.currentTree, log, iferr(cb, function (tree) {
- tree.children.forEach(function (child) {
- if (child.requiredBy.length === 0) {
- child.existing = true
- }
- })
- cb(null, tree)
- }))
+ readShrinkwrap.andInflate(this.idealTree, cb)
}
Installer.prototype.getInstalledModules = function () {
@@ -658,46 +668,162 @@ Installer.prototype.getInstalledModules = function () {
})
}
+Installer.prototype.printWarnings = function (cb) {
+ if (!this.idealTree) return cb()
+
+ var self = this
+ var warned = false
+ this.idealTree.warnings.forEach(function (warning) {
+ if (warning.code === 'EPACKAGEJSON' && self.global) return
+ if (warning.code === 'ENOTDIR') return
+ warned = true
+ var msg = errorMessage(warning)
+ msg.summary.forEach(function (logline) {
+ log.warn.apply(log, logline)
+ })
+ msg.detail.forEach(function (logline) {
+ log.verbose.apply(log, logline)
+ })
+ })
+ if (warned && log.levels[npm.config.get('loglevel')] <= log.levels.warn) console.error()
+ cb()
+}
+
Installer.prototype.printInstalled = function (cb) {
validate('F', arguments)
+ if (this.failing) return cb()
log.silly('install', 'printInstalled')
+ if (npm.config.get('json')) {
+ return this.printInstalledForJSON(cb)
+ } else if (npm.config.get('parseable')) {
+ return this.printInstalledForParseable(cb)
+ } else {
+ return this.printInstalledForHuman(cb)
+ }
+}
+
+Installer.prototype.printInstalledForHuman = function (cb) {
+ var removed = 0
+ var added = 0
+ var updated = 0
+ var moved = 0
+ this.differences.forEach(function (action) {
+ var mutation = action[0]
+ if (mutation === 'remove') {
+ ++removed
+ } else if (mutation === 'move') {
+ ++moved
+ } else if (mutation === 'add') {
+ ++added
+ } else if (mutation === 'update' || mutation === 'update-linked') {
+ ++updated
+ }
+ })
+ var report = ''
+ var actions = []
+ if (added) actions.push('added ' + packages(added))
+ if (removed) actions.push('removed ' + packages(removed))
+ if (updated) actions.push('updated ' + packages(updated))
+ if (moved) actions.push('moved ' + packages(moved))
+ if (actions.length === 0) {
+ report += 'up to date'
+ } else if (actions.length === 1) {
+ report += actions[0]
+ } else {
+ var lastAction = actions.pop()
+ report += actions.join(', ') + ' and ' + lastAction
+ }
+ report += ' in ' + ((Date.now() - this.started) / 1000) + 's'
+
+ output(report)
+ return cb()
+
+ function packages (num) {
+ return num + ' package' + (num > 1 ? 's' : '')
+ }
+}
+
+Installer.prototype.printInstalledForJSON = function (cb) {
+ var result = {
+ added: [],
+ removed: [],
+ updated: [],
+ moved: [],
+ failed: [],
+ warnings: [],
+ elapsed: Date.now() - this.started
+ }
var self = this
+ this.idealTree.warnings.forEach(function (warning) {
+ if (warning.code === 'EPACKAGEJSON' && self.global) return
+ if (warning.code === 'ENOTDIR') return
+ var output = errorMessage(warning)
+ var message = flattenMessage(output.summary)
+ if (output.detail.length) {
+ message += '\n' + flattenMessage(output.detail)
+ }
+ result.warnings.push(message)
+ })
this.differences.forEach(function (action) {
var mutation = action[0]
var child = action[1]
- var name = packageId(child)
- var where = path.relative(self.where, child.path)
- if (mutation === 'remove') {
- output('- ' + name + ' ' + where)
+ var record = recordAction(action)
+ if (child.failed) {
+ result.failed.push(record)
+ } else if (mutation === 'add') {
+ result.added.push(record)
+ } else if (mutation === 'update' || mutation === 'update-linked') {
+ result.updated.push(record)
} else if (mutation === 'move') {
- var oldWhere = path.relative(self.where, child.fromPath)
- output(name + ' ' + oldWhere + ' -> ' + where)
+ result.moved.push(record)
+ } else if (mutation === 'remove') {
+ result.removed.push(record)
}
})
- var addedOrMoved = this.differences.filter(function (action) {
+ output(JSON.stringify(result, null, 2))
+ cb()
+
+ function flattenMessage (msg) {
+ return msg.map(function (logline) { return logline.slice(1).join(' ') }).join('\n')
+ }
+
+ function recordAction (action) {
var mutation = action[0]
var child = action[1]
- return !child.failed && (mutation === 'add' || mutation === 'update')
- }).map(function (action) {
+ var result = {
+ action: mutation,
+ name: moduleName(child),
+ version: child.package && child.package.version,
+ path: child.path
+ }
+ if (mutation === 'move') {
+ result.previousPath = child.fromPath
+ } else if (mutation === 'update') {
+ result.previousVersion = child.oldPkg.package && child.oldPkg.package.version
+ }
+ return result
+ }
+}
+
+Installer.prototype.printInstalledForParseable = function (cb) {
+ var self = this
+ this.differences.forEach(function (action) {
+ var mutation = action[0]
var child = action[1]
- return child.path
+ if (mutation === 'move') {
+ var previousPath = path.relative(self.where, child.fromPath)
+ } else if (mutation === 'update') {
+ var previousVersion = child.oldPkg.package && child.oldPkg.package.version
+ }
+ output(
+ mutation + '\t' +
+ moduleName(child) + '\t' +
+ (child.package ? child.package.version : '') + '\t' +
+ path.relative(self.where, child.path) + '\t' +
+ (previousVersion || '') + '\t' +
+ (previousPath || ''))
})
- if (!addedOrMoved.length) return cb()
- // TODO: remove the recalculateMetadata, should not be needed
- recalculateMetadata(this.idealTree, log, iferr(cb, function (tree) {
- // These options control both how installs happen AND how `ls` shows output.
- // Something like `npm install --production` only installs production deps.
- // By contrast `npm install --production foo` installs `foo` and the
- // `production` option is ignored. But when it comes time for `ls` to show
- // its output, it excludes the thing we just installed because that flag.
- // The summary output we get should be unfiltered, showing everything
- // installed, so we clear these options before calling `ls`.
- npm.config.set('production', false)
- npm.config.set('dev', false)
- npm.config.set('only', '')
- npm.config.set('also', '')
- ls.fromTree(self.where, tree, addedOrMoved, false, andIgnoreErrors(cb))
- }))
+ return cb()
}
Installer.prototype.debugActions = function (name, actionListName, cb) {
@@ -730,7 +856,7 @@ Installer.prototype.prettify = function (tree) {
seen[tree.path] = true
return {
label: packageId(tree),
- nodes: tree.children.filter(function (tree) { return !seen[tree.path] }).sort(byName).map(expandTree)
+ nodes: tree.children.filter((tree) => { return !seen[tree.path] && !tree.removed && !tree.failed }).sort(byName).map(expandTree)
}
}
return archy(expandTree(tree), '', { unicode: npm.config.get('unicode') })
diff --git a/deps/npm/lib/install/action/extract.js b/deps/npm/lib/install/action/extract.js
index fd9562c184..7839177850 100644
--- a/deps/npm/lib/install/action/extract.js
+++ b/deps/npm/lib/install/action/extract.js
@@ -1,67 +1,56 @@
'use strict'
-var path = require('path')
-var iferr = require('iferr')
-var asyncMap = require('slide').asyncMap
-var fs = require('graceful-fs')
-var mkdirp = require('mkdirp')
-var move = require('../../utils/move.js')
-var gentlyRm = require('../../utils/gently-rm.js')
-var updatePackageJson = require('../update-package-json')
-var npm = require('../../npm.js')
-var moduleName = require('../../utils/module-name.js')
-var packageId = require('../../utils/package-id.js')
-var cache = require('../../cache.js')
-var moduleStagingPath = require('../module-staging-path.js')
-var readPackageJson = require('read-package-json')
-module.exports = function (staging, pkg, log, next) {
- log.silly('extract', packageId(pkg))
- var up = npm.config.get('unsafe-perm')
- var user = up ? null : npm.config.get('user')
- var group = up ? null : npm.config.get('group')
- var extractTo = moduleStagingPath(staging, pkg)
- cache.unpack(pkg.package.name, pkg.package.version, extractTo, null, null, user, group,
- andUpdatePackageJson(pkg, staging, extractTo,
- andStageBundledChildren(pkg, staging, extractTo, log,
- andRemoveExtraneousBundles(extractTo, next))))
-}
-
-function andUpdatePackageJson (pkg, staging, extractTo, next) {
- return iferr(next, function () {
- readPackageJson(path.join(extractTo, 'package.json'), false, function (err, metadata) {
- if (!err) {
- // Copy _ keys (internal to npm) and any missing keys from the possibly incomplete
- // registry metadata over to the full package metadata read off of disk.
- Object.keys(pkg.package).forEach(function (key) {
- if (key[0] === '_' || !(key in metadata)) metadata[key] = pkg.package[key]
- })
- metadata.name = pkg.package.name // things go wrong if these don't match
- pkg.package = metadata
- }
- updatePackageJson(pkg, extractTo, next)
- })
- })
-}
+const BB = require('bluebird')
-function andStageBundledChildren (pkg, staging, extractTo, log, next) {
- return iferr(next, function () {
- if (!pkg.package.bundleDependencies) return next()
+const fs = BB.promisifyAll(require('graceful-fs'))
+const gentlyRm = BB.promisify(require('../../utils/gently-rm.js'))
+const log = require('npmlog')
+const mkdirp = BB.promisify(require('mkdirp'))
+const moduleName = require('../../utils/module-name.js')
+const moduleStagingPath = require('../module-staging-path.js')
+const move = BB.promisify(require('../../utils/move.js'))
+const npa = require('npm-package-arg')
+const npm = require('../../npm.js')
+const packageId = require('../../utils/package-id.js')
+const pacote = require('pacote')
+const pacoteOpts = require('../../config/pacote')
+const path = require('path')
- asyncMap(pkg.children, andStageBundledModule(pkg, staging, extractTo), next)
+module.exports = extract
+function extract (staging, pkg, log) {
+ log.silly('extract', packageId(pkg))
+ const up = npm.config.get('unsafe-perm')
+ const user = up ? null : npm.config.get('user')
+ const group = up ? null : npm.config.get('group')
+ const extractTo = moduleStagingPath(staging, pkg)
+ const opts = pacoteOpts({
+ uid: user,
+ gid: group,
+ integrity: pkg.package._integrity
})
-}
-
-function andRemoveExtraneousBundles (extractTo, next) {
- return iferr(next, function () {
- gentlyRm(path.join(extractTo, 'node_modules'), next)
+ return pacote.extract(
+ pkg.package._resolved
+ ? npa.resolve(pkg.package.name, pkg.package._resolved)
+ : pkg.package._requested,
+ extractTo,
+ opts
+ ).then(() => {
+ if (pkg.package.bundleDependencies) {
+ return readBundled(pkg, staging, extractTo)
+ }
+ }).then(() => {
+ return gentlyRm(path.join(extractTo, 'node_modules'))
})
}
-function andStageBundledModule (bundler, staging, parentPath) {
- return function (child, next) {
- if (child.error) return next(child.error)
- stageBundledModule(bundler, child, staging, parentPath, next)
- }
+function readBundled (pkg, staging, extractTo) {
+ return BB.map(pkg.children, (child) => {
+ if (child.error) {
+ throw child.error
+ } else {
+ return stageBundledModule(pkg, child, staging, extractTo)
+ }
+ }, {concurrency: 10})
}
function getTree (pkg) {
@@ -70,47 +59,43 @@ function getTree (pkg) {
}
function warn (pkg, code, msg) {
- var tree = getTree(pkg)
- var err = new Error(msg)
+ const tree = getTree(pkg)
+ const err = new Error(msg)
err.code = code
tree.warnings.push(err)
}
-function stageBundledModule (bundler, child, staging, parentPath, next) {
- var stageFrom = path.join(parentPath, 'node_modules', child.package.name)
- var stageTo = moduleStagingPath(staging, child)
-
- return asyncMap(child.children, andStageBundledModule(bundler, staging, stageFrom), iferr(next, finishModule))
+function stageBundledModule (bundler, child, staging, parentPath) {
+ const stageFrom = path.join(parentPath, 'node_modules', child.package.name)
+ const stageTo = moduleStagingPath(staging, child)
- function finishModule () {
- // If we were the one's who bundled this module…
- if (child.fromBundle === bundler) {
- return moveModule()
+ return BB.map(child.children, (child) => {
+ if (child.error) {
+ throw child.error
} else {
- return checkForReplacement()
+ return stageBundledModule(bundler, child, staging, stageFrom)
}
- }
-
- function moveModule () {
- return mkdirp(path.dirname(stageTo), iferr(next, function () {
- return move(stageFrom, stageTo, iferr(next, updateMovedPackageJson))
- }))
- }
+ }).then(() => {
+ return finishModule(bundler, child, stageTo, stageFrom)
+ })
+}
- function checkForReplacement () {
- return fs.stat(stageFrom, function (notExists, exists) {
- if (exists) {
- warn(bundler, 'EBUNDLEOVERRIDE', 'In ' + packageId(bundler) +
- ' replacing bundled version of ' + moduleName(child) +
- ' with ' + packageId(child))
- return gentlyRm(stageFrom, next)
- } else {
- return next()
- }
+function finishModule (bundler, child, stageTo, stageFrom) {
+ // If we were the one's who bundled this module…
+ if (child.fromBundle === bundler) {
+ return mkdirp(path.dirname(stageTo)).then(() => {
+ return move(stageFrom, stageTo)
})
- }
-
- function updateMovedPackageJson () {
- updatePackageJson(child, stageTo, next)
+ } else {
+ return fs.statAsync(stageFrom).then(() => {
+ const bundlerId = packageId(bundler)
+ if (!getTree(bundler).warnings.some((w) => {
+ return w.code === 'EBUNDLEOVERRIDE'
+ })) {
+ warn(bundler, 'EBUNDLEOVERRIDE', `${bundlerId} had bundled packages that do not match the required version(s). They have been replaced with non-bundled versions.`)
+ }
+ log.verbose('bundle', `EBUNDLEOVERRIDE: Replacing ${bundlerId}'s bundled version of ${moduleName(child)} with ${packageId(child)}.`)
+ return gentlyRm(stageFrom)
+ }, () => {})
}
}
diff --git a/deps/npm/lib/install/action/fetch.js b/deps/npm/lib/install/action/fetch.js
index 0e9146a0d5..474e00b05c 100644
--- a/deps/npm/lib/install/action/fetch.js
+++ b/deps/npm/lib/install/action/fetch.js
@@ -1,29 +1,12 @@
'use strict'
-// var cache = require('../../cache.js')
-// var packageId = require('../../utils/package-id.js')
-// var moduleName = require('../../utils/module-name.js')
-module.exports = function (staging, pkg, log, next) {
- next()
-/*
-// FIXME: Unnecessary as long as we have to have the tarball to resolve all deps, which
-// is progressively seeming to be likely for the indefinite future.
-// ALSO fails for local deps specified with relative URLs outside of the top level.
+const packageId = require('../../utils/package-id.js')
+const pacote = require('pacote')
+const pacoteOpts = require('../../config/pacote')
- var name = moduleName(pkg)
- var version
- switch (pkg.package._requested.type) {
- case 'version':
- case 'range':
- version = pkg.package.version
- break
- case 'hosted':
- name = name + '@' + pkg.package._requested.spec
- break
- default:
- name = pkg.package._requested.raw
- }
+module.exports = fetch
+function fetch (staging, pkg, log, next) {
log.silly('fetch', packageId(pkg))
- cache.add(name, version, pkg.parent.path, false, next)
-*/
+ const opts = pacoteOpts({integrity: pkg.package._integrity})
+ pacote.prefetch(pkg.package._requested, opts).then(() => next(), next)
}
diff --git a/deps/npm/lib/install/action/finalize.js b/deps/npm/lib/install/action/finalize.js
index 03a71f4cc0..1e86475710 100644
--- a/deps/npm/lib/install/action/finalize.js
+++ b/deps/npm/lib/install/action/finalize.js
@@ -1,85 +1,94 @@
'use strict'
-var path = require('path')
-var rimraf = require('rimraf')
-var fs = require('graceful-fs')
-var mkdirp = require('mkdirp')
-var asyncMap = require('slide').asyncMap
-var move = require('../../utils/move.js')
-var gentlyRm = require('../../utils/gently-rm')
-var moduleStagingPath = require('../module-staging-path.js')
+const path = require('path')
+const fs = require('graceful-fs')
+const Bluebird = require('bluebird')
+const rimraf = Bluebird.promisify(require('rimraf'))
+const mkdirp = Bluebird.promisify(require('mkdirp'))
+const lstat = Bluebird.promisify(fs.lstat)
+const readdir = Bluebird.promisify(fs.readdir)
+const symlink = Bluebird.promisify(fs.symlink)
+const gentlyRm = require('../../utils/gently-rm')
+const moduleStagingPath = require('../module-staging-path.js')
+const move = require('move-concurrently')
+const moveOpts = {fs: fs, Promise: Bluebird, maxConcurrency: 4}
+const getRequested = require('../get-requested.js')
-module.exports = function (staging, pkg, log, next) {
- log.silly('finalize', pkg.path)
+module.exports = function (staging, pkg, log) {
+ log.silly('finalize', pkg.realpath)
- var extractedTo = moduleStagingPath(staging, pkg)
+ const extractedTo = moduleStagingPath(staging, pkg)
- var delpath = path.join(path.dirname(pkg.path), '.' + path.basename(pkg.path) + '.DELETE')
+ const delpath = path.join(path.dirname(pkg.realpath), '.' + path.basename(pkg.realpath) + '.DELETE')
+ let movedDestAway = false
- mkdirp(path.resolve(pkg.path, '..'), whenParentExists)
-
- function whenParentExists (mkdirEr) {
- if (mkdirEr) return next(mkdirEr)
- // We stat first, because we can't rely on ENOTEMPTY from Windows.
- // Windows, by contrast, gives the generic EPERM of a folder already exists.
- fs.lstat(pkg.path, destStatted)
- }
-
- function destStatted (doesNotExist) {
- if (doesNotExist) {
- move(extractedTo, pkg.path, whenMoved)
- } else {
- moveAway()
- }
- }
-
- function whenMoved (moveEr) {
- if (!moveEr) return next()
- if (moveEr.code !== 'ENOTEMPTY' && moveEr.code !== 'EEXIST') return next(moveEr)
- moveAway()
+ const requested = pkg.package._requested || getRequested(pkg)
+ if (requested.type === 'directory') {
+ return makeParentPath(pkg.path)
+ .then(() => symlink(pkg.realpath, pkg.path, 'junction'))
+ .catch((ex) => {
+ return rimraf(pkg.path).then(() => symlink(pkg.realpath, pkg.path, 'junction'))
+ })
+ } else {
+ return makeParentPath(pkg.realpath)
+ .then(moveStagingToDestination)
+ .then(restoreOldNodeModules)
+ .catch((err) => {
+ if (movedDestAway) {
+ return rimraf(pkg.realpath).then(moveOldDestinationBack).then(() => {
+ throw err
+ })
+ } else {
+ throw err
+ }
+ })
+ .then(() => rimraf(delpath))
}
- function moveAway () {
- move(pkg.path, delpath, whenOldMovedAway)
+ function makeParentPath (dir) {
+ return mkdirp(path.dirname(dir))
}
- function whenOldMovedAway (moveEr) {
- if (moveEr) return next(moveEr)
- move(extractedTo, pkg.path, whenConflictMoved)
+ function moveStagingToDestination () {
+ return destinationIsClear()
+ .then(actuallyMoveStaging)
+ .catch(() => moveOldDestinationAway().then(actuallyMoveStaging))
}
- function whenConflictMoved (moveEr) {
- // if we got an error we'll try to put back the original module back,
- // succeed or fail though we want the original error that caused this
- if (moveEr) return move(delpath, pkg.path, function () { next(moveEr) })
- fs.readdir(path.join(delpath, 'node_modules'), makeTarget)
+ function destinationIsClear () {
+ return lstat(pkg.realpath).then(() => {
+ throw new Error('destination exists')
+ }, () => {})
}
- function makeTarget (readdirEr, files) {
- if (readdirEr) return cleanup()
- if (!files.length) return cleanup()
- mkdirp(path.join(pkg.path, 'node_modules'), function (mkdirEr) { moveModules(mkdirEr, files) })
+ function actuallyMoveStaging () {
+ return move(extractedTo, pkg.realpath, moveOpts)
}
- function moveModules (mkdirEr, files) {
- if (mkdirEr) return next(mkdirEr)
- asyncMap(files, function (file, done) {
- var from = path.join(delpath, 'node_modules', file)
- var to = path.join(pkg.path, 'node_modules', file)
- move(from, to, done)
- }, cleanup)
+ function moveOldDestinationAway () {
+ return rimraf(delpath).then(() => {
+ return move(pkg.realpath, delpath, moveOpts)
+ }).then(() => { movedDestAway = true })
}
- function cleanup (moveEr) {
- if (moveEr) return next(moveEr)
- rimraf(delpath, afterCleanup)
+ function moveOldDestinationBack () {
+ return move(delpath, pkg.realpath, moveOpts).then(() => { movedDestAway = false })
}
- function afterCleanup (rimrafEr) {
- if (rimrafEr) log.warn('finalize', rimrafEr)
- next()
+ function restoreOldNodeModules () {
+ if (!movedDestAway) return
+ return readdir(path.join(delpath, 'node_modules')).catch(() => []).then((modules) => {
+ if (!modules.length) return
+ return mkdirp(path.join(pkg.realpath, 'node_modules')).then(() => Bluebird.map(modules, (file) => {
+ const from = path.join(delpath, 'node_modules', file)
+ const to = path.join(pkg.realpath, 'node_modules', file)
+ return move(from, to, moveOpts)
+ }))
+ })
}
}
module.exports.rollback = function (top, staging, pkg, next) {
- gentlyRm(pkg.path, false, top, next)
+ const requested = pkg.package._requested || getRequested(pkg)
+ if (requested.type === 'directory') return next()
+ gentlyRm(pkg.realpath, false, top, next)
}
diff --git a/deps/npm/lib/install/action/global-install.js b/deps/npm/lib/install/action/global-install.js
index e4fd8d11d1..bdc121b693 100644
--- a/deps/npm/lib/install/action/global-install.js
+++ b/deps/npm/lib/install/action/global-install.js
@@ -8,7 +8,7 @@ module.exports = function (staging, pkg, log, next) {
log.silly('global-install', packageId(pkg))
var globalRoot = path.resolve(npm.globalDir, '..')
npm.config.set('global', true)
- var install = new Installer(globalRoot, false, [pkg.package.name + '@' + pkg.package._requested.spec])
+ var install = new Installer(globalRoot, false, [pkg.package.name + '@' + pkg.package._requested.fetchSpec])
install.link = false
install.run(function () {
npm.config.set('global', false)
diff --git a/deps/npm/lib/install/action/refresh-package-json.js b/deps/npm/lib/install/action/refresh-package-json.js
new file mode 100644
index 0000000000..337be0caf2
--- /dev/null
+++ b/deps/npm/lib/install/action/refresh-package-json.js
@@ -0,0 +1,38 @@
+'use strict'
+const path = require('path')
+const Bluebird = require('bluebird')
+const readJson = Bluebird.promisify(require('read-package-json'))
+const updatePackageJson = Bluebird.promisify(require('../update-package-json'))
+const getRequested = require('../get-requested.js')
+
+module.exports = function (staging, pkg, log) {
+ log.silly('refresh-package-json', pkg.realpath)
+
+ return readJson(path.join(pkg.path, 'package.json'), false).then((metadata) => {
+ Object.keys(pkg.package).forEach(function (key) {
+ if (key !== '_injectedFromShrinkwrap' && !isEmpty(pkg.package[key])) {
+ metadata[key] = pkg.package[key]
+ if (key === '_resolved' && metadata[key] == null && pkg.package._injectedFromShrinkwrap) {
+ metadata[key] = pkg.package._injectedFromShrinkwrap.resolved
+ }
+ }
+ })
+ // These two sneak in and it's awful
+ delete metadata.readme
+ delete metadata.readmeFilename
+
+ pkg.package = metadata
+ }).catch(() => 'ignore').then(() => {
+ const requested = pkg.package._requested || getRequested(pkg)
+ if (requested.type !== 'directory') {
+ return updatePackageJson(pkg, pkg.path)
+ }
+ })
+}
+
+function isEmpty (value) {
+ if (value == null) return true
+ if (Array.isArray(value)) return !value.length
+ if (typeof value === 'object') return !Object.keys(value).length
+ return false
+}
diff --git a/deps/npm/lib/install/action/update-linked.js b/deps/npm/lib/install/action/update-linked.js
deleted file mode 100644
index 0babe10fdf..0000000000
--- a/deps/npm/lib/install/action/update-linked.js
+++ /dev/null
@@ -1,16 +0,0 @@
-'use strict'
-var path = require('path')
-
-function getTop (pkg) {
- if (pkg.target && pkg.target.parent) return getTop(pkg.target.parent)
- if (pkg.parent) return getTop(pkg.parent)
- return pkg.path
-}
-
-module.exports = function (staging, pkg, log, next) {
- if (pkg.package.version !== pkg.oldPkg.package.version) {
- log.warn('update-linked', path.relative(getTop(pkg), pkg.path), 'needs updating to', pkg.package.version,
- 'from', pkg.oldPkg.package.version, "but we can't, as it's a symlink")
- }
- next()
-}
diff --git a/deps/npm/lib/install/actions.js b/deps/npm/lib/install/actions.js
index cb41217c02..912985e2c7 100644
--- a/deps/npm/lib/install/actions.js
+++ b/deps/npm/lib/install/actions.js
@@ -1,18 +1,16 @@
'use strict'
-var validate = require('aproba')
-var chain = require('slide').chain
-var asyncMap = require('slide').asyncMap
-var limit = require('call-limit')
-var iferr = require('iferr')
-var npm = require('../npm.js')
-var andFinishTracker = require('./and-finish-tracker.js')
-var andAddParentToErrors = require('./and-add-parent-to-errors.js')
-var failedDependency = require('./deps.js').failedDependency
-var moduleName = require('../utils/module-name.js')
-var reportOptionalFailure = require('./report-optional-failure.js')
-var isInstallable = require('./validate-args.js').isInstallable
-
-var actions = {}
+
+const BB = require('bluebird')
+
+const andAddParentToErrors = require('./and-add-parent-to-errors.js')
+const failedDependency = require('./deps.js').failedDependency
+const isInstallable = BB.promisify(require('./validate-args.js').isInstallable)
+const moduleName = require('../utils/module-name.js')
+const npm = require('../npm.js')
+const reportOptionalFailure = require('./report-optional-failure.js')
+const validate = require('aproba')
+
+const actions = {}
actions.fetch = require('./action/fetch.js')
actions.extract = require('./action/extract.js')
@@ -24,20 +22,19 @@ actions.prepare = require('./action/prepare.js')
actions.finalize = require('./action/finalize.js')
actions.remove = require('./action/remove.js')
actions.move = require('./action/move.js')
-actions['update-linked'] = require('./action/update-linked.js')
actions['global-install'] = require('./action/global-install.js')
actions['global-link'] = require('./action/global-link.js')
+actions['refresh-package-json'] = require('./action/refresh-package-json.js')
// FIXME: We wrap actions like three ways to sunday here.
// Rewrite this to only work one way.
Object.keys(actions).forEach(function (actionName) {
var action = actions[actionName]
- actions[actionName] = limit(function (staging, pkg, log, next) {
- // top, buildpath, pkg, log
- validate('SOOF', arguments)
+ actions[actionName] = (staging, pkg, log) => {
+ validate('SOO', [staging, pkg, log])
// refuse to run actions for failed packages
- if (pkg.failed) return next()
+ if (pkg.failed) return BB.resolve()
if (action.rollback) {
if (!pkg.rollback) pkg.rollback = []
pkg.rollback.unshift(action.rollback)
@@ -46,98 +43,137 @@ Object.keys(actions).forEach(function (actionName) {
if (!pkg.commit) pkg.commit = []
pkg.commit.push(action.commit)
}
+
+ let actionP
if (pkg.knownInstallable) {
- return thenRunAction()
+ actionP = runAction(action, staging, pkg, log)
} else {
- return isInstallable(pkg.package, iferr(andDone(next), andMarkInstallable(thenRunAction)))
- }
- function andMarkInstallable (cb) {
- return function () {
+ actionP = isInstallable(pkg.package).then(() => {
pkg.knownInstallable = true
- cb()
- }
+ return runAction(action, staging, pkg, log)
+ })
}
- function thenRunAction () {
- action(staging, pkg, log, andDone(next))
- }
- function andDone (cb) {
- return andFinishTracker(log, andAddParentToErrors(pkg.parent, andHandleOptionalDepErrors(pkg, cb)))
- }
- }, npm.limit.action)
+
+ return actionP.then(() => {
+ log.finish()
+ }, (err) => {
+ return BB.fromNode((cb) => {
+ andAddParentToErrors(pkg.parent, cb)(err)
+ }).catch((err) => {
+ return handleOptionalDepErrors(pkg, err)
+ })
+ })
+ }
})
+exports.actions = actions
+
+function runAction (action, staging, pkg, log) {
+ return BB.fromNode((cb) => {
+ const result = action(staging, pkg, log, cb)
+ if (result && result.then) {
+ result.then(() => cb(), cb)
+ }
+ })
+}
function markAsFailed (pkg) {
pkg.failed = true
- pkg.requires.forEach(function (req) {
- req.requiredBy = req.requiredBy.filter(function (reqReqBy) { return reqReqBy !== pkg })
- if (req.requiredBy.length === 0 && !req.userRequired && !req.existing) {
+ pkg.requires.forEach((req) => {
+ req.requiredBy = req.requiredBy.filter((reqReqBy) => {
+ return reqReqBy !== pkg
+ })
+ if (req.requiredBy.length === 0 && !req.userRequired) {
markAsFailed(req)
}
})
}
-function andHandleOptionalDepErrors (pkg, next) {
- return function (er) {
- if (!er) return next.apply(null, arguments)
- markAsFailed(pkg)
- var anyFatal = pkg.userRequired || pkg.isTop
- for (var ii = 0; ii < pkg.requiredBy.length; ++ii) {
- var parent = pkg.requiredBy[ii]
- var isFatal = failedDependency(parent, pkg)
- if (isFatal) anyFatal = true
- }
- if (anyFatal) return next.apply(null, arguments)
- reportOptionalFailure(pkg, null, er)
- next()
+function handleOptionalDepErrors (pkg, err) {
+ markAsFailed(pkg)
+ var anyFatal = pkg.userRequired || pkg.isTop
+ for (var ii = 0; ii < pkg.requiredBy.length; ++ii) {
+ var parent = pkg.requiredBy[ii]
+ var isFatal = failedDependency(parent, pkg)
+ if (isFatal) anyFatal = true
}
-}
-
-function prepareAction (staging, log) {
- validate('SO', arguments)
- return function (action) {
- validate('SO', action)
- var cmd = action[0]
- var pkg = action[1]
- if (!actions[cmd]) throw new Error('Unknown decomposed command "' + cmd + '" (is it new?)')
- return [actions[cmd], staging, pkg, log.newGroup(cmd + ':' + moduleName(pkg))]
+ if (anyFatal) {
+ throw err
+ } else {
+ reportOptionalFailure(pkg, null, err)
}
}
-exports.actions = actions
-
-function execAction (todo, done) {
- validate('AF', arguments)
- var cmd = todo.shift()
- todo.push(done)
- cmd.apply(null, todo)
+exports.doOne = doOne
+function doOne (cmd, staging, pkg, log, next) {
+ validate('SSOOF', arguments)
+ execAction(prepareAction([cmd, pkg], staging, log)).then(() => next(), next)
}
-exports.doOne = function (cmd, staging, pkg, log, next) {
- validate('SSOOF', arguments)
- execAction(prepareAction(staging, log)([cmd, pkg]), next)
+exports.doParallel = doParallel
+function doParallel (type, staging, actionsToRun, log, next) {
+ validate('SSAOF', arguments)
+ const acts = actionsToRun.reduce((acc, todo) => {
+ if (todo[0] === type) {
+ acc.push(prepareAction(todo, staging, log))
+ }
+ return acc
+ }, [])
+ log.silly('doParallel', type + ' ' + actionsToRun.length)
+ time(log)
+ BB.map(acts, execAction, {
+ concurrency: npm.limit.action
+ }).nodeify((err) => {
+ log.finish()
+ timeEnd(log)
+ next(err)
+ })
}
-exports.doSerial = function (type, staging, actionsToRun, log, next) {
+exports.doSerial = doSerial
+function doSerial (type, staging, actionsToRun, log, next) {
validate('SSAOF', arguments)
- actionsToRun = actionsToRun
- .filter(function (value) { return value[0] === type })
log.silly('doSerial', '%s %d', type, actionsToRun.length)
- chain(actionsToRun.map(prepareAction(staging, log)), andFinishTracker(log, next))
+ runSerial(type, staging, actionsToRun, log, next)
}
-exports.doReverseSerial = function (type, staging, actionsToRun, log, next) {
+exports.doReverseSerial = doReverseSerial
+function doReverseSerial (type, staging, actionsToRun, log, next) {
validate('SSAOF', arguments)
- actionsToRun = actionsToRun
- .filter(function (value) { return value[0] === type })
- .reverse()
log.silly('doReverseSerial', '%s %d', type, actionsToRun.length)
- chain(actionsToRun.map(prepareAction(staging, log)), andFinishTracker(log, next))
+ runSerial(type, staging, actionsToRun.reverse(), log, next)
}
-exports.doParallel = function (type, staging, actionsToRun, log, next) {
- validate('SSAOF', arguments)
- actionsToRun = actionsToRun.filter(function (value) { return value[0] === type })
- log.silly('doParallel', type + ' ' + actionsToRun.length)
+function runSerial (type, staging, actionsToRun, log, next) {
+ const acts = actionsToRun.reduce((acc, todo) => {
+ if (todo[0] === type) {
+ acc.push(prepareAction(todo, staging, log))
+ }
+ return acc
+ }, [])
+ time(log)
+ BB.each(acts, execAction).nodeify((err) => {
+ log.finish()
+ timeEnd(log)
+ next(err)
+ })
+}
+
+function time (log) {
+ process.emit('time', 'action:' + log.name)
+}
+function timeEnd (log) {
+ process.emit('timeEnd', 'action:' + log.name)
+}
+
+function prepareAction (action, staging, log) {
+ validate('ASO', arguments)
+ validate('SO', action)
+ var cmd = action[0]
+ var pkg = action[1]
+ if (!actions[cmd]) throw new Error('Unknown decomposed command "' + cmd + '" (is it new?)')
+ return [actions[cmd], staging, pkg, log.newGroup(cmd + ':' + moduleName(pkg))]
+}
- asyncMap(actionsToRun.map(prepareAction(staging, log)), execAction, andFinishTracker(log, next))
+function execAction (todo) {
+ return todo[0].apply(null, todo.slice(1))
}
diff --git a/deps/npm/lib/install/copy-tree.js b/deps/npm/lib/install/copy-tree.js
index 67a9c687a2..a5b558cf59 100644
--- a/deps/npm/lib/install/copy-tree.js
+++ b/deps/npm/lib/install/copy-tree.js
@@ -1,25 +1,31 @@
'use strict'
-
-module.exports = function (tree) {
- return copyTree(tree, {})
+var createNode = require('./node.js').create
+module.exports = function (tree, filter) {
+ return copyTree(tree, {}, filter)
}
-function copyTree (tree, cache) {
- if (cache[tree.path]) return cache[tree.path]
- var newTree = cache[tree.path] = Object.create(tree)
- copyModuleList(newTree, 'children', cache)
+function copyTree (tree, cache, filter) {
+ if (filter && !filter(tree)) { return null }
+ if (cache[tree.path]) { return cache[tree.path] }
+ var newTree = cache[tree.path] = createNode(Object.assign({}, tree))
+ copyModuleList(newTree, 'children', cache, filter)
newTree.children.forEach(function (child) {
child.parent = newTree
})
- copyModuleList(newTree, 'requires', cache)
- copyModuleList(newTree, 'requiredBy', cache)
+ copyModuleList(newTree, 'requires', cache, filter)
+ copyModuleList(newTree, 'requiredBy', cache, filter)
return newTree
}
-function copyModuleList (tree, key, cache) {
+function copyModuleList (tree, key, cache, filter) {
var newList = []
- tree[key].forEach(function (child) {
- newList.push(copyTree(child, cache))
- })
+ if (tree[key]) {
+ tree[key].forEach(function (child) {
+ const copy = copyTree(child, cache, filter)
+ if (copy) {
+ newList.push(copy)
+ }
+ })
+ }
tree[key] = newList
}
diff --git a/deps/npm/lib/install/decompose-actions.js b/deps/npm/lib/install/decompose-actions.js
index 70db70d035..1d954f5cab 100644
--- a/deps/npm/lib/install/decompose-actions.js
+++ b/deps/npm/lib/install/decompose-actions.js
@@ -19,7 +19,6 @@ module.exports = function (differences, decomposed, next) {
moveSteps(decomposed, pkg, done)
break
case 'remove':
- case 'update-linked':
default:
defaultSteps(decomposed, cmd, pkg, done)
}
@@ -27,10 +26,9 @@ module.exports = function (differences, decomposed, next) {
}
function addSteps (decomposed, pkg, done) {
- if (!pkg.fromBundle) {
+ if (!pkg.fromBundle && !pkg.isLink) {
decomposed.push(['fetch', pkg])
decomposed.push(['extract', pkg])
- decomposed.push(['test', pkg])
}
if (!pkg.fromBundle || npm.config.get('rebuild-bundle')) {
decomposed.push(['preinstall', pkg])
@@ -38,7 +36,10 @@ function addSteps (decomposed, pkg, done) {
decomposed.push(['install', pkg])
decomposed.push(['postinstall', pkg])
}
- decomposed.push(['finalize', pkg])
+ if (!pkg.fromBundle || !pkg.isLink) {
+ decomposed.push(['finalize', pkg])
+ }
+ decomposed.push(['refresh-package-json', pkg])
done()
}
@@ -52,7 +53,7 @@ function moveSteps (decomposed, pkg, done) {
decomposed.push(['build', pkg])
decomposed.push(['install', pkg])
decomposed.push(['postinstall', pkg])
- decomposed.push(['test', pkg])
+ decomposed.push(['refresh-package-json', pkg])
done()
}
diff --git a/deps/npm/lib/install/deps.js b/deps/npm/lib/install/deps.js
index d1feb6cd4b..3f3433535f 100644
--- a/deps/npm/lib/install/deps.js
+++ b/deps/npm/lib/install/deps.js
@@ -1,20 +1,19 @@
'use strict'
+
+const BB = require('bluebird')
+
+var fs = require('fs')
var assert = require('assert')
var path = require('path')
var semver = require('semver')
var asyncMap = require('slide').asyncMap
var chain = require('slide').chain
-var union = require('lodash.union')
var iferr = require('iferr')
var npa = require('npm-package-arg')
var validate = require('aproba')
-var realizePackageSpecifier = require('realize-package-specifier')
-var realizeShrinkwrapSpecifier = require('./realize-shrinkwrap-specifier')
-var asap = require('asap')
var dezalgo = require('dezalgo')
var fetchPackageMetadata = require('../fetch-package-metadata.js')
var andAddParentToErrors = require('./and-add-parent-to-errors.js')
-var addShrinkwrap = require('../fetch-package-metadata.js').addShrinkwrap
var addBundled = require('../fetch-package-metadata.js').addBundled
var readShrinkwrap = require('./read-shrinkwrap.js')
var inflateShrinkwrap = require('./inflate-shrinkwrap.js')
@@ -24,35 +23,17 @@ var npm = require('../npm.js')
var flatNameFromTree = require('./flatten-tree.js').flatNameFromTree
var createChild = require('./node.js').create
var resetMetadata = require('./node.js').reset
-var andIgnoreErrors = require('./and-ignore-errors.js')
var isInstallable = require('./validate-args.js').isInstallable
var packageId = require('../utils/package-id.js')
var moduleName = require('../utils/module-name.js')
var isDevDep = require('./is-dev-dep.js')
var isProdDep = require('./is-prod-dep.js')
var reportOptionalFailure = require('./report-optional-failure.js')
+var getSaveType = require('./save.js').getSaveType
// The export functions in this module mutate a dependency tree, adding
// items to them.
-function isDep (tree, child, cb) {
- var name = moduleName(child)
- var prodVer = isProdDep(tree, name)
- var devVer = isDevDep(tree, name)
-
- childDependencySpecifier(tree, name, prodVer, function (er, prodSpec) {
- if (er) return cb(child.fromShrinkwrap)
- var matches
- if (prodSpec) matches = doesChildVersionMatch(child, prodSpec, tree)
- if (matches) return cb(true, prodSpec)
- if (devVer === prodVer) return cb(child.fromShrinkwrap)
- childDependencySpecifier(tree, name, devVer, function (er, devSpec) {
- if (er) return cb(child.fromShrinkwrap)
- cb(doesChildVersionMatch(child, devSpec, tree) || child.fromShrinkwrap, null, devSpec)
- })
- })
-}
-
var registryTypes = { range: true, version: true }
function doesChildVersionMatch (child, requested, requestor) {
@@ -61,130 +42,121 @@ function doesChildVersionMatch (child, requested, requestor) {
if (child.parent === requestor && child.fromShrinkwrap) return true
// ranges of * ALWAYS count as a match, because when downloading we allow
// prereleases to match * if there are ONLY prereleases
- if (requested.spec === '*') return true
+ if (requested.type === 'range' && requested.fetchSpec === '*') return true
- var childReq = child.package._requested
- if (!childReq) childReq = npa(moduleName(child) + '@' + child.package._from)
- if (childReq) {
- if (childReq.rawSpec === requested.rawSpec) return true
- if (childReq.type === requested.type && childReq.spec === requested.spec) return true
+ if (requested.type === 'directory') {
+ if (!child.isLink) return false
+ return path.relative(child.realpath, requested.fetchSpec) === ''
}
- // If _requested didn't exist OR if it didn't match then we'll try using
- // _from. We pass it through npa to normalize the specifier.
- // This can happen when installing from an `npm-shrinkwrap.json` where `_requested` will
- // be the tarball URL from `resolved` and thus can't match what's in the `package.json`.
- // In those cases _from, will be preserved and we can compare that to ensure that they
- // really came from the same sources.
- // You'll see this scenario happen with at least tags and git dependencies.
+
if (!registryTypes[requested.type]) {
+ var childReq = child.package._requested
+ if (!childReq && child.package._from) {
+ childReq = npa.resolve(moduleName(child), child.package._from.replace(new RegExp('^' + moduleName(child) + '@'), ''))
+ }
+ if (childReq) {
+ if (childReq.rawSpec === requested.rawSpec) return true
+ if (childReq.type === requested.type && childReq.saveSpec === requested.saveSpec) return true
+ if (childReq.type === requested.type && childReq.spec === requested.saveSpec) return true
+ }
+ // If _requested didn't exist OR if it didn't match then we'll try using
+ // _from. We pass it through npa to normalize the specifier.
+ // This can happen when installing from an `npm-shrinkwrap.json` where `_requested` will
+ // be the tarball URL from `resolved` and thus can't match what's in the `package.json`.
+ // In those cases _from, will be preserved and we can compare that to ensure that they
+ // really came from the same sources.
+ // You'll see this scenario happen with at least tags and git dependencies.
if (child.package._from) {
var fromReq = npa(child.package._from)
if (fromReq.rawSpec === requested.rawSpec) return true
- if (fromReq.type === requested.type && fromReq.spec === requested.spec) return true
+ if (fromReq.type === requested.type && fromReq.saveSpec && fromReq.saveSpec === requested.saveSpec) return true
}
return false
}
- return semver.satisfies(child.package.version, requested.spec)
-}
-
-// TODO: Rename to maybe computeMetadata or computeRelationships
-exports.recalculateMetadata = function (tree, log, next) {
- recalculateMetadata(tree, log, {}, next)
+ try {
+ return semver.satisfies(child.package.version, requested.fetchSpec)
+ } catch (e) {
+ return false
+ }
}
-exports._childDependencySpecifier = childDependencySpecifier
-function childDependencySpecifier (tree, name, spec, cb) {
- if (!tree.resolved) tree.resolved = {}
- if (!tree.resolved[name]) tree.resolved[name] = {}
- if (tree.resolved[name][spec]) {
- return asap(function () {
- cb(null, tree.resolved[name][spec])
- })
- }
- realizePackageSpecifier(name + '@' + spec, packageRelativePath(tree), function (er, req) {
- if (er) return cb(er)
- tree.resolved[name][spec] = req
- cb(null, req)
- })
+function childDependencySpecifier (tree, name, spec) {
+ return npa.resolve(name, spec, packageRelativePath(tree))
}
-function recalculateMetadata (tree, log, seen, next) {
- validate('OOOF', arguments)
- if (seen[tree.path]) return next()
+exports.computeMetadata = computeMetadata
+function computeMetadata (tree, seen) {
+ if (!seen) seen = {}
+ if (!tree || seen[tree.path]) return
seen[tree.path] = true
if (tree.parent == null) {
resetMetadata(tree)
tree.isTop = true
}
+ tree.location = flatNameFromTree(tree)
- function markDeps (toMark, done) {
- var name = toMark.name
- var spec = toMark.spec
- var kind = toMark.kind
- childDependencySpecifier(tree, name, spec, function (er, req) {
- if (er || !req.name) return done()
- var child = findRequirement(tree, req.name, req)
- if (child) {
- resolveWithExistingModule(child, tree, log, andIgnoreErrors(done))
- } else if (kind === 'dep') {
- tree.missingDeps[req.name] = req.rawSpec
- done()
- } else if (kind === 'dev') {
- tree.missingDevDeps[req.name] = req.rawSpec
- done()
- } else {
- done()
- }
- })
+ function findChild (name, spec, kind) {
+ try {
+ var req = childDependencySpecifier(tree, name, spec)
+ } catch (err) {
+ return
+ }
+ var child = findRequirement(tree, req.name, req)
+ if (child) {
+ resolveWithExistingModule(child, tree)
+ return true
+ }
+ return
}
- function makeMarkable (deps, kind) {
- if (!deps) return []
- return Object.keys(deps).map(function (depname) { return { name: depname, spec: deps[depname], kind: kind } })
+ const deps = tree.package.dependencies || {}
+ for (let name of Object.keys(deps)) {
+ if (findChild(name, deps[name])) continue
+ tree.missingDeps[name] = deps[name]
+ }
+ if (tree.isTop) {
+ const devDeps = tree.package.devDependencies || {}
+ for (let name of Object.keys(devDeps)) {
+ if (findChild(name, devDeps[name])) continue
+ tree.missingDevDeps[name] = devDeps[name]
+ }
}
- // Ensure dependencies and dev dependencies are marked as required
- var tomark = makeMarkable(tree.package.dependencies, 'dep')
- if (tree.isTop) tomark = union(tomark, makeMarkable(tree.package.devDependencies, 'dev'))
+ tree.children.filter((child) => !child.removed && !child.failed).forEach((child) => computeMetadata(child, seen))
- // Ensure any children ONLY from a shrinkwrap are also included
- var childrenOnlyInShrinkwrap = tree.children.filter(function (child) {
- return child.fromShrinkwrap &&
- !tree.package.dependencies[child.package.name] &&
- !tree.package.devDependencies[child.package.name]
- })
- var tomarkOnlyInShrinkwrap = childrenOnlyInShrinkwrap.map(function (child) {
- var name = child.package.name
- var matched = child.package._spec.match(/^@?[^@]+@(.*)$/)
- var spec = matched ? matched[1] : child.package._spec
- var kind = tree.package.dependencies[name] ? 'dep'
- : tree.package.devDependencies[name] ? 'dev'
- : 'dep'
- return { name: name, spec: spec, kind: kind }
- })
- tomark = union(tomark, tomarkOnlyInShrinkwrap)
+ return tree
+}
- // Don't bother trying to recalc children of failed deps
- tree.children = tree.children.filter(function (child) { return !child.failed })
+function isDep (tree, child) {
+ var name = moduleName(child)
+ var prodVer = isProdDep(tree, name)
+ var devVer = isDevDep(tree, name)
- chain([
- [asyncMap, tomark, markDeps],
- [asyncMap, tree.children, function (child, done) { recalculateMetadata(child, log, seen, done) }]
- ], function () {
- tree.location = flatNameFromTree(tree)
- next(null, tree)
- })
+ try {
+ var prodSpec = childDependencySpecifier(tree, name, prodVer)
+ } catch (err) {
+ return {isDep: true, isProdDep: false, isDevDep: false}
+ }
+ var matches
+ if (prodSpec) matches = doesChildVersionMatch(child, prodSpec, tree)
+ if (matches) return {isDep: true, isProdDep: prodSpec, isDevDep: false}
+ if (devVer === prodVer) return {isDep: child.fromShrinkwrap, isProdDep: false, isDevDep: false}
+ try {
+ var devSpec = childDependencySpecifier(tree, name, devVer)
+ return {isDep: doesChildVersionMatch(child, devSpec, tree) || child.fromShrinkwrap, isProdDep: false, isDevDep: devSpec}
+ } catch (err) {
+ return {isDep: child.fromShrinkwrap, isProdDep: false, isDevDep: false}
+ }
}
-function addRequiredDep (tree, child, cb) {
- isDep(tree, child, function (childIsDep, childIsProdDep, childIsDevDep) {
- if (!childIsDep) return cb(false)
- replaceModuleByPath(child, 'requiredBy', tree)
- replaceModuleByName(tree, 'requires', child)
- if (childIsProdDep && tree.missingDeps) delete tree.missingDeps[moduleName(child)]
- if (childIsDevDep && tree.missingDevDeps) delete tree.missingDevDeps[moduleName(child)]
- cb(true)
- })
+function addRequiredDep (tree, child) {
+ var dep = isDep(tree, child)
+ if (!dep.isDep) return false
+ replaceModuleByPath(child, 'requiredBy', tree)
+ replaceModuleByName(tree, 'requires', child)
+ if (dep.isProdDep && tree.missingDeps) delete tree.missingDeps[moduleName(child)]
+ if (dep.isDevDep && tree.missingDevDeps) delete tree.missingDevDeps[moduleName(child)]
+ return true
}
exports.removeObsoleteDep = removeObsoleteDep
@@ -207,45 +179,38 @@ function removeObsoleteDep (child, log) {
})
}
-function matchingDep (tree, name) {
- if (tree.package.dependencies && tree.package.dependencies[name]) return tree.package.dependencies[name]
- if (tree.package.devDependencies && tree.package.devDependencies[name]) return tree.package.devDependencies[name]
- return
-}
-
function packageRelativePath (tree) {
if (!tree) return ''
var requested = tree.package._requested || {}
- var isLocal = requested.type === 'directory' || requested.type === 'local'
- return isLocal ? requested.spec : tree.path
+ var isLocal = requested.type === 'directory' || requested.type === 'file'
+ return isLocal ? requested.fetchSpec : tree.path
}
-function getShrinkwrap (tree, name) {
- return tree.package._shrinkwrap && tree.package._shrinkwrap.dependencies && tree.package._shrinkwrap.dependencies[name]
+function matchingDep (tree, name) {
+ if (!tree || !tree.package) return
+ if (tree.package.dependencies && tree.package.dependencies[name]) return tree.package.dependencies[name]
+ if (tree.package.devDependencies && tree.package.devDependencies[name]) return tree.package.devDependencies[name]
+ return
}
exports.getAllMetadata = function (args, tree, where, next) {
asyncMap(args, function (arg, done) {
- function fetchMetadataWithVersion () {
- var version = matchingDep(tree, arg)
- var spec = version == null ? arg : arg + '@' + version
- return fetchPackageMetadata(spec, where, done)
- }
- if (tree && arg.lastIndexOf('@') <= 0) {
- var sw = getShrinkwrap(tree, arg)
- if (sw) {
- return realizeShrinkwrapSpecifier(arg, sw, where, function (err, spec) {
- if (err) {
- return fetchMetadataWithVersion()
+ var spec = npa(arg)
+ if (spec.type !== 'file' && spec.type !== 'directory' && (spec.name == null || spec.rawSpec === '')) {
+ return fs.stat(path.join(arg, 'package.json'), (err) => {
+ if (err) {
+ var version = matchingDep(tree, spec.name)
+ if (version) {
+ return fetchPackageMetadata(npa.resolve(spec.name, version), where, done)
} else {
return fetchPackageMetadata(spec, where, done)
}
- })
- } else {
- return fetchMetadataWithVersion()
- }
+ } else {
+ return fetchPackageMetadata(npa('file:' + arg), where, done)
+ }
+ })
} else {
- return fetchPackageMetadata(arg, where, done)
+ return fetchPackageMetadata(spec, where, done)
}
}, next)
}
@@ -261,13 +226,12 @@ exports.loadRequestedDeps = function (args, tree, saveToDependencies, log, next)
child.isGlobal = true
}
var childName = moduleName(child)
+ child.saveSpec = computeVersionSpec(tree, child)
if (saveToDependencies) {
- tree.package[saveToDependencies][childName] =
- child.package._requested.rawSpec || child.package._requested.spec
+ tree.package[getSaveType(tree, child)][childName] = child.saveSpec
}
- if (saveToDependencies && saveToDependencies !== 'devDependencies') {
- tree.package.dependencies[childName] =
- child.package._requested.rawSpec || child.package._requested.spec
+ if (getSaveType(tree, child) === 'optionalDependencies') {
+ tree.package.dependencies[childName] = child.saveSpec
}
child.userRequired = true
child.save = saveToDependencies
@@ -275,14 +239,32 @@ exports.loadRequestedDeps = function (args, tree, saveToDependencies, log, next)
// For things the user asked to install, that aren't a dependency (or
// won't be when we're done), flag it as "depending" on the user
// themselves, so we don't remove it as a dep that no longer exists
- addRequiredDep(tree, child, function (childIsDep) {
- if (!childIsDep) child.userRequired = true
- depLoaded(null, child, tracker)
- })
+ var childIsDep = addRequiredDep(tree, child)
+ if (!childIsDep) child.userRequired = true
+ depLoaded(null, child, tracker)
}))
}, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
+function computeVersionSpec (tree, child) {
+ validate('OO', arguments)
+ var requested = child.package._requested
+ if (requested.registry) {
+ var version = child.package.version
+ var rangeDescriptor = ''
+ if (semver.valid(version, true) &&
+ semver.gte(version, '0.1.0', true) &&
+ !npm.config.get('save-exact')) {
+ rangeDescriptor = npm.config.get('save-prefix')
+ }
+ return rangeDescriptor + version
+ } else if (requested.type === 'directory' || requested.type === 'file') {
+ return 'file:' + path.relative(tree.path, requested.fetchSpec)
+ } else {
+ return requested.saveSpec
+ }
+}
+
function moduleNameMatches (name) {
return function (child) { return moduleName(child) === name }
}
@@ -299,11 +281,18 @@ exports.removeDeps = function (args, tree, saveToDependencies, log, next) {
var pkgName = moduleName(pkg)
var toRemove = tree.children.filter(moduleNameMatches(pkgName))
var pkgToRemove = toRemove[0] || createChild({package: {name: pkgName}})
- if (saveToDependencies) {
- replaceModuleByPath(tree, 'removed', pkgToRemove)
- pkgToRemove.save = saveToDependencies
+ if (tree.isTop) {
+ if (saveToDependencies) {
+ pkgToRemove.save = getSaveType(tree, pkg)
+ delete tree.package[pkgToRemove.save][pkgName]
+ if (pkgToRemove.save === 'optionalDependencies') {
+ delete tree.package.dependencies[pkgName]
+ }
+ replaceModuleByPath(tree, 'removed', pkgToRemove)
+ }
+ pkgToRemove.requiredBy = pkgToRemove.requiredBy.filter((parent) => parent !== tree)
}
- removeObsoleteDep(pkgToRemove)
+ if (pkgToRemove.requiredBy.length === 0) removeObsoleteDep(pkgToRemove)
})
log.finish()
next()
@@ -387,6 +376,43 @@ function andHandleOptionalErrors (log, tree, name, done) {
}
}
+exports.prefetchDeps = prefetchDeps
+function prefetchDeps (tree, deps, log, next) {
+ validate('OOOF', arguments)
+ var skipOptional = !npm.config.get('optional')
+ var seen = {}
+ const finished = andFinishTracker(log, next)
+ const fpm = BB.promisify(fetchPackageMetadata)
+ resolveBranchDeps(tree.package, deps).then(
+ () => finished(), finished
+ )
+
+ function resolveBranchDeps (pkg, deps) {
+ return BB.resolve(null).then(() => {
+ var allDependencies = Object.keys(deps).map((dep) => {
+ return npa.resolve(dep, deps[dep])
+ }).filter((dep) => {
+ return dep.registry &&
+ !seen[dep.toString()] &&
+ !findRequirement(tree, dep.name, dep)
+ })
+ if (skipOptional) {
+ var optDeps = pkg.optionalDependencies || {}
+ allDependencies = allDependencies.filter((dep) => !optDeps[dep.name])
+ }
+ return BB.map(allDependencies, (dep) => {
+ seen[dep.toString()] = true
+ return fpm(dep, '', {tracker: log.newItem('fetchMetadata')}).then(
+ (pkg) => {
+ return pkg && pkg.dependencies && resolveBranchDeps(pkg, pkg.dependencies)
+ },
+ () => null
+ )
+ })
+ })
+ }
+}
+
// Load any missing dependencies in the given tree
exports.loadDeps = loadDeps
function loadDeps (tree, log, next) {
@@ -427,15 +453,19 @@ exports.loadDevDeps = function (tree, log, next) {
var loadExtraneous = exports.loadExtraneous = function (tree, log, next) {
var seen = {}
- function loadExtraneous (tree, log, next) {
- validate('OOF', arguments)
- if (seen[tree.path]) return next()
+
+ function loadExtraneous (tree) {
+ if (seen[tree.path]) return
seen[tree.path] = true
- asyncMap(tree.children.filter(function (child) { return !child.loaded }), function (child, done) {
- resolveWithExistingModule(child, tree, log, done)
- }, andForEachChild(loadExtraneous, andFinishTracker(log, next)))
+ for (var child of tree.children) {
+ if (child.loaded) continue
+ resolveWithExistingModule(child, tree)
+ loadExtraneous(child)
+ }
}
- loadExtraneous(tree, log, next)
+ loadExtraneous(tree)
+ log.finish()
+ next()
}
exports.loadExtraneous.andResolveDeps = function (tree, log, next) {
@@ -444,37 +474,38 @@ exports.loadExtraneous.andResolveDeps = function (tree, log, next) {
// resolving the dependencies of extraneous deps.
if (tree.loaded) return loadExtraneous(tree, log, next)
asyncMap(tree.children.filter(function (child) { return !child.loaded }), function (child, done) {
- resolveWithExistingModule(child, tree, log, done)
+ resolveWithExistingModule(child, tree)
+ done(null, child, log)
}, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
function addDependency (name, versionSpec, tree, log, done) {
validate('SSOOF', arguments)
var next = andAddParentToErrors(tree, done)
- childDependencySpecifier(tree, name, versionSpec, iferr(done, function (req) {
- var child = findRequirement(tree, name, req)
- if (child) {
- resolveWithExistingModule(child, tree, log, iferr(next, function (child, log) {
- if (child.package._shrinkwrap === undefined) {
- readShrinkwrap.andInflate(child, function (er) { next(er, child, log) })
- } else {
- next(null, child, log)
- }
- }))
+ try {
+ var req = childDependencySpecifier(tree, name, versionSpec)
+ } catch (err) {
+ return done(err)
+ }
+ var child = findRequirement(tree, name, req)
+ if (child) {
+ resolveWithExistingModule(child, tree)
+ if (child.package._shrinkwrap === undefined) {
+ readShrinkwrap.andInflate(child, function (er) { next(er, child, log) })
} else {
- fetchPackageMetadata(req, packageRelativePath(tree), {tracker: log.newItem('fetchMetadata')}, iferr(next, function (pkg) {
- resolveWithNewModule(pkg, tree, log, next)
- }))
+ next(null, child, log)
}
- }))
+ } else {
+ fetchPackageMetadata(req, packageRelativePath(tree), {tracker: log.newItem('fetchMetadata')}, iferr(next, function (pkg) {
+ resolveWithNewModule(pkg, tree, log, next)
+ }))
+ }
}
-function resolveWithExistingModule (child, tree, log, next) {
- validate('OOOF', arguments)
- addRequiredDep(tree, child, function () {
- if (tree.parent && child.parent !== tree) updatePhantomChildren(tree.parent, child)
- next(null, child, log)
- })
+function resolveWithExistingModule (child, tree) {
+ validate('OO', arguments)
+ addRequiredDep(tree, child)
+ if (tree.parent && child.parent !== tree) updatePhantomChildren(tree.parent, child)
}
var updatePhantomChildren = exports.updatePhantomChildren = function (current, child) {
@@ -521,44 +552,39 @@ function resolveWithNewModule (pkg, tree, log, next) {
log.silly('resolveWithNewModule', packageId(pkg), 'checking installable status')
return isInstallable(pkg, iferr(next, function () {
- if (!pkg._from) {
- pkg._from = pkg._requested.name + '@' + pkg._requested.spec
- }
- addShrinkwrap(pkg, iferr(next, function () {
- addBundled(pkg, iferr(next, function () {
- var parent = earliestInstallable(tree, tree, pkg) || tree
- var child = createChild({
- package: pkg,
- parent: parent,
- path: path.join(parent.path, 'node_modules', pkg.name),
- realpath: path.resolve(parent.realpath, 'node_modules', pkg.name),
- children: pkg._bundled || [],
- isLink: tree.isLink,
- knownInstallable: true
- })
- delete pkg._bundled
- var hasBundled = child.children.length
-
- var replaced = replaceModuleByName(parent, 'children', child)
- if (replaced) removeObsoleteDep(replaced)
- addRequiredDep(tree, child, function () {
- child.location = flatNameFromTree(child)
+ addBundled(pkg, iferr(next, function () {
+ var parent = earliestInstallable(tree, tree, pkg) || tree
+ var isLink = pkg._requested.type === 'directory'
+ var child = createChild({
+ package: pkg,
+ parent: parent,
+ path: path.join(parent.isLink ? parent.realpath : parent.path, 'node_modules', pkg.name),
+ realpath: isLink ? pkg._requested.fetchSpec : path.join(parent.realpath, 'node_modules', pkg.name),
+ children: pkg._bundled || [],
+ isLink: isLink,
+ isInLink: parent.isLink,
+ knownInstallable: true
+ })
+ delete pkg._bundled
+ var hasBundled = child.children.length
- if (tree.parent && parent !== tree) updatePhantomChildren(tree.parent, child)
+ var replaced = replaceModuleByName(parent, 'children', child)
+ if (replaced) removeObsoleteDep(replaced)
+ addRequiredDep(tree, child)
+ child.location = flatNameFromTree(child)
- if (hasBundled) {
- inflateBundled(child, child, child.children)
- }
+ if (tree.parent && parent !== tree) updatePhantomChildren(tree.parent, child)
- if (pkg._shrinkwrap && pkg._shrinkwrap.dependencies) {
- return inflateShrinkwrap(child, pkg._shrinkwrap.dependencies, function (er) {
- next(er, child, log)
- })
- }
+ if (hasBundled) {
+ inflateBundled(child, child, child.children)
+ }
- next(null, child, log)
+ if (pkg._shrinkwrap && pkg._shrinkwrap.dependencies) {
+ return inflateShrinkwrap(child, pkg._shrinkwrap.dependencies, function (er) {
+ next(er, child, log)
})
- }))
+ }
+ next(null, child, log)
}))
}))
}
@@ -567,7 +593,7 @@ var validatePeerDeps = exports.validatePeerDeps = function (tree, onInvalid) {
if (!tree.package.peerDependencies) return
Object.keys(tree.package.peerDependencies).forEach(function (pkgname) {
var version = tree.package.peerDependencies[pkgname]
- var match = findRequirement(tree.parent || tree, pkgname, npa(pkgname + '@' + version))
+ var match = findRequirement(tree.parent || tree, pkgname, npa.resolve(pkgname, version))
if (!match) onInvalid(tree, pkgname, version)
})
}
@@ -590,7 +616,7 @@ var findRequirement = exports.findRequirement = function (tree, name, requested,
validate('OSO', [tree, name, requested])
if (!requestor) requestor = tree
var nameMatch = function (child) {
- return moduleName(child) === name && child.parent && !child.removed
+ return moduleName(child) === name && child.parent && !child.removed && !child.failed
}
var versionMatch = function (child) {
return doesChildVersionMatch(child, requested, requestor)
@@ -618,7 +644,6 @@ var findRequirement = exports.findRequirement = function (tree, name, requested,
// If it is, then it's the level below where its installed.
var earliestInstallable = exports.earliestInstallable = function (requiredBy, tree, pkg) {
validate('OOO', arguments)
-
function undeletedModuleMatches (child) {
return !child.removed && moduleName(child) === pkg.name
}
@@ -645,7 +670,7 @@ var earliestInstallable = exports.earliestInstallable = function (requiredBy, tr
var devDeps = tree.package.devDependencies || {}
if (tree.isTop && devDeps[pkg.name]) {
- var requested = npa(pkg.name + '@' + devDeps[pkg.name])
+ var requested = npa.resolve(pkg.name, devDeps[pkg.name], tree.path)
if (!doesChildVersionMatch({package: pkg}, requested, tree)) {
return null
}
@@ -659,5 +684,7 @@ var earliestInstallable = exports.earliestInstallable = function (requiredBy, tr
if (npm.config.get('global-style') && tree.parent.isTop) return tree
if (npm.config.get('legacy-bundling')) return tree
+ if (!process.env.NODE_PRESERVE_SYMLINKS && /^[.][.][\\/]/.test(path.relative(tree.parent.realpath, tree.realpath))) return tree
+
return (earliestInstallable(requiredBy, tree.parent, pkg) || tree)
}
diff --git a/deps/npm/lib/install/diff-trees.js b/deps/npm/lib/install/diff-trees.js
index 1429c71dcb..67fe72d044 100644
--- a/deps/npm/lib/install/diff-trees.js
+++ b/deps/npm/lib/install/diff-trees.js
@@ -9,13 +9,13 @@ function nonRegistrySource (pkg) {
if (!requested) return false
if (requested.type === 'hosted') return true
- if (requested.type === 'local') return true
+ if (requested.type === 'file' || requested.type === 'directory') return true
return false
}
function pkgAreEquiv (aa, bb) {
- var aaSha = (aa.dist && aa.dist.shasum) || aa._shasum
- var bbSha = (bb.dist && bb.dist.shasum) || bb._shasum
+ var aaSha = (aa.dist && aa.dist.integrity) || aa._integrity
+ var bbSha = (bb.dist && bb.dist.integrity) || bb._integrity
if (aaSha === bbSha) return true
if (aaSha || bbSha) return false
if (nonRegistrySource(aa) || nonRegistrySource(bb)) return false
@@ -24,13 +24,13 @@ function pkgAreEquiv (aa, bb) {
}
function getUniqueId (pkg) {
- var versionspec = pkg._shasum
+ var versionspec = pkg._integrity
if (!versionspec && nonRegistrySource(pkg)) {
if (pkg._requested) {
- versionspec = pkg._requested.spec
+ versionspec = pkg._requested.fetchSpec
} else if (pkg._from) {
- versionspec = npa(pkg._from).spec
+ versionspec = npa(pkg._from).fetchSpec
}
}
if (!versionspec) {
@@ -50,15 +50,6 @@ module.exports = function (oldTree, newTree, differences, log, next) {
next()
}
-function isLink (node) {
- return node && node.isLink
-}
-
-function requiredByAllLinked (node) {
- if (!node.requiredBy.length) return false
- return node.requiredBy.filter(isLink).length === node.requiredBy.length
-}
-
function isNotTopOrExtraneous (node) {
return !node.isTop && !node.userRequired && !node.existing
}
@@ -136,16 +127,9 @@ var diffTrees = module.exports._diffTrees = function (oldTree, newTree) {
Object.keys(flatNewTree).forEach(function (path) {
var pkg = flatNewTree[path]
pkg.oldPkg = flatOldTree[path]
- pkg.isInLink = (pkg.oldPkg && isLink(pkg.oldPkg.parent)) ||
- (pkg.parent && isLink(pkg.parent)) ||
- requiredByAllLinked(pkg)
if (pkg.oldPkg) {
if (!pkg.userRequired && pkgAreEquiv(pkg.oldPkg.package, pkg.package)) return
- if (!pkg.isInLink && (isLink(pkg.oldPkg) || isLink(pkg))) {
- setAction(differences, 'update-linked', pkg)
- } else {
- setAction(differences, 'update', pkg)
- }
+ setAction(differences, 'update', pkg)
} else {
var vername = getUniqueId(pkg.package)
var removing = toRemoveByUniqueId[vername] && toRemoveByUniqueId[vername].length
@@ -155,7 +139,7 @@ var diffTrees = module.exports._diffTrees = function (oldTree, newTree) {
pkg.fromPath = toRemove[flatname].path
setAction(differences, 'move', pkg)
delete toRemove[flatname]
- } else {
+ } else if (!(pkg.isInLink && pkg.fromBundle)) {
setAction(differences, 'add', pkg)
}
}
diff --git a/deps/npm/lib/install/filter-invalid-actions.js b/deps/npm/lib/install/filter-invalid-actions.js
deleted file mode 100644
index beac30b7b0..0000000000
--- a/deps/npm/lib/install/filter-invalid-actions.js
+++ /dev/null
@@ -1,36 +0,0 @@
-'use strict'
-var path = require('path')
-var validate = require('aproba')
-var log = require('npmlog')
-var packageId = require('../utils/package-id.js')
-
-module.exports = function (top, differences, next) {
- validate('SAF', arguments)
- var action
- var keep = []
-
- differences.forEach(function (action) {
- var cmd = action[0]
- var pkg = action[1]
- if (cmd === 'remove') {
- pkg.removing = true
- }
- })
-
- /*eslint no-cond-assign:0*/
- while (action = differences.shift()) {
- var cmd = action[0]
- var pkg = action[1]
- if (pkg.isInLink || (pkg.parent && (pkg.parent.target || pkg.parent.isLink))) {
- // we want to skip warning if this is a child of another module that we're removing
- if (!pkg.parent.removing) {
- log.verbose('skippingAction', 'Module is inside a symlinked module: not running ' +
- cmd + ' ' + packageId(pkg) + ' ' + path.relative(top, pkg.path))
- }
- } else {
- keep.push(action)
- }
- }
- differences.push.apply(differences, keep)
- next()
-}
diff --git a/deps/npm/lib/install/get-requested.js b/deps/npm/lib/install/get-requested.js
new file mode 100644
index 0000000000..f6c44d1463
--- /dev/null
+++ b/deps/npm/lib/install/get-requested.js
@@ -0,0 +1,12 @@
+'use strict'
+const npa = require('npm-package-arg')
+const moduleName = require('../utils/module-name.js')
+
+module.exports = function (child) {
+ if (!child.requiredBy.length) return
+ const reqBy = child.requiredBy[0]
+ const deps = reqBy.package.dependencies || {}
+ const devDeps = reqBy.package.devDependencies || {}
+ const name = moduleName(child)
+ return npa.resolve(name, deps[name] || devDeps[name], reqBy.realpath)
+}
diff --git a/deps/npm/lib/install/inflate-bundled.js b/deps/npm/lib/install/inflate-bundled.js
index 5694841290..70da583df4 100644
--- a/deps/npm/lib/install/inflate-bundled.js
+++ b/deps/npm/lib/install/inflate-bundled.js
@@ -8,9 +8,10 @@ module.exports = function inflateBundled (bundler, parent, children) {
children.forEach(function (child) {
reset(child)
child.fromBundle = bundler
+ child.isInLink = bundler.isLink
child.parent = parent
child.path = childPath(parent.path, child)
- child.realpath = childPath(parent.path, child)
+ child.realpath = bundler.isLink ? child.realpath : childPath(parent.realpath, child)
child.isLink = child.isLink || parent.isLink || parent.target
inflateBundled(bundler, child, child.children)
})
diff --git a/deps/npm/lib/install/inflate-shrinkwrap.js b/deps/npm/lib/install/inflate-shrinkwrap.js
index aca4204930..9878b0f19a 100644
--- a/deps/npm/lib/install/inflate-shrinkwrap.js
+++ b/deps/npm/lib/install/inflate-shrinkwrap.js
@@ -1,105 +1,193 @@
'use strict'
-var asyncMap = require('slide').asyncMap
-var validate = require('aproba')
-var iferr = require('iferr')
-var realizeShrinkwrapSpecifier = require('./realize-shrinkwrap-specifier.js')
-var isRegistrySpecifier = require('./is-registry-specifier.js')
-var fetchPackageMetadata = require('../fetch-package-metadata.js')
-var annotateMetadata = require('../fetch-package-metadata.js').annotateMetadata
-var addShrinkwrap = require('../fetch-package-metadata.js').addShrinkwrap
-var addBundled = require('../fetch-package-metadata.js').addBundled
-var inflateBundled = require('./inflate-bundled.js')
-var npm = require('../npm.js')
-var createChild = require('./node.js').create
-var moduleName = require('../utils/module-name.js')
-var childPath = require('../utils/child-path.js')
+
+const BB = require('bluebird')
+
+const addBundled = BB.promisify(require('../fetch-package-metadata.js').addBundled)
+const childPath = require('../utils/child-path.js')
+const createChild = require('./node.js').create
+const fetchPackageMetadata = BB.promisify(require('../fetch-package-metadata.js'))
+const inflateBundled = require('./inflate-bundled.js')
+const moduleName = require('../utils/module-name.js')
+const normalizePackageData = require('normalize-package-data')
+const npm = require('../npm.js')
+const realizeShrinkwrapSpecifier = require('./realize-shrinkwrap-specifier.js')
+const validate = require('aproba')
+const path = require('path')
module.exports = function (tree, swdeps, finishInflating) {
if (!npm.config.get('shrinkwrap')) return finishInflating()
tree.loaded = true
- return inflateShrinkwrap(tree.path, tree, swdeps, finishInflating)
+ return inflateShrinkwrap(tree.path, tree, swdeps).then(
+ () => finishInflating(),
+ finishInflating
+ )
}
-function inflateShrinkwrap (topPath, tree, swdeps, finishInflating) {
- validate('SOOF', arguments)
- var onDisk = {}
- tree.children.forEach(function (child) { onDisk[moduleName(child)] = child })
- var dev = npm.config.get('dev') || (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) || /^dev(elopment)?$/.test(npm.config.get('only'))
- var prod = !/^dev(elopment)?$/.test(npm.config.get('only'))
-
- // If the shrinkwrap has no dev dependencies in it then we'll leave the one's
- // already on disk. If it DOES have dev dependencies then ONLY those in the
- // shrinkwrap will be included.
- var swHasDev = Object.keys(swdeps).some(function (name) { return swdeps[name].dev })
- tree.children = swHasDev ? [] : tree.children.filter(function (child) {
- return tree.package.devDependencies[moduleName(child)]
+function inflateShrinkwrap (topPath, tree, swdeps) {
+ validate('SOO', arguments)
+ const onDisk = {}
+ tree.children.forEach((child) => {
+ onDisk[moduleName(child)] = child
})
+ const dev = npm.config.get('dev') || (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) || /^dev(elopment)?$/.test(npm.config.get('only'))
+ const prod = !/^dev(elopment)?$/.test(npm.config.get('only'))
- return asyncMap(Object.keys(swdeps), doRealizeAndInflate, finishInflating)
+ tree.children = []
- function doRealizeAndInflate (name, next) {
- return realizeShrinkwrapSpecifier(name, swdeps[name], topPath, iferr(next, andInflate(name, next)))
- }
+ return BB.each(Object.keys(swdeps), (name) => {
+ const sw = swdeps[name]
+ if (
+ (!prod && !sw.dev) ||
+ (!dev && sw.dev)
+ ) { return null }
+ const dependencies = sw.dependencies || {}
+ const requested = realizeShrinkwrapSpecifier(name, sw, topPath)
+ return inflatableChild(
+ onDisk[name], name, topPath, tree, sw, requested
+ ).then((child) => {
+ return inflateShrinkwrap(topPath, child, dependencies)
+ })
+ })
+}
- function andInflate (name, next) {
- return function (requested) {
- var sw = swdeps[name]
- var dependencies = sw.dependencies || {}
- if ((!prod && !sw.dev) || (!dev && sw.dev)) return next()
- var child = onDisk[name]
- if (childIsEquivalent(sw, requested, child)) {
- if (!child.fromShrinkwrap) child.fromShrinkwrap = requested.raw
- if (sw.dev) child.shrinkwrapDev = true
- tree.children.push(child)
- annotateMetadata(child.package, requested, requested.raw, topPath)
- return inflateShrinkwrap(topPath, child, dependencies || {}, next)
- } else {
- var from = sw.from || requested.raw
- var optional = sw.optional
- return fetchPackageMetadata(requested, topPath, iferr(next, andAddShrinkwrap(from, optional, dependencies, next)))
- }
- }
+function normalizePackageDataNoErrors (pkg) {
+ try {
+ normalizePackageData(pkg)
+ } catch (ex) {
+ // don't care
}
+}
- function andAddShrinkwrap (from, optional, dependencies, next) {
- return function (pkg) {
- pkg._from = from
- pkg._optional = optional
- addShrinkwrap(pkg, iferr(next, andAddBundled(pkg, dependencies, next)))
- }
+function inflatableChild (onDiskChild, name, topPath, tree, sw, requested) {
+ validate('OSSOOO|ZSSOOO', arguments)
+ if (onDiskChild && childIsEquivalent(sw, requested, onDiskChild)) {
+ // The version on disk matches the shrinkwrap entry.
+ if (!onDiskChild.fromShrinkwrap) onDiskChild.fromShrinkwrap = true
+ if (sw.dev) onDiskChild.shrinkwrapDev = true
+ onDiskChild.package._requested = requested
+ onDiskChild.package._spec = requested.rawSpec
+ onDiskChild.package._where = topPath
+ onDiskChild.fromBundle = sw.bundled ? tree.fromBundle || tree : null
+ if (!onDiskChild.package._args) onDiskChild.package._args = []
+ onDiskChild.package._args.push([String(requested), topPath])
+ // non-npm registries can and will return unnormalized data, plus
+ // even the npm registry may have package data normalized with older
+ // normalization rules. This ensures we get package data in a consistent,
+ // stable format.
+ normalizePackageDataNoErrors(onDiskChild.package)
+ tree.children.push(onDiskChild)
+ return BB.resolve(onDiskChild)
+ } else if (sw.version && sw.integrity) {
+ // The shrinkwrap entry has an integrity field. We can fake a pkg to get
+ // the installer to do a content-address fetch from the cache, if possible.
+ return BB.resolve(makeFakeChild(name, topPath, tree, sw, requested))
+ } else {
+ // It's not on disk, and we can't just look it up by address -- do a full
+ // fpm/inflate bundle pass. For registry deps, this will go straight to the
+ // tarball URL, as if it were a remote tarball dep.
+ return fetchChild(topPath, tree, sw, requested)
}
+}
- function andAddBundled (pkg, dependencies, next) {
- return function () {
- return addBundled(pkg, iferr(next, andAddChild(pkg, dependencies, next)))
+function makeFakeChild (name, topPath, tree, sw, requested) {
+ const from = sw.from || requested.raw
+ const pkg = {
+ name: name,
+ version: sw.version,
+ _resolved: adaptResolved(requested, sw.resolved),
+ _requested: requested,
+ _optional: sw.optional,
+ _integrity: sw.integrity,
+ _from: from,
+ _spec: requested.rawSpec,
+ _where: topPath,
+ _args: [[requested.toString(), topPath]],
+ _injectedFromShrinkwrap: sw
+ }
+ let bundleAdded = BB.resolve()
+ if (Object.keys(sw.dependencies || {}).some((d) => {
+ return sw.dependencies[d].bundled
+ })) {
+ pkg.bundleDependencies = []
+ bundleAdded = addBundled(pkg)
+ }
+ return bundleAdded.then(() => {
+ const child = createChild({
+ package: pkg,
+ loaded: true,
+ parent: tree,
+ children: pkg._bundled || [],
+ fromShrinkwrap: true,
+ fromBundle: sw.bundled ? tree.fromBundle || tree : null,
+ path: childPath(tree.path, pkg),
+ realpath: childPath(tree.realpath, pkg),
+ location: tree.location + '/' + pkg.name,
+ isInLink: tree.isLink
+ })
+ tree.children.push(child)
+ if (pkg._bundled) {
+ delete pkg._bundled
+ inflateBundled(child, child, child.children)
}
+ return child
+ })
+}
+
+function adaptResolved (requested, resolved) {
+ const registry = requested.scope
+ ? npm.config.get(`${requested.scope}:registry`) || npm.config.get('registry')
+ : npm.config.get('registry')
+ if (!requested.registry || (resolved && resolved.indexOf(registry) === 0)) {
+ // Nothing to worry about here. Pass it through.
+ return resolved
+ } else {
+ // We could fast-path for registry.npmjs.org here, but if we do, it
+ // would end up getting written back to the `resolved` field. By always
+ // returning `null` for other registries, `pacote.extract()` will take
+ // care of any required metadata fetches internally, without altering
+ // the tree we're going to write out to shrinkwrap/lockfile.
+ return null
}
+}
- function andAddChild (pkg, dependencies, next) {
- return function () {
- var child = createChild({
- package: pkg,
- loaded: true,
- parent: tree,
- fromShrinkwrap: pkg._from,
- path: childPath(tree.path, pkg),
- realpath: childPath(tree.realpath, pkg),
- children: pkg._bundled || []
- })
- tree.children.push(child)
- if (pkg._bundled) {
- delete pkg._bundled
- inflateBundled(child, child, child.children)
- }
- inflateShrinkwrap(topPath, child, dependencies || {}, next)
+function fetchChild (topPath, tree, sw, requested) {
+ const from = sw.from || requested.raw
+ const optional = sw.optional
+ return fetchPackageMetadata(requested, topPath).then((pkg) => {
+ pkg._from = from
+ pkg._optional = optional
+ return addBundled(pkg).then(() => pkg)
+ }).then((pkg) => {
+ var isLink = pkg._requested.type === 'directory'
+ const child = createChild({
+ package: pkg,
+ loaded: true,
+ parent: tree,
+ fromShrinkwrap: requested,
+ path: childPath(tree.path, pkg),
+ realpath: isLink ? requested.fetchSpec : childPath(tree.realpath, pkg),
+ children: pkg._bundled || [],
+ location: tree.location + '/' + pkg.name,
+ isLink: isLink,
+ isInLink: tree.isLink
+ })
+ tree.children.push(child)
+ if (pkg._bundled) {
+ delete pkg._bundled
+ inflateBundled(child, child, child.children)
}
- }
+ return child
+ })
}
function childIsEquivalent (sw, requested, child) {
if (!child) return false
if (child.fromShrinkwrap) return true
+ if (sw.integrity && child.package._integrity === sw.integrity) return true
+ if (child.isLink && requested.type === 'directory') return path.relative(child.realpath, requested.fetchSpec) === ''
+
if (sw.resolved) return child.package._resolved === sw.resolved
- if (!isRegistrySpecifier(requested) && sw.from) return child.package._from === sw.from
+ if (!requested.registry && sw.from) return child.package._from === sw.from
+ if (!requested.registry && child.package._resolved) return sw.version === child.package._resolved
return child.package.version === sw.version
}
diff --git a/deps/npm/lib/install/is-registry-specifier.js b/deps/npm/lib/install/is-registry-specifier.js
deleted file mode 100644
index 606be2bd13..0000000000
--- a/deps/npm/lib/install/is-registry-specifier.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict'
-module.exports = isRegistrySpecifier
-
-function isRegistrySpecifier (spec) {
- return spec.type === 'range' || spec.type === 'version' || spec.type === 'tag'
-}
diff --git a/deps/npm/lib/install/node.js b/deps/npm/lib/install/node.js
index a5b766b054..b1b01fa8b9 100644
--- a/deps/npm/lib/install/node.js
+++ b/deps/npm/lib/install/node.js
@@ -18,26 +18,41 @@ var defaultTemplate = {
realpath: null,
location: null,
userRequired: false,
- existing: false,
- isTop: false
+ save: false,
+ saveSpec: null,
+ isTop: false,
+ fromBundle: false
}
function isLink (node) {
return node && node.isLink
}
-var create = exports.create = function (node, template) {
+var create = exports.create = function (node, template, isNotTop) {
if (!template) template = defaultTemplate
Object.keys(template).forEach(function (key) {
if (template[key] != null && typeof template[key] === 'object' && !(template[key] instanceof Array)) {
if (!node[key]) node[key] = {}
- return create(node[key], template[key])
+ return create(node[key], template[key], true)
}
if (node[key] != null) return
node[key] = template[key]
})
- if (isLink(node.parent)) {
- node.isLink = true
+ if (!isNotTop) {
+ // isLink is true for the symlink and everything inside it.
+ // by contrast, isInLink is true for only the things inside a link
+ if (node.isLink == null && isLink(node.parent)) {
+ node.isLink = true
+ node.isInLink = true
+ } else if (node.isLink == null) {
+ node.isLink = false
+ node.isInLink = false
+ }
+ if (node.fromBundle == null && node.package) {
+ node.fromBundle = node.package._inBundle
+ } else if (node.fromBundle == null) {
+ node.fromBundle = false
+ }
}
return node
}
diff --git a/deps/npm/lib/install/read-shrinkwrap.js b/deps/npm/lib/install/read-shrinkwrap.js
index 3453e3192f..913c303482 100644
--- a/deps/npm/lib/install/read-shrinkwrap.js
+++ b/deps/npm/lib/install/read-shrinkwrap.js
@@ -1,25 +1,59 @@
'use strict'
-var path = require('path')
-var fs = require('graceful-fs')
-var iferr = require('iferr')
-var inflateShrinkwrap = require('./inflate-shrinkwrap.js')
-var parseJSON = require('../utils/parse-json.js')
-var readShrinkwrap = module.exports = function (child, next) {
+const BB = require('bluebird')
+
+const fs = require('graceful-fs')
+const iferr = require('iferr')
+const inflateShrinkwrap = require('./inflate-shrinkwrap.js')
+const log = require('npmlog')
+const parseJSON = require('../utils/parse-json.js')
+const path = require('path')
+const PKGLOCK_VERSION = require('../npm.js').lockfileVersion
+const pkgSri = require('../utils/package-integrity.js')
+
+const readFileAsync = BB.promisify(fs.readFile)
+
+module.exports = readShrinkwrap
+function readShrinkwrap (child, next) {
if (child.package._shrinkwrap) return process.nextTick(next)
- fs.readFile(path.join(child.path, 'npm-shrinkwrap.json'), function (er, data) {
- if (er) {
- child.package._shrinkwrap = null
- return next()
+ BB.join(
+ maybeReadFile('npm-shrinkwrap.json', child),
+ // Don't read non-root lockfiles
+ child.isTop && maybeReadFile('package-lock.json', child),
+ child.isTop && maybeReadFile('package.json', child),
+ (shrinkwrap, lockfile, pkgJson) => {
+ if (shrinkwrap && lockfile) {
+ log.warn('read-shrinkwrap', 'Ignoring package-lock.json because there is already an npm-shrinkwrap.json. Please use only one of the two.')
+ }
+ const name = shrinkwrap ? 'npm-shrinkwrap.json' : 'package-lock.json'
+ let parsed = null
+ if (shrinkwrap || lockfile) {
+ try {
+ parsed = parseJSON(shrinkwrap || lockfile)
+ } catch (ex) {
+ throw ex
+ }
+ }
+ if (
+ pkgJson &&
+ parsed &&
+ parsed.packageIntegrity &&
+ !pkgSri.check(JSON.parse(pkgJson), parsed.packageIntegrity)
+ ) {
+ log.info('read-shrinkwrap', `${name} will be updated because package.json does not match what it was generated against.`)
+ }
+ if (parsed && parsed.lockfileVersion !== PKGLOCK_VERSION) {
+ log.warn('read-shrinkwrap', `This version of npm is compatible with lockfileVersion@${PKGLOCK_VERSION}, but ${name} was generated for lockfileVersion@${parsed.lockfileVersion || 0}. I'll try to do my best with it!`)
+ }
+ child.package._shrinkwrap = parsed
}
- try {
- child.package._shrinkwrap = parseJSON(data)
- } catch (ex) {
- child.package._shrinkwrap = null
- return next(ex)
- }
- return next()
- })
+ ).then(() => next(), next)
+}
+
+function maybeReadFile (name, child) {
+ return readFileAsync(
+ path.join(child.path, name)
+ ).catch({code: 'ENOENT'}, () => null)
}
module.exports.andInflate = function (child, next) {
diff --git a/deps/npm/lib/install/realize-shrinkwrap-specifier.js b/deps/npm/lib/install/realize-shrinkwrap-specifier.js
index 0c491a6028..91030bfa82 100644
--- a/deps/npm/lib/install/realize-shrinkwrap-specifier.js
+++ b/deps/npm/lib/install/realize-shrinkwrap-specifier.js
@@ -1,25 +1,18 @@
'use strict'
-var realizePackageSpecifier = require('realize-package-specifier')
-var isRegistrySpecifier = require('./is-registry-specifier.js')
+var npa = require('npm-package-arg')
-module.exports = function (name, sw, where, cb) {
- function lookup (ver, cb) {
- realizePackageSpecifier(name + '@' + ver, where, cb)
- }
- if (sw.resolved) {
- return lookup(sw.resolved, cb)
- } else if (sw.from) {
- return lookup(sw.from, function (err, spec) {
- if (err || isRegistrySpecifier(spec)) {
- return thenUseVersion()
- } else {
- return cb(null, spec)
- }
- })
- } else {
- return thenUseVersion()
- }
- function thenUseVersion () {
- lookup(sw.version, cb)
- }
+module.exports = function (name, sw, where) {
+ try {
+ if (sw.version && sw.integrity) {
+ return npa.resolve(name, sw.version, where)
+ }
+ if (sw.resolved) {
+ return npa.resolve(name, sw.resolved, where)
+ }
+ if (sw.from) {
+ var spec = npa(sw.from, where)
+ if (!spec.registry) return spec
+ }
+ } catch (_) { }
+ return npa.resolve(name, sw.version, where)
}
diff --git a/deps/npm/lib/install/save.js b/deps/npm/lib/install/save.js
index 18028a3c26..5d5f4e7f7a 100644
--- a/deps/npm/lib/install/save.js
+++ b/deps/npm/lib/install/save.js
@@ -1,19 +1,20 @@
'use strict'
-var fs = require('graceful-fs')
-var path = require('path')
-var url = require('url')
-var writeFileAtomic = require('write-file-atomic')
-var log = require('npmlog')
-var semver = require('semver')
-var iferr = require('iferr')
-var validate = require('aproba')
-var without = require('lodash.without')
-var npm = require('../npm.js')
-var deepSortObject = require('../utils/deep-sort-object.js')
-var parseJSON = require('../utils/parse-json.js')
-var moduleName = require('../utils/module-name.js')
-var isDevDep = require('./is-dev-dep.js')
-var createShrinkwrap = require('../shrinkwrap.js').createShrinkwrap
+
+const BB = require('bluebird')
+
+const createShrinkwrap = require('../shrinkwrap.js').createShrinkwrap
+const deepSortObject = require('../utils/deep-sort-object.js')
+const detectIndent = require('detect-indent')
+const fs = BB.promisifyAll(require('graceful-fs'))
+const iferr = require('iferr')
+const log = require('npmlog')
+const moduleName = require('../utils/module-name.js')
+const npm = require('../npm.js')
+const parseJSON = require('../utils/parse-json.js')
+const path = require('path')
+const validate = require('aproba')
+const without = require('lodash.without')
+const writeFileAtomic = require('write-file-atomic')
// if the -S|--save option is specified, then write installed packages
// as dependencies to a package.json file.
@@ -42,30 +43,13 @@ function andWarnErrors (cb) {
function saveShrinkwrap (tree, next) {
validate('OF', arguments)
- var saveTarget = path.resolve(tree.path, 'npm-shrinkwrap.json')
- fs.stat(saveTarget, function (er, stat) {
- if (er) return next()
- var save = npm.config.get('save')
- var saveDev = npm.config.get('save-dev')
- var saveOptional = npm.config.get('save-optional')
-
- var shrinkwrap = tree.package._shrinkwrap || {dependencies: {}}
- var shrinkwrapHasAnyDevOnlyDeps = tree.requires.some(function (dep) {
- var name = moduleName(dep)
- return isDevDep(tree, name) &&
- shrinkwrap.dependencies[name] != null
- })
-
- if (!saveOptional && saveDev && !shrinkwrapHasAnyDevOnlyDeps) return next()
- if (saveOptional || !(save || saveDev)) return next()
-
- var silent = false
- createShrinkwrap(tree.path, tree.package, shrinkwrapHasAnyDevOnlyDeps, silent, next)
- })
+ createShrinkwrap(tree, {silent: false}, next)
}
function savePackageJson (args, tree, next) {
validate('AOF', arguments)
+ if (!args || !args.length) { return next() }
+
var saveBundle = npm.config.get('save-bundle')
// each item in the tree is a top-level thing that should be saved
@@ -74,33 +58,34 @@ function savePackageJson (args, tree, next) {
var saveTarget = path.resolve(tree.path, 'package.json')
// don't use readJson, because we don't want to do all the other
// tricky npm-specific stuff that's in there.
- fs.readFile(saveTarget, iferr(next, function (packagejson) {
+ fs.readFile(saveTarget, 'utf8', iferr(next, function (packagejson) {
+ const indent = detectIndent(packagejson).indent || ' '
try {
- packagejson = parseJSON(packagejson)
+ tree.package = parseJSON(packagejson)
} catch (ex) {
return next(ex)
}
// If we're saving bundled deps, normalize the key before we start
if (saveBundle) {
- var bundle = packagejson.bundleDependencies || packagejson.bundledDependencies
- delete packagejson.bundledDependencies
+ var bundle = tree.package.bundleDependencies || tree.package.bundledDependencies
+ delete tree.package.bundledDependencies
if (!Array.isArray(bundle)) bundle = []
}
var toSave = getThingsToSave(tree)
- var toRemove = getThingsToRemove(args, tree)
+ var toRemove = getThingsToRemove(tree)
var savingTo = {}
toSave.forEach(function (pkg) { savingTo[pkg.save] = true })
toRemove.forEach(function (pkg) { savingTo[pkg.save] = true })
Object.keys(savingTo).forEach(function (save) {
- if (!packagejson[save]) packagejson[save] = {}
+ if (!tree.package[save]) tree.package[save] = {}
})
log.verbose('saving', toSave)
toSave.forEach(function (pkg) {
- packagejson[pkg.save][pkg.name] = pkg.spec
+ tree.package[pkg.save][pkg.name] = pkg.spec
if (saveBundle) {
var ii = bundle.indexOf(pkg.name)
if (ii === -1) bundle.push(pkg.name)
@@ -108,71 +93,46 @@ function savePackageJson (args, tree, next) {
})
toRemove.forEach(function (pkg) {
- delete packagejson[pkg.save][pkg.name]
+ delete tree.package[pkg.save][pkg.name]
if (saveBundle) {
bundle = without(bundle, pkg.name)
}
})
Object.keys(savingTo).forEach(function (key) {
- packagejson[key] = deepSortObject(packagejson[key])
+ tree.package[key] = deepSortObject(tree.package[key])
})
if (saveBundle) {
- packagejson.bundledDependencies = deepSortObject(bundle)
+ tree.package.bundleDependencies = deepSortObject(bundle)
}
- var json = JSON.stringify(packagejson, null, 2) + '\n'
+ var json = JSON.stringify(tree.package, null, indent) + '\n'
writeFileAtomic(saveTarget, json, next)
}))
}
-var getSaveType = exports.getSaveType = function (args) {
- validate('A', arguments)
- var nothingToSave = !args.length
+exports.getSaveType = function (tree, arg) {
+ if (arguments.length) validate('OO', arguments)
var globalInstall = npm.config.get('global')
var noSaveFlags = !npm.config.get('save') &&
!npm.config.get('save-dev') &&
!npm.config.get('save-optional')
- if (nothingToSave || globalInstall || noSaveFlags) return null
-
- if (npm.config.get('save-optional')) return 'optionalDependencies'
- else if (npm.config.get('save-dev')) return 'devDependencies'
- else return 'dependencies'
-}
+ if (globalInstall || noSaveFlags) return null
-function computeVersionSpec (child) {
- validate('O', arguments)
- var requested = child.package._requested
- if (!requested || requested.type === 'tag') {
- requested = {
- type: 'version',
- spec: child.package.version
- }
- }
- if (requested.type === 'version' || requested.type === 'range') {
- var version = child.package.version
- var rangeDescriptor = ''
- if (semver.valid(version, true) &&
- semver.gte(version, '0.1.0', true) &&
- !npm.config.get('save-exact')) {
- rangeDescriptor = npm.config.get('save-prefix')
- }
- return rangeDescriptor + version
- } else if (requested.type === 'directory' || requested.type === 'local') {
- var relativePath = path.relative(child.parent.path, requested.spec)
- if (/^[.][.]/.test(relativePath)) {
- return url.format({
- protocol: 'file',
- slashes: true,
- pathname: requested.spec
- })
- } else {
- return 'file:' + relativePath
- }
- } else if (requested.type === 'hosted') {
- return requested.spec
+ if (npm.config.get('save-optional')) {
+ return 'optionalDependencies'
+ } else if (npm.config.get('save-dev')) {
+ return 'devDependencies'
} else {
- return requested.rawSpec
+ if (arg) {
+ var name = moduleName(arg)
+ if (tree.package.optionalDependencies[name]) {
+ return 'optionalDependencies'
+ } else if (tree.package.devDependencies[name]) {
+ return 'devDependencies'
+ }
+ }
+ return 'dependencies'
}
}
@@ -183,15 +143,15 @@ function getThingsToSave (tree) {
}).map(function (child) {
return {
name: moduleName(child),
- spec: computeVersionSpec(child),
+ spec: child.saveSpec,
save: child.save
}
})
return toSave
}
-function getThingsToRemove (args, tree) {
- validate('AO', arguments)
+function getThingsToRemove (tree) {
+ validate('O', arguments)
if (!tree.removed) return []
var toRemove = tree.removed.map(function (child) {
return {
@@ -199,12 +159,5 @@ function getThingsToRemove (args, tree) {
save: child.save
}
})
- var saveType = getSaveType(args)
- args.forEach(function (arg) {
- toRemove.push({
- name: arg,
- save: saveType
- })
- })
return toRemove
}
diff --git a/deps/npm/lib/install/update-package-json.js b/deps/npm/lib/install/update-package-json.js
index eee530c3cd..14339d0012 100644
--- a/deps/npm/lib/install/update-package-json.js
+++ b/deps/npm/lib/install/update-package-json.js
@@ -20,20 +20,24 @@ module.exports = function (mod, buildpath, next) {
pkg._requiredBy =
mod.requiredBy
.map(function (req) {
- if (req.package.devDependencies[name] && !req.package.dependencies[name]) {
+ if (
+ req.package.devDependencies &&
+ req.package.devDependencies[name] &&
+ !req.package.dependencies[name]
+ ) {
return '#DEV:' + req.location
} else {
return req.location
}
})
.concat(mod.userRequired ? ['#USER'] : [])
- .concat(mod.existing ? ['#EXISTING'] : [])
.sort()
pkg._location = mod.location
pkg._phantomChildren = {}
Object.keys(mod.phantomChildren).sort().forEach(function (name) {
pkg._phantomChildren[name] = mod.phantomChildren[name].package.version
})
+ pkg._inBundle = !!mod.fromBundle
// sort keys that are known safe to sort to produce more consistent output
sortKeys.forEach(function (key) {
@@ -42,5 +46,8 @@ module.exports = function (mod, buildpath, next) {
var data = JSON.stringify(sortedObject(pkg), null, 2) + '\n'
- writeFileAtomic(path.resolve(buildpath, 'package.json'), data, next)
+ writeFileAtomic(path.resolve(buildpath, 'package.json'), data, {
+ // We really don't need this guarantee, and fsyncing here is super slow.
+ fsync: false
+ }, next)
}
diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js
index 3c0e4384dd..b993dd6235 100644
--- a/deps/npm/lib/ls.js
+++ b/deps/npm/lib/ls.js
@@ -9,7 +9,6 @@ module.exports = exports = ls
var path = require('path')
var url = require('url')
var readPackageTree = require('read-package-tree')
-var log = require('npmlog')
var archy = require('archy')
var semver = require('semver')
var color = require('ansicolors')
@@ -19,7 +18,7 @@ var sortedObject = require('sorted-object')
var extend = Object.assign || require('util')._extend
var npm = require('./npm.js')
var mutateIntoLogicalTree = require('./install/mutate-into-logical-tree.js')
-var recalculateMetadata = require('./install/deps.js').recalculateMetadata
+var computeMetadata = require('./install/deps.js').computeMetadata
var packageId = require('./utils/package-id.js')
var usage = require('./utils/usage')
var output = require('./utils/output.js')
@@ -37,14 +36,14 @@ function ls (args, silent, cb) {
silent = false
}
var dir = path.resolve(npm.dir, '..')
- readPackageTree(dir, andRecalculateMetadata(iferr(cb, function (physicalTree) {
+ readPackageTree(dir, andComputeMetadata(iferr(cb, function (physicalTree) {
lsFromTree(dir, physicalTree, args, silent, cb)
})))
}
-function andRecalculateMetadata (next) {
+function andComputeMetadata (next) {
return function (er, tree) {
- recalculateMetadata(tree || {}, log, next)
+ next(null, computeMetadata(tree || {}))
}
}
@@ -63,14 +62,18 @@ var lsFromTree = ls.fromTree = function (dir, physicalTree, args, silent, cb) {
args = []
} else {
args = args.map(function (a) {
- var p = npa(a)
- var name = p.name
- // When version spec is missing, we'll skip using it when filtering.
- // Otherwise, `semver.validRange` would return '*', which won't
- // match prerelease versions.
- var ver = (p.rawSpec &&
- (semver.validRange(p.rawSpec) || ''))
- return [ name, ver, a ]
+ if (typeof a === 'object') {
+ return [a.package.name, a.package.version, a]
+ } else {
+ var p = npa(a)
+ var name = p.name
+ // When version spec is missing, we'll skip using it when filtering.
+ // Otherwise, `semver.validRange` would return '*', which won't
+ // match prerelease versions.
+ var ver = (p.rawSpec &&
+ (semver.validRange(p.rawSpec) || ''))
+ return [ name, ver, a ]
+ }
})
}
@@ -293,13 +296,15 @@ function filterFound (root, args) {
var argVersion = args[ii][1]
var argRaw = args[ii][2]
var found
- if (depName === argName && argVersion) {
+ if (typeof argRaw === 'object') {
+ if (dep.path === argRaw.path) {
+ found = true
+ }
+ } else if (depName === argName && argVersion) {
found = semver.satisfies(dep.version, argVersion, true)
} else if (depName === argName) {
// If version is missing from arg, just do a name match.
found = true
- } else if (dep.path === argRaw) {
- found = true
}
if (found) {
dep._found = 'explicit'
@@ -474,7 +479,7 @@ function makeParseable (data, long, dir, depth, parent, d) {
.sort(alphasort).map(function (d) {
return makeParseable(data.dependencies[d], long, dir, depth + 1, data, d)
}))
- .filter(function (x) { return x })
+ .filter(function (x) { return x && x.length })
.join('\n')
}
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index f852886433..dbd8e97150 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -38,6 +38,12 @@
var plumbing = require('./config/cmd-list').plumbing
var output = require('./utils/output.js')
var startMetrics = require('./utils/metrics.js').start
+ var perf = require('./utils/perf.js')
+
+ perf.emit('time', 'npm')
+ perf.on('timing', function (name, finished) {
+ log.timing(name, 'Completed in', finished + 'ms')
+ })
npm.config = {
loaded: false,
@@ -54,16 +60,19 @@
// TUNING
npm.limit = {
fetch: 10,
- action: 10
+ action: 50
}
// ***
+ npm.lockfileVersion = 1
+
npm.rollbacks = []
try {
// startup, ok to do this synchronously
var j = parseJSON(fs.readFileSync(
path.join(__dirname, '../package.json')) + '')
+ npm.name = j.name
npm.version = j.version
} catch (ex) {
try {
@@ -94,7 +103,7 @@
if (!loaded) {
throw new Error(
'Call npm.load(config, cb) before using this command.\n' +
- 'See the README.md or cli.js for example usage.'
+ 'See the README.md or bin/npm-cli.js for example usage.'
)
}
var a = npm.deref(c)
@@ -334,8 +343,8 @@
// go ahead and spin up the registry client.
lazyProperty(npm, 'registry', function () {
registryLoaded = true
- var CachingRegClient = require('./cache/caching-client.js')
- var registry = new CachingRegClient(npm.config)
+ var RegClient = require('npm-registry-client')
+ var registry = new RegClient(adaptClientConfig(npm.config))
registry.version = npm.version
registry.refer = registryRefer
return registry
@@ -458,4 +467,31 @@
return ''
}
}
+
+ function adaptClientConfig (config) {
+ return {
+ proxy: {
+ http: config.get('proxy'),
+ https: config.get('https-proxy'),
+ localAddress: config.get('local-address')
+ },
+ ssl: {
+ certificate: config.get('cert'),
+ key: config.get('key'),
+ ca: config.get('ca'),
+ strict: config.get('strict-ssl')
+ },
+ retry: {
+ retries: config.get('fetch-retries'),
+ factor: config.get('fetch-retry-factor'),
+ minTimeout: config.get('fetch-retry-mintimeout'),
+ maxTimeout: config.get('fetch-retry-maxtimeout')
+ },
+ userAgent: config.get('user-agent'),
+ log: log,
+ defaultTag: config.get('tag'),
+ maxSockets: config.get('maxsockets'),
+ scope: npm.projectScope
+ }
+ }
})()
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
index 546d14b134..7d5cfba86e 100644
--- a/deps/npm/lib/outdated.js
+++ b/deps/npm/lib/outdated.js
@@ -23,7 +23,6 @@ outdated.completion = require('./utils/completion/installed-deep.js')
var os = require('os')
var url = require('url')
var path = require('path')
-var log = require('npmlog')
var readPackageTree = require('read-package-tree')
var readJson = require('read-package-json')
var asyncMap = require('slide').asyncMap
@@ -38,7 +37,7 @@ var npm = require('./npm.js')
var long = npm.config.get('long')
var mapToRegistry = require('./utils/map-to-registry.js')
var isExtraneous = require('./install/is-extraneous.js')
-var recalculateMetadata = require('./install/deps.js').recalculateMetadata
+var computeMetadata = require('./install/deps.js').computeMetadata
var moduleName = require('./utils/module-name.js')
var output = require('./utils/output.js')
var ansiTrim = require('./utils/ansi-trim')
@@ -59,10 +58,10 @@ function uniq (list) {
return uniqed
}
-function andRecalculateMetadata (next) {
+function andComputeMetadata (next) {
return function (er, tree) {
if (er) return next(er)
- recalculateMetadata(tree, log, next)
+ next(null, computeMetadata(tree))
}
}
@@ -76,7 +75,7 @@ function outdated (args, silent, cb) {
// default depth for `outdated` is 0 (cf. `ls`)
if (npm.config.get('depth') === Infinity) npm.config.set('depth', 0)
- readPackageTree(dir, andRecalculateMetadata(function (er, tree) {
+ readPackageTree(dir, andComputeMetadata(function (er, tree) {
if (!tree) return cb(er)
mutateIntoLogicalTree(tree)
outdated_(args, '', tree, {}, 0, function (er, list) {
@@ -292,7 +291,7 @@ function outdated_ (args, path, tree, parentHas, depth, cb) {
var required = (tree.package.dependencies)[name] ||
(tree.package.optionalDependencies)[name] ||
(tree.package.devDependencies)[name] ||
- dep.package._requested && dep.package._requested.spec ||
+ dep.package._requested && dep.package._requested.fetchSpec ||
'*'
if (!long) return shouldUpdate(args, dep, name, has, required, depth, path, cb)
@@ -327,7 +326,7 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) {
}
if (args.length && args.indexOf(dep) === -1) return skip()
- var parsed = npa(dep + '@' + req)
+ var parsed = npa.resolve(dep, req)
if (tree.isLink && tree.parent && tree.parent.isTop) {
return doIt('linked', 'linked')
}
@@ -343,7 +342,7 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) {
})
function updateLocalDeps (latestRegistryVersion) {
- readJson(path.resolve(parsed.spec, 'package.json'), function (er, localDependency) {
+ readJson(path.resolve(parsed.fetchSpec, 'package.json'), function (er, localDependency) {
if (er) return cb()
var wanted = localDependency.version
@@ -367,7 +366,7 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) {
function updateDeps (er, d) {
if (er) {
- if (parsed.type !== 'local') return cb(er)
+ if (parsed.type !== 'directory' && parsed.type !== 'file') return cb(er)
return updateLocalDeps()
}
@@ -413,7 +412,7 @@ function shouldUpdate (args, tree, dep, has, req, depth, pkgpath, cb, type) {
dFromUrl && cFromUrl && d._from !== curr.from ||
d.version !== curr.version ||
d.version !== l.version) {
- if (parsed.type === 'local') return updateLocalDeps(l.version)
+ if (parsed.type === 'file' || parsed.type === 'directory') return updateLocalDeps(l.version)
doIt(d.version, l.version)
} else {
diff --git a/deps/npm/lib/pack.js b/deps/npm/lib/pack.js
index ede59dd12c..68c6030ee8 100644
--- a/deps/npm/lib/pack.js
+++ b/deps/npm/lib/pack.js
@@ -1,25 +1,39 @@
+'use strict'
+
// npm pack <pkg>
// Packs the specified package into a .tgz file, which can then
// be installed.
-module.exports = pack
+const BB = require('bluebird')
-var install = require('./install.js')
-var cache = require('./cache.js')
-var fs = require('graceful-fs')
-var chain = require('slide').chain
-var path = require('path')
-var cwd = process.cwd()
-var writeStreamAtomic = require('fs-write-stream-atomic')
-var cachedPackageRoot = require('./cache/cached-package-root.js')
-var output = require('./utils/output.js')
+const cache = require('./cache')
+const cacache = require('cacache')
+const deprCheck = require('./utils/depr-check')
+const fpm = BB.promisify(require('./fetch-package-metadata'))
+const fs = require('graceful-fs')
+const install = require('./install')
+const lifecycle = BB.promisify(require('./utils/lifecycle'))
+const move = require('move-concurrently')
+const npm = require('./npm')
+const output = require('./utils/output')
+const pacoteOpts = require('./config/pacote')
+const path = require('path')
+const pathIsInside = require('path-is-inside')
+const pipe = BB.promisify(require('mississippi').pipe)
+const prepublishWarning = require('./utils/warn-deprecated')('prepublish-on-install')
+const pinflight = require('promise-inflight')
+const readJson = BB.promisify(require('read-package-json'))
+const tarPack = BB.promisify(require('./utils/tar').pack)
+const writeStreamAtomic = require('fs-write-stream-atomic')
pack.usage = 'npm pack [[<@scope>/]<pkg>...]'
// if it can be installed, it can be packed.
pack.completion = install.completion
+module.exports = pack
function pack (args, silent, cb) {
+ const cwd = process.cwd()
if (typeof cb !== 'function') {
cb = silent
silent = false
@@ -27,47 +41,77 @@ function pack (args, silent, cb) {
if (args.length === 0) args = ['.']
- chain(
- args.map(function (arg) { return function (cb) { pack_(arg, cb) } }),
- function (er, files) {
- if (er || silent) return cb(er, files)
- printFiles(files, cb)
+ BB.all(
+ args.map((arg) => pack_(arg, cwd))
+ ).then((files) => {
+ if (!silent) {
+ output(files.map((f) => path.relative(cwd, f)).join('\n'))
}
- )
-}
-
-function printFiles (files, cb) {
- files = files.map(function (file) {
- return path.relative(cwd, file)
- })
- output(files.join('\n'))
- cb()
+ cb(null, files)
+ }, cb)
}
// add to cache, then cp to the cwd
-function pack_ (pkg, cb) {
- cache.add(pkg, null, null, false, function (er, data) {
- if (er) return cb(er)
-
+function pack_ (pkg, dir) {
+ return fpm(pkg, dir).then((mani) => {
+ let name = mani.name[0] === '@'
// scoped packages get special treatment
- var name = data.name
- if (name[0] === '@') name = name.substr(1).replace(/\//g, '-')
- var fname = name + '-' + data.version + '.tgz'
-
- var cached = path.join(cachedPackageRoot(data), 'package.tgz')
- var from = fs.createReadStream(cached)
- var to = writeStreamAtomic(fname)
- var errState = null
-
- from.on('error', cb_)
- to.on('error', cb_)
- to.on('close', cb_)
- from.pipe(to)
+ ? mani.name.substr(1).replace(/\//g, '-')
+ : mani.name
+ const target = `${name}-${mani.version}.tgz`
+ return pinflight(target, () => {
+ if (mani._requested.type === 'directory') {
+ return prepareDirectory(mani._resolved).then(() => {
+ return packDirectory(mani, mani._resolved, target)
+ })
+ } else {
+ return cache.add(pkg).then((info) => {
+ return pipe(
+ cacache.get.stream.byDigest(pacoteOpts().cache, info.integrity || mani._integrity),
+ writeStreamAtomic(target)
+ )
+ }).then(() => target)
+ }
+ })
+ })
+}
- function cb_ (er) {
- if (errState) return
- if (er) return cb(errState = er)
- cb(null, fname)
+module.exports.prepareDirectory = prepareDirectory
+function prepareDirectory (dir) {
+ return readJson(path.join(dir, 'package.json')).then((pkg) => {
+ if (!pkg.name) {
+ throw new Error('package.json requires a "name" field')
+ }
+ if (!pkg.version) {
+ throw new Error('package.json requires a valid "version" field')
}
+ if (!pathIsInside(dir, npm.tmp)) {
+ if (pkg.scripts && pkg.scripts.prepublish) {
+ prepublishWarning([
+ 'As of npm@5, `prepublish` scripts are deprecated.',
+ 'Use `prepare` for build steps and `prepublishOnly` for upload-only',
+ 'See the deprecation note in `npm help scripts` for more information'
+ ])
+ }
+ if (npm.config.get('ignore-prepublish')) {
+ return lifecycle(pkg, 'prepare', dir).then(() => pkg)
+ } else {
+ return lifecycle(pkg, 'prepublish', dir).then(() => {
+ return lifecycle(pkg, 'prepare', dir)
+ }).then(() => pkg)
+ }
+ }
+ return pkg
+ })
+}
+
+module.exports.packDirectory = packDirectory
+function packDirectory (mani, dir, target) {
+ deprCheck(mani)
+ return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
+ const tmpTarget = path.join(tmp, path.basename(target))
+ return tarPack(tmpTarget, dir, mani).then(() => {
+ return move(tmpTarget, target, {Promise: BB, fs})
+ }).then(() => target)
})
}
diff --git a/deps/npm/lib/ping.js b/deps/npm/lib/ping.js
index a86150508d..e06be9a471 100644
--- a/deps/npm/lib/ping.js
+++ b/deps/npm/lib/ping.js
@@ -14,8 +14,8 @@ function ping (args, silent, cb) {
if (!registry) return cb(new Error('no default registry set'))
var auth = npm.config.getCredentialsByURI(registry)
- npm.registry.ping(registry, {auth: auth}, function (er, pong) {
+ npm.registry.ping(registry, {auth: auth}, function (er, pong, data, res) {
if (!silent) output(JSON.stringify(pong))
- cb(er, er ? null : pong)
+ cb(er, er ? null : pong, data, res)
})
}
diff --git a/deps/npm/lib/prune.js b/deps/npm/lib/prune.js
index 6d103fc508..39d1c8ffb7 100644
--- a/deps/npm/lib/prune.js
+++ b/deps/npm/lib/prune.js
@@ -15,6 +15,7 @@ var isDev = require('./install/is-dev-dep.js')
var removeDeps = require('./install/deps.js').removeDeps
var loadExtraneous = require('./install/deps.js').loadExtraneous
var chain = require('slide').chain
+var computeMetadata = require('./install/deps.js').computeMetadata
prune.completion = require('./utils/completion/installed-deep.js')
@@ -29,11 +30,12 @@ function Pruner (where, dryrun, args) {
util.inherits(Pruner, Installer)
Pruner.prototype.loadAllDepsIntoIdealTree = function (cb) {
- log.silly('uninstall', 'loadAllDepsIntoIdealtree')
+ log.silly('uninstall', 'loadAllDepsIntoIdealTree')
- var cg = this.progress.loadAllDepsIntoIdealTree
+ var cg = this.progress['loadIdealTree:loadAllDepsIntoIdealTree']
var steps = []
+ computeMetadata(this.idealTree)
var self = this
var excludeDev = npm.config.get('production') || /^prod(uction)?$/.test(npm.config.get('only'))
function shouldPrune (child) {
@@ -54,7 +56,7 @@ Pruner.prototype.loadAllDepsIntoIdealTree = function (cb) {
function nameObj (name) {
return {name: name}
}
- var toPrune = this.currentTree.children.filter(shouldPrune).map(getModuleName).filter(matchesArg).map(nameObj)
+ var toPrune = this.idealTree.children.filter(shouldPrune).map(getModuleName).filter(matchesArg).map(nameObj)
steps.push(
[removeDeps, toPrune, this.idealTree, null, cg.newGroup('removeDeps')],
diff --git a/deps/npm/lib/publish.js b/deps/npm/lib/publish.js
index c0e910e85f..49c98fb8e6 100644
--- a/deps/npm/lib/publish.js
+++ b/deps/npm/lib/publish.js
@@ -1,19 +1,26 @@
-
-module.exports = publish
-
-var npm = require('./npm.js')
-var log = require('npmlog')
-var path = require('path')
-var readJson = require('read-package-json')
-var lifecycle = require('./utils/lifecycle.js')
-var chain = require('slide').chain
-var mapToRegistry = require('./utils/map-to-registry.js')
-var cachedPackageRoot = require('./cache/cached-package-root.js')
-var createReadStream = require('graceful-fs').createReadStream
-var npa = require('npm-package-arg')
-var semver = require('semver')
-var getPublishConfig = require('./utils/get-publish-config.js')
-var output = require('./utils/output.js')
+'use strict'
+
+const BB = require('bluebird')
+
+const cacache = require('cacache')
+const cache = require('./cache')
+const createReadStream = require('graceful-fs').createReadStream
+const getPublishConfig = require('./utils/get-publish-config.js')
+const lifecycle = BB.promisify(require('./utils/lifecycle.js'))
+const log = require('npmlog')
+const mapToRegistry = require('./utils/map-to-registry.js')
+const npa = require('npm-package-arg')
+const npm = require('./npm.js')
+const output = require('./utils/output.js')
+const pack = require('./pack')
+const pacote = require('pacote')
+const pacoteOpts = require('./config/pacote')
+const path = require('path')
+const pipe = BB.promisify(require('mississippi').pipe)
+const readJson = BB.promisify(require('read-package-json'))
+const semver = require('semver')
+const statAsync = BB.promisify(require('graceful-fs').stat)
+const writeStreamAtomic = require('fs-write-stream-atomic')
publish.usage = 'npm publish [<tarball>|<folder>] [--tag <tag>] [--access <public|restricted>]' +
"\n\nPublishes '.' if no argument supplied" +
@@ -26,6 +33,7 @@ publish.completion = function (opts, cb) {
return cb()
}
+module.exports = publish
function publish (args, isRetry, cb) {
if (typeof cb !== 'function') {
cb = isRetry
@@ -36,89 +44,116 @@ function publish (args, isRetry, cb) {
log.verbose('publish', args)
- var t = npm.config.get('tag').trim()
+ const t = npm.config.get('tag').trim()
if (semver.validRange(t)) {
- var er = new Error('Tag name must not be a valid SemVer range: ' + t)
- return cb(er)
+ return cb(new Error('Tag name must not be a valid SemVer range: ' + t))
}
- var arg = args[0]
- // if it's a local folder, then run the prepublish there, first.
- readJson(path.resolve(arg, 'package.json'), function (er, data) {
- if (er && er.code !== 'ENOENT' && er.code !== 'ENOTDIR') return cb(er)
+ publish_(args[0]).then((pkg) => {
+ output(`+ ${pkg._id}`)
+ cb()
+ }, cb)
+}
- if (data) {
- if (!data.name) return cb(new Error('No name provided'))
- if (!data.version) return cb(new Error('No version provided'))
+function publish_ (arg) {
+ return statAsync(arg).then((stat) => {
+ if (stat.isDirectory()) {
+ return stat
+ } else {
+ const err = new Error('not a directory')
+ err.code = 'ENOTDIR'
+ throw err
}
-
- // if readJson errors, the argument might be a tarball or package URL
- if (er) {
- npm.commands.cache.add(arg, null, null, false, function (er, data) {
- if (er) return cb(er)
- log.silly('publish', data)
- var cached = path.resolve(cachedPackageRoot(data), 'package') + '.tgz'
- // *publish* lifecycle scripts aren't run when publishing a built artifact
- // go to the next step directly
- publish_(arg, data, isRetry, cached, cb)
- })
+ }).then(() => {
+ return publishFromDirectory(arg)
+ }, (err) => {
+ if (err.code !== 'ENOENT' && err.code !== 'ENOTDIR') {
+ throw err
} else {
- var dir = arg
- npm.commands.cache.add(dir, null, null, false, function (er, data) {
- if (er) return cb(er)
- log.silly('publish', data)
- var cached = path.resolve(cachedPackageRoot(data), 'package') + '.tgz'
- // `prepublish` and `prepare` are run by cache.add
- chain(
- [
- [lifecycle, data, 'prepublishOnly', dir],
- [publish_, dir, data, isRetry, cached],
- [lifecycle, data, 'publish', dir],
- [lifecycle, data, 'postpublish', dir]
- ],
- cb
- )
- })
+ return publishFromPackage(arg)
}
})
}
-function publish_ (arg, data, isRetry, cached, cb) {
- if (!data) return cb(new Error('no package.json file found'))
-
- var mappedConfig = getPublishConfig(
- data.publishConfig,
- npm.config,
- npm.registry
- )
- var config = mappedConfig.config
- var registry = mappedConfig.client
+function publishFromDirectory (arg) {
+ return pack.prepareDirectory(arg).tap((pkg) => {
+ return lifecycle(pkg, 'prepublishOnly', arg)
+ }).tap((pkg) => {
+ return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'fromDir'}, (tmpDir) => {
+ const target = path.join(tmpDir, 'package.tgz')
+ return pack.packDirectory(pkg, arg, target).then(() => {
+ return upload(arg, pkg, false, target)
+ })
+ })
+ }).tap((pkg) => {
+ return lifecycle(pkg, 'publish', arg)
+ }).tap((pkg) => {
+ return lifecycle(pkg, 'postpublish', arg)
+ })
+}
- data._npmVersion = npm.version
- data._nodeVersion = process.versions.node
+function publishFromPackage (arg) {
+ return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'fromPackage'}, (tmp) => {
+ const extracted = path.join(tmp, 'package')
+ const target = path.join(tmp, 'package.json')
+ return cache.add(arg).then((info) => {
+ const opts = pacoteOpts({integrity: info.integrity})
+ return BB.all([
+ pipe(
+ cacache.get.stream.byDigest(opts.cache, info.integrity),
+ writeStreamAtomic(target)
+ ).then(() => target),
+ pacote.extract(arg, extracted, opts).then(() => {
+ return readJson(path.join(extracted, 'package.json'))
+ })
+ ]).spread((target, pkg) => {
+ return upload(arg, pkg, false, target)
+ })
+ })
+ })
+}
- delete data.modules
- if (data.private) {
- return cb(new Error(
+function upload (arg, pkg, isRetry, cached) {
+ if (!pkg) {
+ return BB.reject(new Error('no package.json file found'))
+ }
+ if (pkg.private) {
+ return BB.reject(new Error(
'This package has been marked as private\n' +
"Remove the 'private' field from the package.json to publish it."
))
}
- mapToRegistry(data.name, config, function (er, registryURI, auth, registryBase) {
- if (er) return cb(er)
+ const mappedConfig = getPublishConfig(
+ pkg.publishConfig,
+ npm.config,
+ npm.registry
+ )
+ const config = mappedConfig.config
+ const registry = mappedConfig.client
+ pkg._npmVersion = npm.version
+ pkg._nodeVersion = process.versions.node
+
+ delete pkg.modules
+
+ return BB.fromNode((cb) => {
+ mapToRegistry(pkg.name, config, (err, registryURI, auth, registryBase) => {
+ if (err) { return cb(err) }
+ cb(null, [registryURI, auth, registryBase])
+ })
+ }).spread((registryURI, auth, registryBase) => {
// we just want the base registry URL in this case
log.verbose('publish', 'registryBase', registryBase)
log.silly('publish', 'uploading', cached)
- data._npmUser = {
+ pkg._npmUser = {
name: auth.username,
email: auth.email
}
- var params = {
- metadata: data,
+ const params = {
+ metadata: pkg,
body: createReadStream(cached),
auth: auth
}
@@ -126,29 +161,35 @@ function publish_ (arg, data, isRetry, cached, cb) {
// registry-frontdoor cares about the access level, which is only
// configurable for scoped packages
if (config.get('access')) {
- if (!npa(data.name).scope && config.get('access') === 'restricted') {
- return cb(new Error("Can't restrict access to unscoped packages."))
+ if (!npa(pkg.name).scope && config.get('access') === 'restricted') {
+ throw new Error("Can't restrict access to unscoped packages.")
}
params.access = config.get('access')
}
- log.showProgress('publish:' + data._id)
- registry.publish(registryBase, params, function (er) {
- if (er && er.code === 'EPUBLISHCONFLICT' &&
- npm.config.get('force') && !isRetry) {
- log.warn('publish', 'Forced publish over ' + data._id)
- return npm.commands.unpublish([data._id], function (er) {
+ log.showProgress('publish:' + pkg._id)
+ return BB.fromNode((cb) => {
+ registry.publish(registryBase, params, cb)
+ }).catch((err) => {
+ if (
+ err.code === 'EPUBLISHCONFLICT' &&
+ npm.config.get('force') &&
+ !isRetry
+ ) {
+ log.warn('publish', 'Forced publish over ' + pkg._id)
+ return BB.fromNode((cb) => {
+ npm.commands.unpublish([pkg._id], cb)
+ }).finally(() => {
// ignore errors. Use the force. Reach out with your feelings.
- // but if it fails again, then report the first error.
- publish([arg], er || true, cb)
+ return upload(arg, pkg, true, cached).catch(() => {
+ // but if it fails again, then report the first error.
+ throw err
+ })
})
+ } else {
+ throw err
}
- // report the unpublish error if this was a retry and unpublish failed
- if (er && isRetry && isRetry !== true) return cb(isRetry)
- if (er) return cb(er)
- output('+ ' + data._id)
- cb()
})
})
}
diff --git a/deps/npm/lib/search/all-package-metadata.js b/deps/npm/lib/search/all-package-metadata.js
index fda5b6d306..5a27bdbcee 100644
--- a/deps/npm/lib/search/all-package-metadata.js
+++ b/deps/npm/lib/search/all-package-metadata.js
@@ -7,7 +7,7 @@ var chownr = require('chownr')
var npm = require('../npm.js')
var log = require('npmlog')
var cacheFile = require('npm-cache-filename')
-var getCacheStat = require('../cache/get-stat.js')
+var correctMkdir = require('../utils/correct-mkdir.js')
var mapToRegistry = require('../utils/map-to-registry.js')
var jsonstream = require('JSONStream')
var writeStreamAtomic = require('fs-write-stream-atomic')
@@ -236,7 +236,7 @@ function createCacheWriteStream (cacheFile, latest, cb) {
function _ensureCacheDirExists (cacheFile, cb) {
var cacheBase = path.dirname(cacheFile)
log.silly('all-package-metadata', 'making sure cache dir exists at', cacheBase)
- getCacheStat(function (er, st) {
+ correctMkdir(npm.cache, function (er, st) {
if (er) return cb(er)
mkdir(cacheBase, function (er, made) {
if (er) return cb(er)
diff --git a/deps/npm/lib/shrinkwrap.js b/deps/npm/lib/shrinkwrap.js
index 5e12f0bd81..75fe0dd95d 100644
--- a/deps/npm/lib/shrinkwrap.js
+++ b/deps/npm/lib/shrinkwrap.js
@@ -1,30 +1,39 @@
-// emit JSON describing versions of all packages currently installed (for later
-// use with shrinkwrap install)
+'use strict'
-module.exports = exports = shrinkwrap
+const BB = require('bluebird')
-var path = require('path')
-var log = require('npmlog')
-var writeFileAtomic = require('write-file-atomic')
-var iferr = require('iferr')
-var readPackageJson = require('read-package-json')
-var readPackageTree = require('read-package-tree')
-var validate = require('aproba')
-var chain = require('slide').chain
-var npm = require('./npm.js')
-var recalculateMetadata = require('./install/deps.js').recalculateMetadata
-var validatePeerDeps = require('./install/deps.js').validatePeerDeps
-var isExtraneous = require('./install/is-extraneous.js')
-var packageId = require('./utils/package-id.js')
-var moduleName = require('./utils/module-name.js')
-var output = require('./utils/output.js')
-var lifecycle = require('./utils/lifecycle.js')
-var isDevDep = require('./install/is-dev-dep.js')
-var isProdDep = require('./install/is-prod-dep.js')
-var isOptDep = require('./install/is-opt-dep.js')
+const chain = require('slide').chain
+const detectIndent = require('detect-indent')
+const fs = BB.promisifyAll(require('graceful-fs'))
+const getRequested = require('./install/get-requested.js')
+const id = require('./install/deps.js')
+const iferr = require('iferr')
+const isDevDep = require('./install/is-dev-dep.js')
+const isExtraneous = require('./install/is-extraneous.js')
+const isOptDep = require('./install/is-opt-dep.js')
+const isProdDep = require('./install/is-prod-dep.js')
+const lifecycle = require('./utils/lifecycle.js')
+const log = require('npmlog')
+const moduleName = require('./utils/module-name.js')
+const move = require('move-concurrently')
+const npm = require('./npm.js')
+const packageId = require('./utils/package-id.js')
+const path = require('path')
+const pkgSri = require('./utils/package-integrity.js')
+const readPackageTree = BB.promisify(require('read-package-tree'))
+const ssri = require('ssri')
+const validate = require('aproba')
+const writeFileAtomic = require('write-file-atomic')
+const PKGLOCK = 'package-lock.json'
+const SHRINKWRAP = 'npm-shrinkwrap.json'
+const PKGLOCK_VERSION = npm.lockfileVersion
+
+// emit JSON describing versions of all packages currently installed (for later
+// use with shrinkwrap install)
shrinkwrap.usage = 'npm shrinkwrap'
+module.exports = exports = shrinkwrap
function shrinkwrap (args, silent, cb) {
if (typeof cb !== 'function') {
cb = silent
@@ -35,56 +44,64 @@ function shrinkwrap (args, silent, cb) {
log.warn('shrinkwrap', "doesn't take positional args")
}
- var packagePath = path.join(npm.localPrefix, 'package.json')
- var prod = npm.config.get('production') || /^prod/.test(npm.config.get('only'))
-
- readPackageJson(packagePath, iferr(cb, function (pkg) {
- createShrinkwrap(npm.localPrefix, pkg, !prod, silent, cb)
- }))
+ move(
+ path.resolve(npm.prefix, PKGLOCK),
+ path.resolve(npm.prefix, SHRINKWRAP),
+ { Promise: BB }
+ ).then(() => {
+ log.notice('', `${PKGLOCK} has been renamed to ${SHRINKWRAP}. ${SHRINKWRAP} will be used for future installations.`)
+ return fs.readFileAsync(path.resolve(npm.prefix, SHRINKWRAP)).then((d) => {
+ return JSON.parse(d)
+ })
+ }, (err) => {
+ if (err.code !== 'ENOENT') {
+ throw err
+ } else {
+ return readPackageTree(npm.localPrefix).then(
+ id.computeMetadata
+ ).then((tree) => {
+ return BB.fromNode((cb) => {
+ createShrinkwrap(tree, {
+ silent,
+ defaultFile: SHRINKWRAP
+ }, cb)
+ })
+ })
+ }
+ }).then((data) => cb(null, data), cb)
}
module.exports.createShrinkwrap = createShrinkwrap
-function createShrinkwrap (dir, pkg, dev, silent, cb) {
- lifecycle(pkg, 'preshrinkwrap', dir, function () {
- readPackageTree(dir, andRecalculateMetadata(iferr(cb, function (tree) {
- var pkginfo = treeToShrinkwrap(tree, dev)
-
- chain([
- [lifecycle, tree.package, 'shrinkwrap', dir],
- [shrinkwrap_, pkginfo, silent],
- [lifecycle, tree.package, 'postshrinkwrap', dir]
- ], iferr(cb, function (data) {
- cb(null, data[0])
- }))
- })))
+function createShrinkwrap (tree, opts, cb) {
+ opts = opts || {}
+ lifecycle(tree.package, 'preshrinkwrap', tree.path, function () {
+ const pkginfo = treeToShrinkwrap(tree)
+ chain([
+ [lifecycle, tree.package, 'shrinkwrap', tree.path],
+ [shrinkwrap_, tree.path, pkginfo, opts],
+ [lifecycle, tree.package, 'postshrinkwrap', tree.path]
+ ], iferr(cb, function (data) {
+ cb(null, pkginfo)
+ }))
})
}
-function andRecalculateMetadata (next) {
- validate('F', arguments)
- return function (er, tree) {
- validate('EO', arguments)
- if (er) return next(er)
- recalculateMetadata(tree, log, next)
- }
-}
-
-function treeToShrinkwrap (tree, dev) {
- validate('OB', arguments)
+function treeToShrinkwrap (tree) {
+ validate('O', arguments)
var pkginfo = {}
if (tree.package.name) pkginfo.name = tree.package.name
if (tree.package.version) pkginfo.version = tree.package.version
var problems = []
if (tree.children.length) {
- shrinkwrapDeps(dev, problems, pkginfo.dependencies = {}, tree)
+ shrinkwrapDeps(problems, pkginfo.dependencies = {}, tree, tree)
}
if (problems.length) pkginfo.problems = problems
return pkginfo
}
-function shrinkwrapDeps (dev, problems, deps, tree, seen) {
- validate('BAOO', [dev, problems, deps, tree])
+function shrinkwrapDeps (problems, deps, top, tree, seen) {
+ validate('AOOO', [problems, deps, top, tree])
if (!seen) seen = {}
if (seen[tree.path]) return
seen[tree.path] = true
@@ -100,58 +117,133 @@ function shrinkwrapDeps (dev, problems, deps, tree, seen) {
})
tree.children.sort(function (aa, bb) { return moduleName(aa).localeCompare(moduleName(bb)) }).forEach(function (child) {
var childIsOnlyDev = isOnlyDev(child)
- if (!dev && childIsOnlyDev) {
- log.warn('shrinkwrap', 'Excluding devDependency: %s', child.location)
+ if (child.package._injectedFromShrinkwrap) {
+ deps[moduleName(child)] = child.package._injectedFromShrinkwrap
return
}
var pkginfo = deps[moduleName(child)] = {}
- pkginfo.version = child.package.version
- pkginfo.from = child.package._from
- pkginfo.resolved = child.package._resolved
- if (dev && childIsOnlyDev) pkginfo.dev = true
+ var req = child.package._requested || getRequested(child)
+ if (req.type === 'directory' || req.type === 'file') {
+ pkginfo.version = 'file:' + path.relative(top.path, child.package._resolved || req.fetchSpec)
+ } else if (!req.registry && !child.fromBundle) {
+ pkginfo.version = child.package._resolved || req.saveSpec || req.rawSpec
+ } else {
+ pkginfo.version = child.package.version
+ }
+ if (child.fromBundle || child.isInLink) {
+ pkginfo.bundled = true
+ } else {
+ if (req.registry) {
+ pkginfo.resolved = child.package._resolved
+ }
+      // no integrity for git deps as integrity hashes are based on the
+ // tarball and we can't (yet) create consistent tarballs from a stable
+ // source.
+ if (req.type !== 'git') {
+ pkginfo.integrity = child.package._integrity
+ if (!pkginfo.integrity && child.package._shasum) {
+ pkginfo.integrity = ssri.fromHex(child.package._shasum, 'sha1')
+ }
+ }
+ }
+ if (childIsOnlyDev) pkginfo.dev = true
if (isOptional(child)) pkginfo.optional = true
if (isExtraneous(child)) {
problems.push('extraneous: ' + child.package._id + ' ' + child.path)
}
- validatePeerDeps(child, function (tree, pkgname, version) {
+ id.validatePeerDeps(child, function (tree, pkgname, version) {
problems.push('peer invalid: ' + pkgname + '@' + version +
', required by ' + child.package._id)
})
if (child.children.length) {
- shrinkwrapDeps(dev, problems, pkginfo.dependencies = {}, child, seen)
+ pkginfo.dependencies = {}
+ shrinkwrapDeps(problems, pkginfo.dependencies, top, child, seen)
}
})
}
-function shrinkwrap_ (pkginfo, silent, cb) {
- if (pkginfo.problems) {
- return cb(new Error('Problems were encountered\n' +
- 'Please correct and try again.\n' +
- pkginfo.problems.join('\n')))
- }
-
- save(pkginfo, silent, cb)
+function shrinkwrap_ (dir, pkginfo, opts, cb) {
+ save(dir, pkginfo, opts, cb)
}
-function save (pkginfo, silent, cb) {
+function save (dir, pkginfo, opts, cb) {
// copy the keys over in a well defined order
// because javascript objects serialize arbitrarily
- var swdata
- try {
- swdata = JSON.stringify(pkginfo, null, 2) + '\n'
- } catch (er) {
- log.error('shrinkwrap', 'Error converting package info to json')
- return cb(er)
- }
-
- var file = path.resolve(npm.prefix, 'npm-shrinkwrap.json')
+ BB.join(
+ checkPackageFile(dir, SHRINKWRAP),
+ checkPackageFile(dir, PKGLOCK),
+ checkPackageFile(dir, 'package.json'),
+ (shrinkwrap, lockfile, pkg) => {
+ const info = (
+ shrinkwrap ||
+ lockfile ||
+ {
+ path: path.resolve(dir, opts.defaultFile || PKGLOCK),
+ data: '{}',
+ indent: (pkg && pkg.indent) || 2
+ }
+ )
+ const updated = updateLockfileMetadata(pkginfo, pkg && pkg.data)
+ const swdata = JSON.stringify(updated, null, info.indent) + '\n'
+ writeFileAtomic(info.path, swdata, (err) => {
+ if (err) return cb(err)
+ if (opts.silent) return cb(null, pkginfo)
+ if (!shrinkwrap && !lockfile) {
+ log.notice('', `created a lockfile as ${path.basename(info.path)}. You should commit this file.`)
+ }
+ cb(null, pkginfo)
+ })
+ }
+ ).then((file) => {
+ }, cb)
+}
- writeFileAtomic(file, swdata, function (er) {
- if (er) return cb(er)
- if (silent) return cb(null, pkginfo)
- output('wrote npm-shrinkwrap.json')
- cb(null, pkginfo)
+function updateLockfileMetadata (pkginfo, pkgJson) {
+ // This is a lot of work just to make sure the extra metadata fields are
+ // between version and dependencies fields, without affecting any other stuff
+ const newPkg = {}
+ let metainfoWritten = false
+ const metainfo = new Set([
+ 'lockfileVersion',
+ 'packageIntegrity',
+ 'preserveSymlinks'
+ ])
+ Object.keys(pkginfo).forEach((k) => {
+ if (k === 'dependencies') {
+ writeMetainfo(newPkg)
+ }
+ if (!metainfo.has(k)) {
+ newPkg[k] = pkginfo[k]
+ }
+ if (k === 'version') {
+ writeMetainfo(newPkg)
+ }
})
+ if (!metainfoWritten) {
+ writeMetainfo(newPkg)
+ }
+ function writeMetainfo (pkginfo) {
+ pkginfo.lockfileVersion = PKGLOCK_VERSION
+ pkginfo.packageIntegrity = pkgJson && pkgSri.hash(pkgJson)
+ if (process.env.NODE_PRESERVE_SYMLINKS) {
+ pkginfo.preserveSymlinks = process.env.NODE_PRESERVE_SYMLINKS
+ }
+ metainfoWritten = true
+ }
+ return newPkg
+}
+
+function checkPackageFile (dir, name) {
+ const file = path.resolve(dir, name)
+ return fs.readFileAsync(
+ file, 'utf8'
+ ).then((data) => {
+ return {
+ path: file,
+ data: JSON.parse(data),
+ indent: detectIndent(data).indent || 2
+ }
+ }).catch({code: 'ENOENT'}, () => {})
}
// Returns true if the module `node` is only required direcctly as a dev
diff --git a/deps/npm/lib/uninstall.js b/deps/npm/lib/uninstall.js
index 2176048843..9e3d91ac40 100644
--- a/deps/npm/lib/uninstall.js
+++ b/deps/npm/lib/uninstall.js
@@ -63,12 +63,11 @@ Uninstaller.prototype.loadArgMetadata = function (next) {
Uninstaller.prototype.loadAllDepsIntoIdealTree = function (cb) {
validate('F', arguments)
- log.silly('uninstall', 'loadAllDepsIntoIdealtree')
- var saveDeps = getSaveType(this.args)
+ log.silly('uninstall', 'loadAllDepsIntoIdealTree')
+ var saveDeps = getSaveType()
- var cg = this.progress.loadAllDepsIntoIdealTree
+ var cg = this.progress['loadIdealTree:loadAllDepsIntoIdealTree']
var steps = []
-
steps.push(
[removeDeps, this.args, this.idealTree, saveDeps, cg.newGroup('removeDeps')],
[loadExtraneous, this.idealTree, cg.newGroup('loadExtraneous')])
diff --git a/deps/npm/lib/unpublish.js b/deps/npm/lib/unpublish.js
index ee050c2846..4ea8187025 100644
--- a/deps/npm/lib/unpublish.js
+++ b/deps/npm/lib/unpublish.js
@@ -99,20 +99,20 @@ function gotProject (project, version, publishConfig, cb_) {
var registry = mappedConfig.client
// remove from the cache first
- npm.commands.cache(['clean', project, version], function (er) {
- if (er) {
- log.error('unpublish', 'Failed to clean cache')
- return cb(er)
+ // npm.commands.cache(['clean', project, version], function (er) {
+ // if (er) {
+ // log.error('unpublish', 'Failed to clean cache')
+ // return cb(er)
+ // }
+
+ mapToRegistry(project, config, function (er, uri, auth) {
+ if (er) return cb(er)
+
+ var params = {
+ version: version,
+ auth: auth
}
-
- mapToRegistry(project, config, function (er, uri, auth) {
- if (er) return cb(er)
-
- var params = {
- version: version,
- auth: auth
- }
- registry.unpublish(uri, params, cb)
- })
+ registry.unpublish(uri, params, cb)
})
+ // })
}
diff --git a/deps/npm/lib/utils/depr-check.js b/deps/npm/lib/utils/depr-check.js
index 89cf402739..97023ddda1 100644
--- a/deps/npm/lib/utils/depr-check.js
+++ b/deps/npm/lib/utils/depr-check.js
@@ -1,13 +1,23 @@
-var log = require('npmlog')
-
-var deprecated = {}
-var deprWarned = {}
-module.exports = function deprCheck (data) {
- if (deprecated[data._id]) data.deprecated = deprecated[data._id]
- if (data.deprecated) deprecated[data._id] = data.deprecated
- else return
- if (!deprWarned[data._id]) {
- deprWarned[data._id] = true
- log.warn('deprecated', '%s: %s', data._id, data.deprecated)
+'use strict'
+
+const log = require('npmlog')
+
+const deprecated = {}
+const deprWarned = {}
+
+module.exports = deprCheck
+function deprCheck (data) {
+ if (deprecated[data._id]) {
+ data.deprecated = deprecated[data._id]
}
+
+ if (data.deprecated) {
+ deprecated[data._id] = data.deprecated
+ if (!deprWarned[data._id]) {
+ deprWarned[data._id] = true
+ log.warn('deprecated', '%s: %s', data._id, data.deprecated)
+ }
+ }
+
+ return data
}
diff --git a/deps/npm/lib/utils/error-handler.js b/deps/npm/lib/utils/error-handler.js
index 1213902886..8365f39d9d 100644
--- a/deps/npm/lib/utils/error-handler.js
+++ b/deps/npm/lib/utils/error-handler.js
@@ -1,5 +1,6 @@
module.exports = errorHandler
+module.exports.exit = exit
var cbCalled = false
var log = require('npmlog')
@@ -14,6 +15,7 @@ var writeFileAtomic = require('write-file-atomic')
var errorMessage = require('./error-message.js')
var stopMetrics = require('./metrics.js').stop
var mkdirp = require('mkdirp')
+var fs = require('graceful-fs')
var logFileName
function getLogFile () {
@@ -23,9 +25,26 @@ function getLogFile () {
return logFileName
}
+var timings = {
+ version: npm.version,
+ command: process.argv.slice(2),
+ logfile: null
+}
+process.on('timing', function (name, value) {
+ if (timings[name]) { timings[name] += value } else { timings[name] = value }
+})
+
process.on('exit', function (code) {
+ process.emit('timeEnd', 'npm')
log.disableProgress()
- if (!npm.config || !npm.config.loaded) return
+ if (npm.config.loaded && npm.config.get('timing')) {
+ try {
+ timings.logfile = getLogFile()
+ fs.appendFileSync(path.join(npm.config.get('cache'), '_timing.json'), JSON.stringify(timings) + '\n')
+ } catch (_) {
+ // ignore
+ }
+ }
// kill any outstanding stats reporter if it hasn't finished yet
stopMetrics()
@@ -42,25 +61,26 @@ process.on('exit', function (code) {
writeLogFile()
}
- if (wroteLogFile) {
- // just a line break
- if (log.levels[log.level] <= log.levels.error) console.error('')
-
- log.error(
- '',
- [
- 'A complete log of this run can be found in:',
- ' ' + getLogFile()
- ].join('\n')
- )
- wroteLogFile = false
- }
if (code) {
log.verbose('code', code)
}
}
+ if (npm.config.loaded && npm.config.get('timing') && !wroteLogFile) writeLogFile()
+ if (wroteLogFile) {
+ // just a line break
+ if (log.levels[log.level] <= log.levels.error) console.error('')
+
+ log.error(
+ '',
+ [
+ 'A complete log of this run can be found in:',
+ ' ' + getLogFile()
+ ].join('\n')
+ )
+ wroteLogFile = false
+ }
- var doExit = npm.config.get('_exit')
+ var doExit = npm.config.loaded && npm.config.get('_exit')
if (doExit) {
// actually exit.
if (exitCode === 0 && !itWorked) {
@@ -75,7 +95,7 @@ process.on('exit', function (code) {
function exit (code, noLog) {
exitCode = exitCode || process.exitCode || code
- var doExit = npm.config ? npm.config.get('_exit') : true
+ var doExit = npm.config.loaded ? npm.config.get('_exit') : true
log.verbose('exit', [code, doExit])
if (log.level === 'silent') noLog = true
@@ -108,9 +128,6 @@ function exit (code, noLog) {
function reallyExit (er) {
if (er && !code) code = typeof er.errno === 'number' ? er.errno : 1
- // truncate once it's been written.
- log.record.length = 0
-
itWorked = !code
// just emit a fake exit event.
@@ -189,13 +206,28 @@ function errorHandler (er) {
msg.summary.concat(msg.detail).forEach(function (errline) {
log.error.apply(log, errline)
})
+ if (npm.config.get('json')) {
+ var error = {
+ error: {
+ code: er.code,
+ summary: messageText(msg.summary),
+ detail: messageText(msg.detail)
+ }
+ }
+ console.log(JSON.stringify(error, null, 2))
+ }
exit(typeof er.errno === 'number' ? er.errno : 1)
}
+function messageText (msg) {
+ return msg.map(function (line) {
+ return line.slice(1).join(' ')
+ }).join('\n')
+}
+
function writeLogFile () {
if (wroteLogFile) return
- wroteLogFile = true
var os = require('os')
@@ -214,6 +246,10 @@ function writeLogFile () {
})
})
writeFileAtomic.sync(getLogFile(), logOutput)
+
+ // truncate once it's been written.
+ log.record.length = 0
+ wroteLogFile = true
} catch (ex) {
return
}
diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js
index f19d0bf6d3..49aa9124ec 100644
--- a/deps/npm/lib/utils/error-message.js
+++ b/deps/npm/lib/utils/error-message.js
@@ -33,17 +33,8 @@ function errorMessage (er) {
'',
[
'',
- 'Failed at the ' + er.pkgid + ' ' + er.stage + " script '" + er.script + "'.",
- 'Make sure you have the latest version of node.js and npm installed.',
- 'If you do, this is most likely a problem with the ' + er.pkgname + ' package,',
- 'not with npm itself.',
- 'Tell the author that this fails on your system:',
- ' ' + er.script,
- 'You can get information on how to open an issue for this project with:',
- ' npm bugs ' + er.pkgname,
- 'Or if that isn\'t available, you can get their info via:',
- ' npm owner ls ' + er.pkgname,
- 'There is likely additional logging output above.'
+ 'Failed at the ' + er.pkgid + ' ' + er.stage + ' script.',
+ 'This is probably not a problem with npm. There is likely additional logging output above.'
].join('\n')]
)
break
@@ -55,7 +46,6 @@ function errorMessage (er) {
[
'',
'Failed using git.',
- 'This is most likely not a problem with npm itself.',
'Please check if you have git installed and in your PATH.'
].join('\n')
])
@@ -70,7 +60,6 @@ function errorMessage (er) {
'Failed to parse package.json data.',
'package.json must be actual JSON, not just JavaScript.',
'',
- 'This is not a bug in npm.',
'Tell the package author to fix their package.json file.'
].join('\n'),
'JSON.parse'
@@ -187,8 +176,7 @@ function errorMessage (er) {
detail.push([
'network',
[
- 'This is most likely not a problem with npm itself',
- 'and is related to network connectivity.',
+ 'This is a problem related to network connectivity.',
'In most cases you are behind a proxy or have bad network settings.',
'\nIf you are behind a proxy, please make sure that the',
"'proxy' config is set properly. See: 'npm help config'"
@@ -201,7 +189,6 @@ function errorMessage (er) {
detail.push([
'package.json',
[
- 'This is most likely not a problem with npm itself.',
"npm can't find a package.json file in your current directory."
].join('\n')
])
@@ -210,7 +197,6 @@ function errorMessage (er) {
case 'ETARGET':
short.push(['notarget', er.message])
msg = [
- 'This is most likely not a problem with npm itself.',
'In most cases you or one of your dependencies are requesting',
"a package version that doesn't exist."
]
@@ -244,8 +230,8 @@ function errorMessage (er) {
detail.push([
'nospc',
[
- 'This is most likely not a problem with npm itself',
- 'and is related to insufficient space on your system.'
+ 'There appears to be insufficient space on your system to finish.',
+ 'Clear up some disk space and try again.'
].join('\n')
])
break
@@ -255,9 +241,7 @@ function errorMessage (er) {
detail.push([
'rofs',
[
- 'This is most likely not a problem with npm itself',
- 'and is related to the file system being read-only.',
- '\nOften virtualized file systems, or other file systems',
+ 'Often virtualized file systems, or other file systems',
"that don't support symlinks, give this error."
].join('\n')
])
@@ -268,8 +252,7 @@ function errorMessage (er) {
detail.push([
'enoent',
[
- 'This is most likely not a problem with npm itself',
- 'and is related to npm not being able to find a file.',
+ 'This is related to npm not being able to find a file.',
er.file ? "\nCheck if the file '" + er.file + "' is present." : ''
].join('\n')
])
@@ -289,18 +272,6 @@ function errorMessage (er) {
])
break
- case 'EISDIR':
- short.push(['eisdir', er.message])
- detail.push([
- 'eisdir',
- [
- 'This is most likely not a problem with npm itself',
- 'and is related to npm not being able to find a package.json in',
- 'a package you are trying to install.'
- ].join('\n')
- ])
- break
-
default:
short.push(['', er.message || er])
break
diff --git a/deps/npm/lib/utils/gently-rm.js b/deps/npm/lib/utils/gently-rm.js
index 634bf94fcc..7253e873c6 100644
--- a/deps/npm/lib/utils/gently-rm.js
+++ b/deps/npm/lib/utils/gently-rm.js
@@ -29,13 +29,6 @@ function gentlyRm (target, gently, base, cb) {
gently = false
}
- log.silly(
- 'gentlyRm',
- target,
- 'is being', gently ? 'gently removed' : 'purged',
- base ? 'from base ' + base : ''
- )
-
// never rm the root, prefix, or bin dirs
//
// globals included because of `npm link` -- as far as the package
@@ -53,15 +46,13 @@ function gentlyRm (target, gently, base, cb) {
var targetPath = normalize(resolve(npm.prefix, target))
if (prefixes.indexOf(targetPath) !== -1) {
- log.verbose('gentlyRm', targetPath, "is part of npm and can't be removed")
return cb(new Error('May not delete: ' + targetPath))
}
- var options = { log: log.silly.bind(log, 'vacuum-fs') }
+ var options = { }
if (npm.config.get('force') || !gently) options.purge = true
if (base) options.base = normalize(resolve(npm.prefix, base))
if (!gently) {
- log.verbose('gentlyRm', "don't care about contents; nuking", targetPath)
return vacuum(targetPath, options, cb)
}
@@ -95,8 +86,6 @@ function gentlyRm (target, gently, base, cb) {
function thenRemove (toRemove, removeBase) {
if (!toRemove) return cb()
if (removeBase) options.base = removeBase
- log.verbose('gentlyRm', options.purge ? 'Purging' : 'Vacuuming',
- toRemove, 'up to', options.base)
return vacuum(toRemove, options, cb)
}
})
@@ -116,7 +105,7 @@ function isSafeToRm (parent, target, cb) {
// The parent directory or something it symlinks to must eventually be in
// a folder that npm maintains.
if (!parent.managed) {
- log.verbose('gentlyRm', parent.path,
+ log.info('gentlyRm', parent.path,
'is not contained in any diretory npm is known to control or ' +
'any place they link to')
return cb(clobberFail(target.path, 'containing path ' + parent.path +
diff --git a/deps/npm/lib/utils/get-publish-config.js b/deps/npm/lib/utils/get-publish-config.js
index dcbb7b9c0c..fa475434ff 100644
--- a/deps/npm/lib/utils/get-publish-config.js
+++ b/deps/npm/lib/utils/get-publish-config.js
@@ -1,5 +1,5 @@
var Conf = require('../config/core.js').Conf
-var CachingRegClient = require('../cache/caching-client.js')
+var RegClient = require('npm-registry-client')
var log = require('npmlog')
module.exports = getPublishConfig
@@ -18,7 +18,7 @@ function getPublishConfig (publishConfig, defaultConfig, defaultClient) {
s[k] = publishConfig[k]
return s
}, {}))
- client = new CachingRegClient(config)
+ client = new RegClient(config)
}
return { config: config, client: client }
diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js
index 4ab5e0979a..f8b34d7bef 100644
--- a/deps/npm/lib/utils/lifecycle.js
+++ b/deps/npm/lib/utils/lifecycle.js
@@ -55,6 +55,10 @@ function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-scripts is set to true', pkg._id)
pkg.scripts = {}
}
+ if (stage === 'prepublish' && npm.config.get('ignore-prepublish')) {
+ log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-prepublish is set to true', pkg._id)
+ delete pkg.scripts.prepublish
+ }
validWd(wd || path.resolve(npm.dir, pkg.name), function (er, wd) {
if (er) return cb(er)
diff --git a/deps/npm/lib/utils/map-to-registry.js b/deps/npm/lib/utils/map-to-registry.js
index 9e7ce67490..1f9798c09f 100644
--- a/deps/npm/lib/utils/map-to-registry.js
+++ b/deps/npm/lib/utils/map-to-registry.js
@@ -52,7 +52,7 @@ function mapToRegistry (name, config, cb) {
var uri
log.silly('mapToRegistry', 'data', data)
if (data.type === 'remote') {
- uri = data.spec
+ uri = data.fetchSpec
} else {
uri = url.resolve(normalized, name)
}
diff --git a/deps/npm/lib/utils/package-integrity.js b/deps/npm/lib/utils/package-integrity.js
new file mode 100644
index 0000000000..f9560d660e
--- /dev/null
+++ b/deps/npm/lib/utils/package-integrity.js
@@ -0,0 +1,21 @@
+'use strict'
+
+// Utilities for generating and verifying the packageIntegrity field for
+// package-lock
+//
+// Spec: https://github.com/npm/npm/pull/16441
+
+const ssri = require('ssri')
+const SSRI_OPTS = {
+ algorithms: ['sha512']
+}
+
+module.exports.check = check
+function check (pkg, integrity) {
+ return ssri.checkData(JSON.stringify(pkg), integrity, SSRI_OPTS)
+}
+
+module.exports.hash = hash
+function hash (pkg) {
+ return ssri.fromData(JSON.stringify(pkg), SSRI_OPTS).toString()
+}
diff --git a/deps/npm/lib/utils/perf.js b/deps/npm/lib/utils/perf.js
new file mode 100644
index 0000000000..0423263225
--- /dev/null
+++ b/deps/npm/lib/utils/perf.js
@@ -0,0 +1,27 @@
+'use strict'
+var log = require('npmlog')
+var EventEmitter = require('events').EventEmitter
+var perf = new EventEmitter()
+module.exports = perf
+
+var timings = {}
+
+process.on('time', time)
+process.on('timeEnd', timeEnd)
+
+perf.on('time', time)
+perf.on('timeEnd', timeEnd)
+
+function time (name) {
+ timings[name] = Date.now()
+}
+
+function timeEnd (name) {
+ if (name in timings) {
+ process.emit('timing', name, Date.now() - timings[name])
+ delete timings[name]
+ } else {
+ log.silly('timing', "Tried to end timer that doesn't exist:", name)
+ return
+ }
+}
diff --git a/deps/npm/lib/utils/rename.js b/deps/npm/lib/utils/rename.js
deleted file mode 100644
index 43a2f7e104..0000000000
--- a/deps/npm/lib/utils/rename.js
+++ /dev/null
@@ -1,9 +0,0 @@
-/*
-
-This is a stub file to ensure that the following hack doesn't break. This can be removed w/ npm@5.
-
-# Fix bug https://github.com/npm/npm/issues/9863
-RUN cd $(npm root -g)/npm \
- && npm install fs-extra \
- && sed -i -e s/graceful-fs/fs-extra/ -e s/fs\.rename/fs.move/ ./lib/utils/rename.js
-*/
diff --git a/deps/npm/lib/utils/tar.js b/deps/npm/lib/utils/tar.js
index 88cfc6b805..7ebc9d6875 100644
--- a/deps/npm/lib/utils/tar.js
+++ b/deps/npm/lib/utils/tar.js
@@ -1,6 +1,10 @@
+'use strict'
+
// commands for packing and unpacking tarballs
// this file is used by lib/cache.js
+const BB = require('bluebird')
+
var fs = require('graceful-fs')
var path = require('path')
var writeFileAtomic = require('write-file-atomic')
@@ -24,6 +28,11 @@ var moduleName = require('./module-name.js')
var packageId = require('./package-id.js')
var pulseTillDone = require('../utils/pulse-till-done.js')
+const cacache = require('cacache')
+const packAsync = BB.promisify(pack)
+const PassThrough = require('stream').PassThrough
+const pipe = BB.promisify(require('mississippi').pipe)
+
if (process.env.SUDO_UID && myUid === 0) {
if (!isNaN(process.env.SUDO_UID)) myUid = +process.env.SUDO_UID
if (!isNaN(process.env.SUDO_GID)) myGid = +process.env.SUDO_GID
@@ -32,6 +41,18 @@ if (process.env.SUDO_UID && myUid === 0) {
exports.pack = pack
exports.unpack = unpack
+module.exports.packToStream = packToStream
+function packToStream (mani, dir) {
+ const stream = new PassThrough()
+ cacache.tmp.withTmp(npm.tmp, (tmp) => {
+ const tmpTarget = path.join(tmp, 'package.tgz')
+ return packAsync(tmpTarget, dir, mani).then(() => {
+ return pipe(fs.createReadStream(tmpTarget), stream)
+ })
+ }).catch((err) => stream.emit('error', err))
+ return stream
+}
+
function pack (tarball, folder, pkg, cb) {
log.verbose('tar pack', [tarball, folder])
@@ -45,14 +66,11 @@ function pack (tarball, folder, pkg, cb) {
// we require this at runtime due to load-order issues, because recursive
// requires fail if you replace the exports object, and we do, not in deps, but
// in a dep of it.
- var recalculateMetadata = require('../install/deps.js').recalculateMetadata
+ var computeMetadata = require('../install/deps.js').computeMetadata
readPackageTree(folder, pulseTillDone('pack:readTree:' + packageId(pkg), iferr(cb, function (tree) {
- var recalcGroup = log.newGroup('pack:recalc:' + packageId(pkg))
- recalculateMetadata(tree, recalcGroup, iferr(cb, function () {
- recalcGroup.finish()
- pack_(tarball, folder, tree, pkg, pulseTillDone('pack:' + packageId(pkg), cb))
- }))
+ computeMetadata(tree)
+ pack_(tarball, folder, tree, pkg, pulseTillDone('pack:' + packageId(pkg), cb))
})))
}
})
@@ -103,7 +121,9 @@ BundledPacker.prototype.applyIgnores = function (entry, partial, entryObj) {
entry.match(/^\..*\.swp$/) ||
entry === '.DS_Store' ||
entry.match(/^\._/) ||
- entry.match(/^.*\.orig$/)
+ entry.match(/^.*\.orig$/) ||
+ // Package locks are never allowed in tarballs -- use shrinkwrap instead
+ entry === 'package-lock.json'
) {
return false
}
diff --git a/deps/npm/lib/version.js b/deps/npm/lib/version.js
index f3caf4332a..42f7d2a68d 100644
--- a/deps/npm/lib/version.js
+++ b/deps/npm/lib/version.js
@@ -1,19 +1,19 @@
-// npm version <newver>
-
-module.exports = version
-
-var semver = require('semver')
-var path = require('path')
-var fs = require('graceful-fs')
-var writeFileAtomic = require('write-file-atomic')
-var chain = require('slide').chain
-var log = require('npmlog')
-var npm = require('./npm.js')
-var git = require('./utils/git.js')
-var assert = require('assert')
-var lifecycle = require('./utils/lifecycle.js')
-var parseJSON = require('./utils/parse-json.js')
-var output = require('./utils/output.js')
+'use strict'
+const BB = require('bluebird')
+
+const assert = require('assert')
+const chain = require('slide').chain
+const detectIndent = require('detect-indent')
+const fs = BB.promisifyAll(require('graceful-fs'))
+const git = require('./utils/git.js')
+const lifecycle = require('./utils/lifecycle.js')
+const log = require('npmlog')
+const npm = require('./npm.js')
+const output = require('./utils/output.js')
+const parseJSON = require('./utils/parse-json.js')
+const path = require('path')
+const semver = require('semver')
+const writeFileAtomic = require('write-file-atomic')
version.usage = 'npm version [<newversion> | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]' +
'\n(run in package dir)\n' +
@@ -23,6 +23,8 @@ version.usage = 'npm version [<newversion> | major | minor | patch | premajor |
'published version\n' +
"'npm ls' to inspect current package/dependency versions"
+// npm version <newver>
+module.exports = version
function version (args, silent, cb_) {
if (typeof cb_ !== 'function') {
cb_ = silent
@@ -30,7 +32,7 @@ function version (args, silent, cb_) {
}
if (args.length > 1) return cb_(version.usage)
- readPackage(function (er, data) {
+ readPackage(function (er, data, indent) {
if (!args.length) return dump(data, cb_)
if (er) {
@@ -89,7 +91,9 @@ function persistVersion (newVersion, silent, data, localData, cb_) {
localData = {}
}
- if (data.version === newVersion) return cb_(new Error('Version not changed'))
+ if (!npm.config.get('allow-same-version') && data.version === newVersion) {
+ return cb_(new Error('Version not changed, might want --allow-same-version'))
+ }
data.version = newVersion
var lifecycleData = Object.create(data)
lifecycleData._id = data.name + '@' + newVersion
@@ -107,16 +111,17 @@ function persistVersion (newVersion, silent, data, localData, cb_) {
function readPackage (cb) {
var packagePath = path.join(npm.localPrefix, 'package.json')
- fs.readFile(packagePath, function (er, data) {
+ fs.readFile(packagePath, 'utf8', function (er, data) {
if (er) return cb(new Error(er))
- if (data) data = data.toString()
+ var indent
try {
+ indent = detectIndent(data).indent || ' '
data = JSON.parse(data)
} catch (e) {
er = e
data = null
}
- cb(er, data)
+ cb(er, data, indent)
})
}
@@ -126,42 +131,60 @@ function updatePackage (newVersion, silent, cb_) {
cb_(er)
}
- readPackage(function (er, data) {
+ readPackage(function (er, data, indent) {
if (er) return cb(new Error(er))
data.version = newVersion
- write(data, 'package.json', cb)
+ write(data, 'package.json', indent, cb)
})
}
function commit (localData, newVersion, cb) {
- updateShrinkwrap(newVersion, function (er, hasShrinkwrap) {
+ updateShrinkwrap(newVersion, function (er, hasShrinkwrap, hasLock) {
if (er || !localData.hasGit) return cb(er)
localData.hasShrinkwrap = hasShrinkwrap
+ localData.hasPackageLock = hasLock
_commit(newVersion, localData, cb)
})
}
-function updateShrinkwrap (newVersion, cb) {
- fs.readFile(path.join(npm.localPrefix, 'npm-shrinkwrap.json'), function (er, data) {
- if (er && er.code === 'ENOENT') return cb(null, false)
+const SHRINKWRAP = 'npm-shrinkwrap.json'
+const PKGLOCK = 'package-lock.json'
- try {
- data = data.toString()
- data = parseJSON(data)
- } catch (er) {
- log.error('version', 'Bad npm-shrinkwrap.json data')
- return cb(er)
- }
+function readLockfile (name) {
+ return fs.readFileAsync(
+ path.join(npm.localPrefix, name), 'utf8'
+ ).catch({code: 'ENOENT'}, () => null)
+}
- data.version = newVersion
- write(data, 'npm-shrinkwrap.json', function (er) {
- if (er) {
- log.error('version', 'Bad npm-shrinkwrap.json data')
- return cb(er)
+function updateShrinkwrap (newVersion, cb) {
+ BB.join(
+ readLockfile(SHRINKWRAP),
+ readLockfile(PKGLOCK),
+ (shrinkwrap, lockfile) => {
+ if (!shrinkwrap && !lockfile) {
+ return cb(null, false, false)
}
- cb(null, true)
- })
- })
+ const file = shrinkwrap ? SHRINKWRAP : PKGLOCK
+ let data
+ let indent
+ try {
+ data = parseJSON(shrinkwrap || lockfile)
+ indent = detectIndent(shrinkwrap || lockfile).indent || ' '
+ } catch (err) {
+ log.error('version', `Bad ${file} data.`)
+ return cb(err)
+ }
+ data.version = newVersion
+ write(data, file, indent, (err) => {
+ if (err) {
+ log.error('version', `Failed to update version in ${file}`)
+ return cb(err)
+ } else {
+ return cb(null, !!shrinkwrap, !!lockfile)
+ }
+ })
+ }
+ )
}
function dump (data, cb) {
@@ -264,6 +287,7 @@ function _commit (version, localData, cb) {
[
git.chainableExec([ 'add', packagePath ], options),
localData.hasShrinkwrap && git.chainableExec([ 'add', path.join(npm.localPrefix, 'npm-shrinkwrap.json') ], options),
+ localData.hasPackageLock && git.chainableExec([ 'add', path.join(npm.localPrefix, 'package-lock.json') ], options),
git.chainableExec([ 'commit', '-m', message ], options),
!localData.existingTag && git.chainableExec([
'tag',
@@ -276,14 +300,14 @@ function _commit (version, localData, cb) {
)
}
-function write (data, file, cb) {
+function write (data, file, indent, cb) {
assert(data && typeof data === 'object', 'must pass data to version write')
assert(typeof file === 'string', 'must pass filename to write to version write')
log.verbose('version.write', 'data', data, 'to', file)
writeFileAtomic(
path.join(npm.localPrefix, file),
- new Buffer(JSON.stringify(data, null, 2) + '\n'),
+ new Buffer(JSON.stringify(data, null, indent || 2) + '\n'),
cb
)
}
diff --git a/deps/npm/lib/view.js b/deps/npm/lib/view.js
index 64f5aa79c4..733cf60e5f 100644
--- a/deps/npm/lib/view.js
+++ b/deps/npm/lib/view.js
@@ -68,7 +68,12 @@ function view (args, silent, cb) {
if (!args.length) args = ['.']
var pkg = args.shift()
- var nv = npa(pkg)
+ var nv
+ if (/^[.]@/.test(pkg)) {
+ nv = npa.resolve(null, pkg.slice(2))
+ } else {
+ nv = npa(pkg)
+ }
var name = nv.name
var local = (name === '.' || !name)