summaryrefslogtreecommitdiff
path: root/deps/npm/lib
diff options
context:
space:
mode:
authorKat Marchán <kzm@sykosomatic.org>2016-12-18 20:22:09 -0800
committerMyles Borins <myles.borins@gmail.com>2016-12-28 15:30:50 -0500
commit9946519fba73687d34dccd5813808252a1973f3c (patch)
tree74d7194d2a00743bcd905027195e7e8000c5ac5a /deps/npm/lib
parent4d3b487b791606ea965f6280ce0eeea03d79b660 (diff)
downloadandroid-node-v8-9946519fba73687d34dccd5813808252a1973f3c.tar.gz
android-node-v8-9946519fba73687d34dccd5813808252a1973f3c.tar.bz2
android-node-v8-9946519fba73687d34dccd5813808252a1973f3c.zip
deps: upgrade npm to 4.0.5
PR-URL: https://github.com/nodejs/node/pull/10330 Reviewed-By: Myles Borins <myles.borins@gmail.com>
Diffstat (limited to 'deps/npm/lib')
-rw-r--r--deps/npm/lib/adduser.js2
-rw-r--r--deps/npm/lib/build.js5
-rw-r--r--deps/npm/lib/cache/add-local.js20
-rw-r--r--deps/npm/lib/cache/caching-client.js3
-rw-r--r--deps/npm/lib/cache/update-index.js105
-rw-r--r--deps/npm/lib/config/cmd-list.js1
-rw-r--r--deps/npm/lib/config/defaults.js16
-rw-r--r--deps/npm/lib/dedupe.js13
-rw-r--r--deps/npm/lib/install.js34
-rw-r--r--deps/npm/lib/install/action/build.js2
-rw-r--r--deps/npm/lib/install/action/extract.js22
-rw-r--r--deps/npm/lib/install/action/fetch.js4
-rw-r--r--deps/npm/lib/install/action/finalize.js25
-rw-r--r--deps/npm/lib/install/action/global-install.js2
-rw-r--r--deps/npm/lib/install/action/global-link.js2
-rw-r--r--deps/npm/lib/install/action/install.js4
-rw-r--r--deps/npm/lib/install/action/move.js2
-rw-r--r--deps/npm/lib/install/action/postinstall.js4
-rw-r--r--deps/npm/lib/install/action/preinstall.js7
-rw-r--r--deps/npm/lib/install/action/prepare.js27
-rw-r--r--deps/npm/lib/install/action/prepublish.js8
-rw-r--r--deps/npm/lib/install/action/remove.js2
-rw-r--r--deps/npm/lib/install/action/test.js6
-rw-r--r--deps/npm/lib/install/action/update-linked.js10
-rw-r--r--deps/npm/lib/install/actions.js15
-rw-r--r--deps/npm/lib/install/deps.js18
-rw-r--r--deps/npm/lib/install/diff-trees.js6
-rw-r--r--deps/npm/lib/install/inflate-bundled.js2
-rw-r--r--deps/npm/lib/install/inflate-shrinkwrap.js14
-rw-r--r--deps/npm/lib/install/module-staging-path.js (renamed from deps/npm/lib/install/build-path.js)4
-rw-r--r--deps/npm/lib/install/mutate-into-logical-tree.js2
-rw-r--r--deps/npm/lib/install/read-shrinkwrap.js1
-rw-r--r--deps/npm/lib/install/validate-args.js7
-rw-r--r--deps/npm/lib/npm.js31
-rw-r--r--deps/npm/lib/outdated.js1
-rw-r--r--deps/npm/lib/publish.js63
-rw-r--r--deps/npm/lib/search.js321
-rw-r--r--deps/npm/lib/search/all-package-metadata.js311
-rw-r--r--deps/npm/lib/search/format-package-stream.js175
-rw-r--r--deps/npm/lib/search/package-filter.js41
-rw-r--r--deps/npm/lib/shrinkwrap.js4
-rw-r--r--deps/npm/lib/tag.js42
-rw-r--r--deps/npm/lib/utils/error-handler.js2
-rw-r--r--deps/npm/lib/utils/lifecycle.js33
-rw-r--r--deps/npm/lib/utils/unsupported.js33
45 files changed, 879 insertions, 573 deletions
diff --git a/deps/npm/lib/adduser.js b/deps/npm/lib/adduser.js
index e04af0556b..7f3975175a 100644
--- a/deps/npm/lib/adduser.js
+++ b/deps/npm/lib/adduser.js
@@ -137,8 +137,6 @@ function save (c, u, cb) {
// there may be a saved scope and no --registry (for login)
if (scope) {
- if (scope.charAt(0) !== '@') scope = '@' + scope
-
var scopedRegistry = npm.config.get(scope + ':registry')
var cliRegistry = npm.config.get('registry', 'cli')
if (scopedRegistry && !cliRegistry) uri = scopedRegistry
diff --git a/deps/npm/lib/build.js b/deps/npm/lib/build.js
index fbe78c746d..e6f600799d 100644
--- a/deps/npm/lib/build.js
+++ b/deps/npm/lib/build.js
@@ -66,8 +66,7 @@ function build_ (global, didPre, didRB) {
[linkStuff, pkg, folder, global, didRB],
[writeBuiltinConf, pkg, folder],
didPre !== build._noLC && [lifecycle, pkg, 'install', folder],
- didPre !== build._noLC && [lifecycle, pkg, 'postinstall', folder],
- didPre !== build._noLC && npm.config.get('npat') && [lifecycle, pkg, 'test', folder]
+ didPre !== build._noLC && [lifecycle, pkg, 'postinstall', folder]
],
cb)
})
@@ -191,7 +190,7 @@ function linkBins (pkg, folder, parent, gtop, cb) {
}
var binRoot = gtop ? npm.globalBin
: path.resolve(parent, '.bin')
- log.verbose('link bins', [pkg.bin, binRoot, gtop])
+ log.verbose('linkBins', [pkg.bin, binRoot, gtop])
asyncMap(Object.keys(pkg.bin), function (b, cb) {
linkBin(
diff --git a/deps/npm/lib/cache/add-local.js b/deps/npm/lib/cache/add-local.js
index 0794b92144..221505625e 100644
--- a/deps/npm/lib/cache/add-local.js
+++ b/deps/npm/lib/cache/add-local.js
@@ -8,6 +8,7 @@ var log = require('npmlog')
var npm = require('../npm.js')
var tar = require('../utils/tar.js')
var deprCheck = require('../utils/depr-check.js')
+var prepublishWarning = require('../utils/warn-deprecated.js')('prepublish-on-install')
var getCacheStat = require('./get-stat.js')
var cachedPackageRoot = require('./cached-package-root.js')
var addLocalTarball = require('./add-local-tarball.js')
@@ -15,6 +16,7 @@ var sha = require('sha')
var inflight = require('inflight')
var lifecycle = require('../utils/lifecycle.js')
var iferr = require('iferr')
+var chain = require('slide').chain
module.exports = addLocal
@@ -94,7 +96,23 @@ function addLocalDirectory (p, pkgData, shasum, cb) {
if (er) return wrapped(er)
var doPrePublish = !pathIsInside(p, npm.tmp)
if (doPrePublish) {
- lifecycle(data, 'prepublish', p, iferr(wrapped, thenPack))
+ // TODO: for `npm@5`, change the behavior and remove this warning.
+ // see https://github.com/npm/npm/issues/10074 for details
+ if (data && data.scripts && data.scripts.prepublish) {
+ prepublishWarning([
+ 'As of npm@5, `prepublish` scripts will run only for `npm publish`.',
+ '(In npm@4 and previous versions, it also runs for `npm install`.)',
+ 'See the deprecation note in `npm help scripts` for more information.'
+ ])
+ }
+
+ chain(
+ [
+ [lifecycle, data, 'prepublish', p],
+ [lifecycle, data, 'prepare', p]
+ ],
+ iferr(wrapped, thenPack)
+ )
} else {
thenPack()
}
diff --git a/deps/npm/lib/cache/caching-client.js b/deps/npm/lib/cache/caching-client.js
index a6bcee373d..61fb83b311 100644
--- a/deps/npm/lib/cache/caching-client.js
+++ b/deps/npm/lib/cache/caching-client.js
@@ -214,6 +214,7 @@ function adaptConfig (config) {
log: log,
defaultTag: config.get('tag'),
couchToken: config.get('_token'),
- maxSockets: config.get('maxsockets')
+ maxSockets: config.get('maxsockets'),
+ scope: npm.projectScope
}
}
diff --git a/deps/npm/lib/cache/update-index.js b/deps/npm/lib/cache/update-index.js
deleted file mode 100644
index a872b03490..0000000000
--- a/deps/npm/lib/cache/update-index.js
+++ /dev/null
@@ -1,105 +0,0 @@
-module.exports = updateIndex
-
-var fs = require('graceful-fs')
-var assert = require('assert')
-var path = require('path')
-var mkdir = require('mkdirp')
-var chownr = require('chownr')
-var npm = require('../npm.js')
-var log = require('npmlog')
-var cacheFile = require('npm-cache-filename')
-var getCacheStat = require('./get-stat.js')
-var mapToRegistry = require('../utils/map-to-registry.js')
-var pulseTillDone = require('../utils/pulse-till-done.js')
-var parseJSON = require('../utils/parse-json.js')
-
-/* /-/all is special.
- * It uses timestamp-based caching and partial updates,
- * because it is a monster.
- */
-function updateIndex (staleness, cb) {
- assert(typeof cb === 'function', 'must pass callback to updateIndex')
-
- mapToRegistry('-/all', npm.config, function (er, uri, auth) {
- if (er) return cb(er)
-
- var params = {
- timeout: staleness,
- follow: true,
- staleOk: true,
- auth: auth
- }
- var cacheBase = cacheFile(npm.config.get('cache'))(uri)
- var cachePath = path.join(cacheBase, '.cache.json')
- log.info('updateIndex', cachePath)
-
- getCacheStat(function (er, st) {
- if (er) return cb(er)
-
- mkdir(cacheBase, function (er, made) {
- if (er) return cb(er)
-
- fs.readFile(cachePath, function (er, data) {
- if (er) {
- log.warn('', 'Building the local index for the first time, please be patient')
- return updateIndex_(uri, params, {}, cachePath, cb)
- }
-
- chownr(made || cachePath, st.uid, st.gid, function (er) {
- if (er) return cb(er)
-
- data = parseJSON.noExceptions(data)
- if (!data) {
- fs.writeFile(cachePath, '{}', function (er) {
- if (er) return cb(new Error('Broken cache.'))
-
- log.warn('', 'Building the local index for the first time, please be patient')
- return updateIndex_(uri, params, {}, cachePath, cb)
- })
- }
-
- var t = +data._updated || 0
- // use the cache and update in the background if it's not too old
- if (Date.now() - t < 60000) {
- cb(null, data)
- cb = function () {}
- }
-
- if (t === 0) {
- log.warn('', 'Building the local index for the first time, please be patient')
- } else {
- log.verbose('updateIndex', 'Cached search data present with timestamp', t)
- uri += '/since?stale=update_after&startkey=' + t
- }
- updateIndex_(uri, params, data, cachePath, cb)
- })
- })
- })
- })
- })
-}
-
-function updateIndex_ (all, params, data, cachePath, cb) {
- log.silly('update-index', 'fetching', all)
- npm.registry.request(all, params, pulseTillDone('updateIndex', function (er, updates, _, res) {
- if (er) return cb(er, data)
-
- var headers = res.headers
- var updated = updates._updated || Date.parse(headers.date)
-
- Object.keys(updates).forEach(function (p) { data[p] = updates[p] })
-
- data._updated = updated
- getCacheStat(function (er, st) {
- if (er) return cb(er)
-
- fs.writeFile(cachePath, JSON.stringify(data), function (er) {
- delete data._updated
- if (er) return cb(er)
- chownr(cachePath, st.uid, st.gid, function (er) {
- cb(er, data)
- })
- })
- })
- }))
-}
diff --git a/deps/npm/lib/config/cmd-list.js b/deps/npm/lib/config/cmd-list.js
index d66a5722db..eb79e9df06 100644
--- a/deps/npm/lib/config/cmd-list.js
+++ b/deps/npm/lib/config/cmd-list.js
@@ -64,7 +64,6 @@ var cmdList = [
'publish',
'star',
'stars',
- 'tag',
'adduser',
'login', // This is an alias for `adduser` but it can be confusing
'logout',
diff --git a/deps/npm/lib/config/defaults.js b/deps/npm/lib/config/defaults.js
index e93c1e57ec..05a06a4e20 100644
--- a/deps/npm/lib/config/defaults.js
+++ b/deps/npm/lib/config/defaults.js
@@ -168,7 +168,6 @@ Object.defineProperty(exports, 'defaults', {get: function () {
maxsockets: 50,
message: '%s',
'node-version': process.version,
- npat: false,
'onload-script': false,
only: null,
optional: true,
@@ -193,9 +192,10 @@ Object.defineProperty(exports, 'defaults', {get: function () {
'save-optional': false,
'save-prefix': '^',
scope: '',
+ 'scripts-prepend-node-path': 'warn-only',
searchopts: '',
searchexclude: null,
- searchsort: 'name',
+ searchstaleness: 15 * 60,
shell: osenv.shell(),
shrinkwrap: true,
'sign-git-tag': false,
@@ -280,7 +280,6 @@ exports.types = {
maxsockets: Number,
message: String,
'node-version': [null, semver],
- npat: Boolean,
'onload-script': [null, String],
only: [null, 'dev', 'development', 'prod', 'production'],
optional: Boolean,
@@ -300,15 +299,10 @@ exports.types = {
'save-optional': Boolean,
'save-prefix': String,
scope: String,
+ 'scripts-prepend-node-path': [false, true, 'auto', 'warn-only'],
searchopts: String,
searchexclude: [null, String],
- searchsort: [
- 'name', '-name',
- 'description', '-description',
- 'author', '-author',
- 'date', '-date',
- 'keywords', '-keywords'
- ],
+ searchstaleness: Number,
shell: String,
shrinkwrap: Boolean,
'sign-git-tag': Boolean,
@@ -370,8 +364,6 @@ exports.shorthands = {
help: ['--usage'],
v: ['--version'],
f: ['--force'],
- gangster: ['--force'],
- gangsta: ['--force'],
desc: ['--description'],
'no-desc': ['--no-description'],
'local': ['--no-global'],
diff --git a/deps/npm/lib/dedupe.js b/deps/npm/lib/dedupe.js
index b99a9a1fd1..d1bd374e9b 100644
--- a/deps/npm/lib/dedupe.js
+++ b/deps/npm/lib/dedupe.js
@@ -48,19 +48,6 @@ function Deduper (where, dryrun) {
}
util.inherits(Deduper, Installer)
-Deduper.prototype.normalizeTree = function (log, cb) {
- validate('OF', arguments)
- log.silly('dedupe', 'normalizeTree')
- // If we're looking globally only look at the one package we're operating on
- if (npm.config.get('global')) {
- var args = this.args
- this.currentTree.children = this.currentTree.children.filter(function (child) {
- return args.filter(function (arg) { return arg === moduleName(child) }).length
- })
- }
- Installer.prototype.normalizeTree.call(this, log, cb)
-}
-
Deduper.prototype.loadIdealTree = function (cb) {
validate('F', arguments)
log.silly('install', 'loadIdealTree')
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
index 4c3d0def78..0cf6a2b2d6 100644
--- a/deps/npm/lib/install.js
+++ b/deps/npm/lib/install.js
@@ -208,7 +208,6 @@ function Installer (where, dryrun, args) {
this.progress = {}
this.noPackageJsonOk = !!args.length
this.topLevelLifecycles = !args.length
- this.npat = npm.config.get('npat')
this.dev = npm.config.get('dev') || (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) || /^dev(elopment)?$/.test(npm.config.get('only'))
this.prod = !/^dev(elopment)?$/.test(npm.config.get('only'))
this.rollback = npm.config.get('rollback')
@@ -485,10 +484,6 @@ Installer.prototype.executeActions = function (cb) {
[doParallelActions, 'update-linked', staging, todo, trackLifecycle.newGroup('update-linked')],
[doSerialActions, 'install', staging, todo, trackLifecycle.newGroup('install')],
[doSerialActions, 'postinstall', staging, todo, trackLifecycle.newGroup('postinstall')])
- if (this.npat) {
- steps.push(
- [doParallelActions, 'test', staging, todo, trackLifecycle.newGroup('npat')])
- }
var self = this
chain(steps, function (er) {
@@ -507,9 +502,10 @@ Installer.prototype.rollbackFailedOptional = function (staging, actionsToRun, cb
}).filter(function (pkg) {
return pkg.failed && pkg.rollback
})
+ var top = this.currentTree.path
asyncMap(failed, function (pkg, next) {
asyncMap(pkg.rollback, function (rollback, done) {
- rollback(staging, pkg, done)
+ rollback(top, staging, pkg, done)
}, next)
}, cb)
}
@@ -552,13 +548,9 @@ Installer.prototype.runPostinstallTopLevelLifecycles = function (cb) {
[doOneAction, 'build', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('build:.')],
[doOneAction, 'install', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('install:.')],
[doOneAction, 'postinstall', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('postinstall:.')])
- if (this.npat) {
- steps.push(
- [doOneAction, 'test', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('npat:.')])
- }
if (this.dev) {
steps.push(
- [doOneAction, 'prepublish', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('prepublish')])
+ [doOneAction, 'prepare', this.idealTree.path, this.idealTree, trackLifecycle.newGroup('prepare')])
}
chain(steps, cb)
}
@@ -607,19 +599,13 @@ Installer.prototype.readLocalPackageData = function (cb) {
return cb(er)
}
if (!currentTree.package) currentTree.package = {}
- if (currentTree.package._shrinkwrap) {
- self.loadArgMetadata(cb)
- } else {
- fs.readFile(path.join(self.where, 'npm-shrinkwrap.json'), function (er, data) {
- if (er) return self.loadArgMetadata(cb)
- try {
- currentTree.package._shrinkwrap = parseJSON(data)
- } catch (ex) {
- return cb(ex)
- }
- return self.loadArgMetadata(cb)
- })
- }
+ readShrinkwrap(currentTree, function (err) {
+ if (err) {
+ cb(err)
+ } else {
+ self.loadArgMetadata(cb)
+ }
+ })
}))
}))
}
diff --git a/deps/npm/lib/install/action/build.js b/deps/npm/lib/install/action/build.js
index ffb870d686..f59b852e84 100644
--- a/deps/npm/lib/install/action/build.js
+++ b/deps/npm/lib/install/action/build.js
@@ -4,7 +4,7 @@ var build = require('../../build.js')
var npm = require('../../npm.js')
var packageId = require('../../utils/package-id.js')
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
log.silly('build', packageId(pkg))
chain([
[build.linkStuff, pkg.package, pkg.path, npm.config.get('global'), true],
diff --git a/deps/npm/lib/install/action/extract.js b/deps/npm/lib/install/action/extract.js
index 60aae036c2..2c8a995af6 100644
--- a/deps/npm/lib/install/action/extract.js
+++ b/deps/npm/lib/install/action/extract.js
@@ -10,35 +10,35 @@ var npm = require('../../npm.js')
var moduleName = require('../../utils/module-name.js')
var packageId = require('../../utils/package-id.js')
var cache = require('../../cache.js')
-var buildPath = require('../build-path.js')
+var moduleStagingPath = require('../module-staging-path.js')
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
log.silly('extract', packageId(pkg))
var up = npm.config.get('unsafe-perm')
var user = up ? null : npm.config.get('user')
var group = up ? null : npm.config.get('group')
- cache.unpack(pkg.package.name, pkg.package.version, buildpath, null, null, user, group,
- andUpdatePackageJson(pkg, buildpath, andStageBundledChildren(pkg, buildpath, log, next)))
+ var extractTo = moduleStagingPath(staging, pkg)
+ cache.unpack(pkg.package.name, pkg.package.version, extractTo, null, null, user, group,
+ andUpdatePackageJson(pkg, staging, extractTo, andStageBundledChildren(pkg, staging, extractTo, log, next)))
}
-function andUpdatePackageJson (pkg, buildpath, next) {
+function andUpdatePackageJson (pkg, staging, extractTo, next) {
return iferr(next, function () {
- updatePackageJson(pkg, buildpath, next)
+ updatePackageJson(pkg, extractTo, next)
})
}
-function andStageBundledChildren (pkg, buildpath, log, next) {
- var staging = path.resolve(buildpath, '..')
+function andStageBundledChildren (pkg, staging, extractTo, log, next) {
return iferr(next, function () {
for (var i = 0; i < pkg.children.length; ++i) {
var c = pkg.children[i]
if (!c.package.name) return next(c.error)
}
- asyncMap(pkg.children, andStageBundledModule(pkg, staging, buildpath), cleanupBundled)
+ asyncMap(pkg.children, andStageBundledModule(pkg, staging, extractTo), cleanupBundled)
})
function cleanupBundled () {
- gentlyRm(path.join(buildpath, 'node_modules'), next)
+ gentlyRm(path.join(extractTo, 'node_modules'), next)
}
}
@@ -62,7 +62,7 @@ function warn (pkg, code, msg) {
function stageBundledModule (bundler, child, staging, parentPath, next) {
var stageFrom = path.join(parentPath, 'node_modules', child.package.name)
- var stageTo = buildPath(staging, child)
+ var stageTo = moduleStagingPath(staging, child)
asyncMap(child.children, andStageBundledModule(bundler, staging, stageFrom), iferr(next, moveModule))
diff --git a/deps/npm/lib/install/action/fetch.js b/deps/npm/lib/install/action/fetch.js
index a706b1967b..0e9146a0d5 100644
--- a/deps/npm/lib/install/action/fetch.js
+++ b/deps/npm/lib/install/action/fetch.js
@@ -3,7 +3,7 @@
// var packageId = require('../../utils/package-id.js')
// var moduleName = require('../../utils/module-name.js')
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
next()
/*
// FIXME: Unnecessary as long as we have to have the tarball to resolve all deps, which
@@ -24,6 +24,6 @@ module.exports = function (top, buildpath, pkg, log, next) {
name = pkg.package._requested.raw
}
log.silly('fetch', packageId(pkg))
- cache.add(name, version, top, false, next)
+ cache.add(name, version, pkg.parent.path, false, next)
*/
}
diff --git a/deps/npm/lib/install/action/finalize.js b/deps/npm/lib/install/action/finalize.js
index ad278df20f..acc11cc4a7 100644
--- a/deps/npm/lib/install/action/finalize.js
+++ b/deps/npm/lib/install/action/finalize.js
@@ -5,10 +5,14 @@ var fs = require('graceful-fs')
var mkdirp = require('mkdirp')
var asyncMap = require('slide').asyncMap
var rename = require('../../utils/rename.js')
+var gentlyRm = require('../../utils/gently-rm')
+var moduleStagingPath = require('../module-staging-path.js')
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
log.silly('finalize', pkg.path)
+ var extractedTo = moduleStagingPath(staging, pkg)
+
var delpath = path.join(path.dirname(pkg.path), '.' + path.basename(pkg.path) + '.DELETE')
mkdirp(path.resolve(pkg.path, '..'), whenParentExists)
@@ -22,7 +26,7 @@ module.exports = function (top, buildpath, pkg, log, next) {
function destStatted (doesNotExist) {
if (doesNotExist) {
- rename(buildpath, pkg.path, whenMoved)
+ rename(extractedTo, pkg.path, whenMoved)
} else {
moveAway()
}
@@ -40,7 +44,7 @@ module.exports = function (top, buildpath, pkg, log, next) {
function whenOldMovedAway (renameEr) {
if (renameEr) return next(renameEr)
- rename(buildpath, pkg.path, whenConflictMoved)
+ rename(extractedTo, pkg.path, whenConflictMoved)
}
function whenConflictMoved (renameEr) {
@@ -76,17 +80,6 @@ module.exports = function (top, buildpath, pkg, log, next) {
}
}
-module.exports.rollback = function (buildpath, pkg, next) {
- var top = path.resolve(buildpath, '..')
- rimraf(pkg.path, function () {
- removeEmptyParents(pkg.path)
- })
- function removeEmptyParents (pkgdir) {
- if (path.relative(top, pkgdir)[0] === '.') return next()
- fs.rmdir(pkgdir, function (er) {
- // FIXME: Make sure windows does what we want here
- if (er && er.code !== 'ENOENT') return next()
- removeEmptyParents(path.resolve(pkgdir, '..'))
- })
- }
+module.exports.rollback = function (top, staging, pkg, next) {
+ gentlyRm(pkg.path, false, top, next)
}
diff --git a/deps/npm/lib/install/action/global-install.js b/deps/npm/lib/install/action/global-install.js
index a2aa594025..e4fd8d11d1 100644
--- a/deps/npm/lib/install/action/global-install.js
+++ b/deps/npm/lib/install/action/global-install.js
@@ -4,7 +4,7 @@ var npm = require('../../npm.js')
var Installer = require('../../install.js').Installer
var packageId = require('../../utils/package-id.js')
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
log.silly('global-install', packageId(pkg))
var globalRoot = path.resolve(npm.globalDir, '..')
npm.config.set('global', true)
diff --git a/deps/npm/lib/install/action/global-link.js b/deps/npm/lib/install/action/global-link.js
index daad48e974..f109e5b88a 100644
--- a/deps/npm/lib/install/action/global-link.js
+++ b/deps/npm/lib/install/action/global-link.js
@@ -2,7 +2,7 @@
var npm = require('../../npm.js')
var packageId = require('../../utils/package-id.js')
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
log.silly('global-link', packageId(pkg))
npm.link(pkg.package.name, next)
}
diff --git a/deps/npm/lib/install/action/install.js b/deps/npm/lib/install/action/install.js
index f7b3295534..754bff43ff 100644
--- a/deps/npm/lib/install/action/install.js
+++ b/deps/npm/lib/install/action/install.js
@@ -2,7 +2,7 @@
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
-module.exports = function (top, buildpath, pkg, log, next) {
- log.silly('install', packageId(pkg), buildpath)
+module.exports = function (staging, pkg, log, next) {
+ log.silly('install', packageId(pkg))
lifecycle(pkg.package, 'install', pkg.path, false, false, next)
}
diff --git a/deps/npm/lib/install/action/move.js b/deps/npm/lib/install/action/move.js
index ee6bdb9525..3f29379f2b 100644
--- a/deps/npm/lib/install/action/move.js
+++ b/deps/npm/lib/install/action/move.js
@@ -16,7 +16,7 @@ var rename = require('../../utils/rename.js')
folders.
*/
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
log.silly('move', pkg.fromPath, pkg.path)
chain([
[lifecycle, pkg.package, 'preuninstall', pkg.fromPath, false, true],
diff --git a/deps/npm/lib/install/action/postinstall.js b/deps/npm/lib/install/action/postinstall.js
index 4ca4c8baec..197dc1e6f9 100644
--- a/deps/npm/lib/install/action/postinstall.js
+++ b/deps/npm/lib/install/action/postinstall.js
@@ -2,7 +2,7 @@
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
-module.exports = function (top, buildpath, pkg, log, next) {
- log.silly('postinstall', packageId(pkg), buildpath)
+module.exports = function (staging, pkg, log, next) {
+ log.silly('postinstall', packageId(pkg))
lifecycle(pkg.package, 'postinstall', pkg.path, false, false, next)
}
diff --git a/deps/npm/lib/install/action/preinstall.js b/deps/npm/lib/install/action/preinstall.js
index abd767016e..a6f85b0a5a 100644
--- a/deps/npm/lib/install/action/preinstall.js
+++ b/deps/npm/lib/install/action/preinstall.js
@@ -1,8 +1,9 @@
'use strict'
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
+var moduleStagingPath = require('../module-staging-path.js')
-module.exports = function (top, buildpath, pkg, log, next) {
- log.silly('preinstall', packageId(pkg), buildpath)
- lifecycle(pkg.package, 'preinstall', buildpath, false, false, next)
+module.exports = function (staging, pkg, log, next) {
+ log.silly('preinstall', packageId(pkg))
+ lifecycle(pkg.package, 'preinstall', moduleStagingPath(staging, pkg), false, false, next)
}
diff --git a/deps/npm/lib/install/action/prepare.js b/deps/npm/lib/install/action/prepare.js
new file mode 100644
index 0000000000..771a2a9399
--- /dev/null
+++ b/deps/npm/lib/install/action/prepare.js
@@ -0,0 +1,27 @@
+'use strict'
+var chain = require('slide').chain
+var lifecycle = require('../../utils/lifecycle.js')
+var packageId = require('../../utils/package-id.js')
+var prepublishWarning = require('../../utils/warn-deprecated.js')('prepublish-on-install')
+var moduleStagingPath = require('../module-staging-path.js')
+
+module.exports = function (staging, pkg, log, next) {
+ log.silly('prepublish', packageId(pkg))
+ // TODO: for `npm@5`, change the behavior and remove this warning.
+ // see https://github.com/npm/npm/issues/10074 for details
+ if (pkg.package && pkg.package.scripts && pkg.package.scripts.prepublish) {
+ prepublishWarning([
+ 'As of npm@5, `prepublish` scripts will run only for `npm publish`.',
+ '(In npm@4 and previous versions, it also runs for `npm install`.)',
+ 'See the deprecation note in `npm help scripts` for more information.'
+ ])
+ }
+ var buildpath = moduleStagingPath(staging, pkg)
+ chain(
+ [
+ [lifecycle, pkg.package, 'prepublish', buildpath, false, false],
+ [lifecycle, pkg.package, 'prepare', buildpath, false, false]
+ ],
+ next
+ )
+}
diff --git a/deps/npm/lib/install/action/prepublish.js b/deps/npm/lib/install/action/prepublish.js
deleted file mode 100644
index b9a5a5d6f1..0000000000
--- a/deps/npm/lib/install/action/prepublish.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict'
-var lifecycle = require('../../utils/lifecycle.js')
-var packageId = require('../../utils/package-id.js')
-
-module.exports = function (top, buildpath, pkg, log, next) {
- log.silly('prepublish', packageId(pkg), buildpath)
- lifecycle(pkg.package, 'prepublish', buildpath, false, false, next)
-}
diff --git a/deps/npm/lib/install/action/remove.js b/deps/npm/lib/install/action/remove.js
index 7b05a81b6d..47d5b766f7 100644
--- a/deps/npm/lib/install/action/remove.js
+++ b/deps/npm/lib/install/action/remove.js
@@ -10,7 +10,7 @@ var rename = require('../../utils/rename.js')
// This is weird because we want to remove the module but not it's node_modules folder
// allowing for this allows us to not worry about the order of operations
-module.exports = function (top, buildpath, pkg, log, next) {
+module.exports = function (staging, pkg, log, next) {
log.silly('remove', pkg.path)
if (pkg.target) {
removeLink(pkg, next)
diff --git a/deps/npm/lib/install/action/test.js b/deps/npm/lib/install/action/test.js
index ee315290ad..29f26f3ecf 100644
--- a/deps/npm/lib/install/action/test.js
+++ b/deps/npm/lib/install/action/test.js
@@ -2,7 +2,7 @@
var lifecycle = require('../../utils/lifecycle.js')
var packageId = require('../../utils/package-id.js')
-module.exports = function (top, buildpath, pkg, log, next) {
- log.silly('test', packageId(pkg), buildpath)
- lifecycle(pkg.package, 'test', buildpath, false, false, next)
+module.exports = function (staging, pkg, log, next) {
+ log.silly('test', packageId(pkg))
+ lifecycle(pkg.package, 'test', pkg.path, false, false, next)
}
diff --git a/deps/npm/lib/install/action/update-linked.js b/deps/npm/lib/install/action/update-linked.js
index b37f477e17..0babe10fdf 100644
--- a/deps/npm/lib/install/action/update-linked.js
+++ b/deps/npm/lib/install/action/update-linked.js
@@ -1,9 +1,15 @@
'use strict'
var path = require('path')
-module.exports = function (top, buildpath, pkg, log, next) {
+function getTop (pkg) {
+ if (pkg.target && pkg.target.parent) return getTop(pkg.target.parent)
+ if (pkg.parent) return getTop(pkg.parent)
+ return pkg.path
+}
+
+module.exports = function (staging, pkg, log, next) {
if (pkg.package.version !== pkg.oldPkg.package.version) {
- log.warn('update-linked', path.relative(top, pkg.path), 'needs updating to', pkg.package.version,
+ log.warn('update-linked', path.relative(getTop(pkg), pkg.path), 'needs updating to', pkg.package.version,
'from', pkg.oldPkg.package.version, "but we can't, as it's a symlink")
}
next()
diff --git a/deps/npm/lib/install/actions.js b/deps/npm/lib/install/actions.js
index 7c3f97cefc..6e9c1a0b65 100644
--- a/deps/npm/lib/install/actions.js
+++ b/deps/npm/lib/install/actions.js
@@ -1,5 +1,4 @@
'use strict'
-var path = require('path')
var validate = require('aproba')
var chain = require('slide').chain
var asyncMap = require('slide').asyncMap
@@ -8,7 +7,6 @@ var andFinishTracker = require('./and-finish-tracker.js')
var andAddParentToErrors = require('./and-add-parent-to-errors.js')
var failedDependency = require('./deps.js').failedDependency
var moduleName = require('../utils/module-name.js')
-var buildPath = require('./build-path.js')
var reportOptionalFailure = require('./report-optional-failure.js')
var isInstallable = require('./validate-args.js').isInstallable
@@ -21,7 +19,7 @@ actions.test = require('./action/test.js')
actions.preinstall = require('./action/preinstall.js')
actions.install = require('./action/install.js')
actions.postinstall = require('./action/postinstall.js')
-actions.prepublish = require('./action/prepublish.js')
+actions.prepare = require('./action/prepare.js')
actions.finalize = require('./action/finalize.js')
actions.remove = require('./action/remove.js')
actions.move = require('./action/move.js')
@@ -34,8 +32,9 @@ actions['global-link'] = require('./action/global-link.js')
Object.keys(actions).forEach(function (actionName) {
var action = actions[actionName]
- actions[actionName] = function (top, buildpath, pkg, log, next) {
- validate('SSOOF', arguments)
+ actions[actionName] = function (staging, pkg, log, next) {
+ // top, buildpath, pkg, log
+ validate('SOOF', arguments)
// refuse to run actions for failed packages
if (pkg.failed) return next()
if (action.rollback) {
@@ -58,7 +57,7 @@ Object.keys(actions).forEach(function (actionName) {
}
}
function thenRunAction () {
- action(top, buildpath, pkg, log, andDone(next))
+ action(staging, pkg, log, andDone(next))
}
function andDone (cb) {
return andFinishTracker(log, andAddParentToErrors(pkg.parent, andHandleOptionalDepErrors(pkg, cb)))
@@ -99,9 +98,7 @@ function prepareAction (staging, log) {
var cmd = action[0]
var pkg = action[1]
if (!actions[cmd]) throw new Error('Unknown decomposed command "' + cmd + '" (is it new?)')
- var top = path.resolve(staging, '../..')
- var buildpath = buildPath(staging, pkg)
- return [actions[cmd], top, buildpath, pkg, log.newGroup(cmd + ':' + moduleName(pkg))]
+ return [actions[cmd], staging, pkg, log.newGroup(cmd + ':' + moduleName(pkg))]
}
}
diff --git a/deps/npm/lib/install/deps.js b/deps/npm/lib/install/deps.js
index d7081296db..bdbd817007 100644
--- a/deps/npm/lib/install/deps.js
+++ b/deps/npm/lib/install/deps.js
@@ -406,6 +406,11 @@ function loadDeps (tree, log, next) {
exports.loadDevDeps = function (tree, log, next) {
validate('OOF', arguments)
if (!tree.package.devDependencies) return andFinishTracker.now(log, next)
+ // if any of our prexisting children are from a shrinkwrap then we skip
+ // loading dev deps as the shrinkwrap will already have provided them for us.
+ if (tree.children.some(function (child) { return child.shrinkwrapDev })) {
+ return andFinishTracker.now(log, next)
+ }
asyncMap(Object.keys(tree.package.devDependencies), function (dep, done) {
// things defined as both dev dependencies and regular dependencies are treated
// as the former
@@ -416,7 +421,7 @@ exports.loadDevDeps = function (tree, log, next) {
}, andForEachChild(loadDeps, andFinishTracker(log, next)))
}
-exports.loadExtraneous = function loadExtraneous (tree, log, next) {
+var loadExtraneous = exports.loadExtraneous = function (tree, log, next) {
var seen = {}
function loadExtraneous (tree, log, next) {
validate('OOF', arguments)
@@ -431,6 +436,9 @@ exports.loadExtraneous = function loadExtraneous (tree, log, next) {
exports.loadExtraneous.andResolveDeps = function (tree, log, next) {
validate('OOF', arguments)
+ // For canonicalized trees (eg from shrinkwrap) we don't want to bother
+ // resolving the dependencies of extraneous deps.
+ if (tree.loaded) return loadExtraneous(tree, log, next)
asyncMap(tree.children.filter(function (child) { return !child.loaded }), function (child, done) {
resolveWithExistingModule(child, tree, log, done)
}, andForEachChild(loadDeps, andFinishTracker(log, next)))
@@ -631,6 +639,14 @@ var earliestInstallable = exports.earliestInstallable = function (requiredBy, tr
return null
}
+ var devDeps = tree.package.devDependencies || {}
+ if (tree.isTop && devDeps[pkg.name]) {
+ var requested = npa(pkg.name + '@' + devDeps[pkg.name])
+ if (!doesChildVersionMatch({package: pkg}, requested, tree)) {
+ return null
+ }
+ }
+
if (tree.phantomChildren && tree.phantomChildren[pkg.name]) return null
if (tree.isTop) return tree
diff --git a/deps/npm/lib/install/diff-trees.js b/deps/npm/lib/install/diff-trees.js
index 578cda90ce..db4fb3ce31 100644
--- a/deps/npm/lib/install/diff-trees.js
+++ b/deps/npm/lib/install/diff-trees.js
@@ -113,7 +113,7 @@ var sortActions = module.exports.sortActions = function (differences) {
return sorted
}
-function diffTrees (oldTree, newTree) {
+var diffTrees = module.exports._diffTrees = function (oldTree, newTree) {
validate('OO', arguments)
var differences = []
var flatOldTree = flattenTree(oldTree)
@@ -144,7 +144,9 @@ function diffTrees (oldTree, newTree) {
}
} else {
var vername = getNameAndVersion(pkg.package)
- if (toRemoveByNameAndVer[vername] && toRemoveByNameAndVer[vername].length && !pkg.fromBundle) {
+ var removing = toRemoveByNameAndVer[vername] && toRemoveByNameAndVer[vername].length
+ var bundlesOrFromBundle = pkg.fromBundle || pkg.package.bundleDependencies
+ if (removing && !bundlesOrFromBundle) {
var flatname = toRemoveByNameAndVer[vername].shift()
pkg.fromPath = toRemove[flatname].path
differences.push(['move', pkg])
diff --git a/deps/npm/lib/install/inflate-bundled.js b/deps/npm/lib/install/inflate-bundled.js
index c597e7a566..f91e9112b9 100644
--- a/deps/npm/lib/install/inflate-bundled.js
+++ b/deps/npm/lib/install/inflate-bundled.js
@@ -1,10 +1,12 @@
'use strict'
var validate = require('aproba')
var childPath = require('../utils/child-path.js')
+var reset = require('./node.js').reset
module.exports = function inflateBundled (parent, children) {
validate('OA', arguments)
children.forEach(function (child) {
+ reset(child)
child.fromBundle = true
child.parent = parent
child.path = childPath(parent.path, child)
diff --git a/deps/npm/lib/install/inflate-shrinkwrap.js b/deps/npm/lib/install/inflate-shrinkwrap.js
index b70e9576bf..497bbe3348 100644
--- a/deps/npm/lib/install/inflate-shrinkwrap.js
+++ b/deps/npm/lib/install/inflate-shrinkwrap.js
@@ -16,6 +16,7 @@ var childPath = require('../utils/child-path.js')
module.exports = function (tree, swdeps, finishInflating) {
if (!npm.config.get('shrinkwrap')) return finishInflating()
+ tree.loaded = true
return inflateShrinkwrap(tree.path, tree, swdeps, finishInflating)
}
@@ -23,9 +24,17 @@ function inflateShrinkwrap (topPath, tree, swdeps, finishInflating) {
validate('SOOF', arguments)
var onDisk = {}
tree.children.forEach(function (child) { onDisk[moduleName(child)] = child })
- tree.children = []
var dev = npm.config.get('dev') || (!/^prod(uction)?$/.test(npm.config.get('only')) && !npm.config.get('production')) || /^dev(elopment)?$/.test(npm.config.get('only'))
var prod = !/^dev(elopment)?$/.test(npm.config.get('only'))
+
+ // If the shrinkwrap has no dev dependencies in it then we'll leave the one's
+ // already on disk. If it DOES have dev dependencies then ONLY those in the
+ // shrinkwrap will be included.
+ var swHasDev = Object.keys(swdeps).some(function (name) { return swdeps[name].dev })
+ tree.children = swHasDev ? [] : tree.children.filter(function (child) {
+ return tree.package.devDependencies[moduleName(child)]
+ })
+
return asyncMap(Object.keys(swdeps), doRealizeAndInflate, finishInflating)
function doRealizeAndInflate (name, next) {
@@ -40,6 +49,7 @@ function inflateShrinkwrap (topPath, tree, swdeps, finishInflating) {
var child = onDisk[name]
if (childIsEquivalent(sw, requested, child)) {
if (!child.fromShrinkwrap) child.fromShrinkwrap = requested.raw
+ if (sw.dev) child.shrinkwrapDev = true
tree.children.push(child)
annotateMetadata(child.package, requested, requested.raw, topPath)
return inflateShrinkwrap(topPath, child, dependencies || {}, next)
@@ -69,7 +79,7 @@ function inflateShrinkwrap (topPath, tree, swdeps, finishInflating) {
return function () {
var child = createChild({
package: pkg,
- loaded: false,
+ loaded: true,
parent: tree,
fromShrinkwrap: pkg._from,
path: childPath(tree.path, pkg),
diff --git a/deps/npm/lib/install/build-path.js b/deps/npm/lib/install/module-staging-path.js
index ebef544f80..00ee96b33d 100644
--- a/deps/npm/lib/install/build-path.js
+++ b/deps/npm/lib/install/module-staging-path.js
@@ -2,7 +2,7 @@
var uniqueFilename = require('unique-filename')
var moduleName = require('../utils/module-name.js')
-module.exports = buildPath
-function buildPath (staging, pkg) {
+module.exports = moduleStagingPath
+function moduleStagingPath (staging, pkg) {
return uniqueFilename(staging, moduleName(pkg), pkg.realpath)
}
diff --git a/deps/npm/lib/install/mutate-into-logical-tree.js b/deps/npm/lib/install/mutate-into-logical-tree.js
index b2059da906..491f20913c 100644
--- a/deps/npm/lib/install/mutate-into-logical-tree.js
+++ b/deps/npm/lib/install/mutate-into-logical-tree.js
@@ -38,7 +38,7 @@ var mutateIntoLogicalTree = module.exports = function (tree) {
Object.keys(flat).sort().forEach(function (flatname) {
var node = flat[flatname]
- if (!node.requiredBy.length) return
+ if (!(node.requiredBy && node.requiredBy.length)) return
if (node.parent) {
// If a node is a cycle that never reaches the root of the logical
diff --git a/deps/npm/lib/install/read-shrinkwrap.js b/deps/npm/lib/install/read-shrinkwrap.js
index 35180b688d..3453e3192f 100644
--- a/deps/npm/lib/install/read-shrinkwrap.js
+++ b/deps/npm/lib/install/read-shrinkwrap.js
@@ -6,6 +6,7 @@ var inflateShrinkwrap = require('./inflate-shrinkwrap.js')
var parseJSON = require('../utils/parse-json.js')
var readShrinkwrap = module.exports = function (child, next) {
+ if (child.package._shrinkwrap) return process.nextTick(next)
fs.readFile(path.join(child.path, 'npm-shrinkwrap.json'), function (er, data) {
if (er) {
child.package._shrinkwrap = null
diff --git a/deps/npm/lib/install/validate-args.js b/deps/npm/lib/install/validate-args.js
index 02c0558e4c..c30bf8695b 100644
--- a/deps/npm/lib/install/validate-args.js
+++ b/deps/npm/lib/install/validate-args.js
@@ -50,7 +50,12 @@ function checkSelf (idealTree, pkg, force, next) {
idealTree.warnings.push(warn)
next()
} else {
- var er = new Error('Refusing to install ' + pkg.name + ' as a dependency of itself')
+ var er = new Error('Refusing to install package with name "' + pkg.name +
+ '" under a package\n' +
+ 'also called "' + pkg.name + '". Did you name your project the same\n' +
+ 'as the dependency you\'re installing?\n\n' +
+ 'For more information, see:\n' +
+ ' <https://docs.npmjs.com/cli/install#limitations-of-npms-install-algorithm>')
er.code = 'ENOSELF'
next(er)
}
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index 4f81015251..05749b3fd2 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -13,6 +13,9 @@
return
}
+ var unsupported = require('../lib/utils/unsupported.js')
+ unsupported.checkForBrokenNode()
+
var gfs = require('graceful-fs')
// Patch the global fs module here at the app level
var fs = gfs.gracefulify(require('fs'))
@@ -284,10 +287,6 @@
log.resume()
- // at this point the configs are all set.
- // go ahead and spin up the registry client.
- npm.registry = new CachingRegClient(npm.config)
-
var umask = npm.config.get('umask')
npm.modes = {
exec: parseInt('0777', 8) & (~umask),
@@ -301,6 +300,14 @@
var lp = Object.getOwnPropertyDescriptor(config, 'localPrefix')
Object.defineProperty(npm, 'localPrefix', lp)
+ config.set('scope', scopeifyScope(config.get('scope')))
+ npm.projectScope = config.get('scope') ||
+ scopeifyScope(getProjectScope(npm.prefix))
+
+ // at this point the configs are all set.
+ // go ahead and spin up the registry client.
+ npm.registry = new CachingRegClient(npm.config)
+
return cb(null, npm)
})
})
@@ -400,4 +407,20 @@
if (require.main === module) {
require('../bin/npm-cli.js')
}
+
+ function scopeifyScope (scope) {
+ return (!scope || scope[0] === '@') ? scope : ('@' + scope)
+ }
+
+ function getProjectScope (prefix) {
+ try {
+ var pkg = JSON.parse(fs.readFileSync(path.join(prefix, 'package.json')))
+ if (typeof pkg.name !== 'string') return ''
+ var sep = pkg.name.indexOf('/')
+ if (sep === -1) return ''
+ return pkg.name.slice(0, sep)
+ } catch (ex) {
+ return ''
+ }
+ }
})()
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
index dd59798365..9f6a8b75eb 100644
--- a/deps/npm/lib/outdated.js
+++ b/deps/npm/lib/outdated.js
@@ -111,6 +111,7 @@ function outdated (args, silent, cb) {
}
output(table(outTable, tableOpts))
}
+ process.exitCode = 1
cb(null, list.map(function (item) { return [item[0].parent.path].concat(item.slice(1, 7)) }))
})
}))
diff --git a/deps/npm/lib/publish.js b/deps/npm/lib/publish.js
index e1826df9d4..c0e910e85f 100644
--- a/deps/npm/lib/publish.js
+++ b/deps/npm/lib/publish.js
@@ -52,37 +52,38 @@ function publish (args, isRetry, cb) {
if (!data.version) return cb(new Error('No version provided'))
}
- // Error is OK. Could be publishing a URL or tarball, however, that means
- // that we will not have automatically run the prepublish script, since
- // that gets run when adding a folder to the cache.
- if (er) return cacheAddPublish(arg, false, isRetry, cb)
- else cacheAddPublish(arg, true, isRetry, cb)
- })
-}
-
-// didPre in this case means that we already ran the prepublish script,
-// and that the 'dir' is an actual directory, and not something silly
-// like a tarball or name@version thing.
-// That means that we can run publish/postpublish in the dir, rather than
-// in the cache dir.
-function cacheAddPublish (dir, didPre, isRetry, cb) {
- npm.commands.cache.add(dir, null, null, false, function (er, data) {
- if (er) return cb(er)
- log.silly('publish', data)
- var cachedir = path.resolve(cachedPackageRoot(data), 'package')
- chain(
- [
- !didPre && [lifecycle, data, 'prepublish', cachedir],
- [publish_, dir, data, isRetry, cachedir],
- [lifecycle, data, 'publish', didPre ? dir : cachedir],
- [lifecycle, data, 'postpublish', didPre ? dir : cachedir]
- ],
- cb
- )
+ // if readJson errors, the argument might be a tarball or package URL
+ if (er) {
+ npm.commands.cache.add(arg, null, null, false, function (er, data) {
+ if (er) return cb(er)
+ log.silly('publish', data)
+ var cached = path.resolve(cachedPackageRoot(data), 'package') + '.tgz'
+ // *publish* lifecycle scripts aren't run when publishing a built artifact
+ // go to the next step directly
+ publish_(arg, data, isRetry, cached, cb)
+ })
+ } else {
+ var dir = arg
+ npm.commands.cache.add(dir, null, null, false, function (er, data) {
+ if (er) return cb(er)
+ log.silly('publish', data)
+ var cached = path.resolve(cachedPackageRoot(data), 'package') + '.tgz'
+ // `prepublish` and `prepare` are run by cache.add
+ chain(
+ [
+ [lifecycle, data, 'prepublishOnly', dir],
+ [publish_, dir, data, isRetry, cached],
+ [lifecycle, data, 'publish', dir],
+ [lifecycle, data, 'postpublish', dir]
+ ],
+ cb
+ )
+ })
+ }
})
}
-function publish_ (arg, data, isRetry, cachedir, cb) {
+function publish_ (arg, data, isRetry, cached, cb) {
if (!data) return cb(new Error('no package.json file found'))
var mappedConfig = getPublishConfig(
@@ -107,11 +108,9 @@ function publish_ (arg, data, isRetry, cachedir, cb) {
mapToRegistry(data.name, config, function (er, registryURI, auth, registryBase) {
if (er) return cb(er)
- var tarballPath = cachedir + '.tgz'
-
// we just want the base registry URL in this case
log.verbose('publish', 'registryBase', registryBase)
- log.silly('publish', 'uploading', tarballPath)
+ log.silly('publish', 'uploading', cached)
data._npmUser = {
name: auth.username,
@@ -120,7 +119,7 @@ function publish_ (arg, data, isRetry, cachedir, cb) {
var params = {
metadata: data,
- body: createReadStream(tarballPath),
+ body: createReadStream(cached),
auth: auth
}
diff --git a/deps/npm/lib/search.js b/deps/npm/lib/search.js
index cd6d5ed8ea..c9f3628717 100644
--- a/deps/npm/lib/search.js
+++ b/deps/npm/lib/search.js
@@ -1,11 +1,15 @@
+'use strict'
module.exports = exports = search
var npm = require('./npm.js')
-var columnify = require('columnify')
-var updateIndex = require('./cache/update-index.js')
+var allPackageMetadata = require('./search/all-package-metadata.js')
+var packageFilter = require('./search/package-filter.js')
+var formatPackageStream = require('./search/format-package-stream.js')
var usage = require('./utils/usage')
var output = require('./utils/output.js')
+var log = require('npmlog')
+var ms = require('mississippi')
search.usage = usage(
'search',
@@ -13,276 +17,89 @@ search.usage = usage(
)
search.completion = function (opts, cb) {
- var compl = {}
- var partial = opts.partialWord
- var ipartial = partial.toLowerCase()
- var plen = partial.length
-
- // get the batch of data that matches so far.
- // this is an example of using npm.commands.search programmatically
- // to fetch data that has been filtered by a set of arguments.
- search(opts.conf.argv.remain.slice(2), true, function (er, data) {
- if (er) return cb(er)
- Object.keys(data).forEach(function (name) {
- data[name].words.split(' ').forEach(function (w) {
- if (w.toLowerCase().indexOf(ipartial) === 0) {
- compl[partial + w.substr(plen)] = true
- }
- })
- })
- cb(null, Object.keys(compl))
- })
+ cb(null, [])
}
-function search (args, silent, staleness, cb) {
- if (typeof cb !== 'function') {
- cb = staleness
- staleness = 600
- }
- if (typeof cb !== 'function') {
- cb = silent
- silent = false
- }
-
- var searchopts = npm.config.get('searchopts')
- var searchexclude = npm.config.get('searchexclude')
+function search (args, cb) {
+ var staleness = npm.config.get('searchstaleness')
- if (typeof searchopts !== 'string') searchopts = ''
- searchopts = searchopts.split(/\s+/)
- var opts = searchopts.concat(args).map(function (s) {
- return s.toLowerCase()
- }).filter(function (s) { return s })
-
- if (opts.length === 0) {
+ var include = prepareIncludes(args, npm.config.get('searchopts'))
+ if (include.length === 0) {
return cb(new Error('search must be called with arguments'))
}
- if (typeof searchexclude === 'string') {
- searchexclude = searchexclude.split(/\s+/)
- } else {
- searchexclude = []
- }
- searchexclude = searchexclude.map(function (s) {
- return s.toLowerCase()
- })
-
- getFilteredData(staleness, opts, searchexclude, function (er, data) {
- // now data is the list of data that we want to show.
- // prettify and print it, and then provide the raw
- // data to the cb.
- if (er || silent) return cb(er, data)
- output(prettify(data, args))
- cb(null, data)
- })
-}
-
-function getFilteredData (staleness, args, notArgs, cb) {
- updateIndex(staleness, function (er, data) {
- if (er) return cb(er)
- return cb(null, filter(data, args, notArgs))
- })
-}
-
-function filter (data, args, notArgs) {
- // data={<name>:{package data}}
- return Object.keys(data).map(function (d) {
- return data[d]
- }).filter(function (d) {
- return typeof d === 'object'
- }).map(stripData).map(getWords).filter(function (data) {
- return filterWords(data, args, notArgs)
- }).reduce(function (l, r) {
- l[r.name] = r
- return l
- }, {})
-}
-
-function stripData (data) {
- return {
- name: data.name,
- description: npm.config.get('description') ? data.description : '',
- maintainers: (data.maintainers || []).map(function (m) {
- return '=' + m.name
- }),
- url: !Object.keys(data.versions || {}).length ? data.url : null,
- keywords: data.keywords || [],
- version: Object.keys(data.versions || {})[0] || [],
- time: data.time &&
- data.time.modified &&
- (new Date(data.time.modified).toISOString() // remove time
- .split('T').join(' ')
- .replace(/:[0-9]{2}\.[0-9]{3}Z$/, ''))
- .slice(0, -5) ||
- 'prehistoric'
- }
-}
-
-function getWords (data) {
- data.words = [ data.name ]
- .concat(data.description)
- .concat(data.maintainers)
- .concat(data.url && ('<' + data.url + '>'))
- .concat(data.keywords)
- .map(function (f) { return f && f.trim && f.trim() })
- .filter(function (f) { return f })
- .join(' ')
- .toLowerCase()
- return data
-}
-
-function filterWords (data, args, notArgs) {
- var words = data.words
- for (var i = 0, l = args.length; i < l; i++) {
- if (!match(words, args[i])) return false
- }
- for (i = 0, l = notArgs.length; i < l; i++) {
- if (match(words, notArgs[i])) return false
- }
- return true
-}
-
-function match (words, arg) {
- if (arg.charAt(0) === '/') {
- arg = arg.replace(/\/$/, '')
- arg = new RegExp(arg.substr(1, arg.length - 1))
- return words.match(arg)
- }
- return words.indexOf(arg) !== -1
-}
+ var exclude = prepareExcludes(npm.config.get('searchexclude'))
-function prettify (data, args) {
- var searchsort = (npm.config.get('searchsort') || 'NAME').toLowerCase()
- var sortField = searchsort.replace(/^\-+/, '')
- var searchRev = searchsort.charAt(0) === '-'
- var truncate = !npm.config.get('long')
+ // Used later to figure out whether we had any packages go out
+ var anyOutput = false
- if (Object.keys(data).length === 0) {
- return 'No match found for ' + (args.map(JSON.stringify).join(' '))
- }
+ // Get a stream with *all* the packages. This takes care of dealing
+ // with the local cache as well, but that's an internal detail.
+ var allEntriesStream = allPackageMetadata(staleness)
- var lines = Object.keys(data).map(function (d) {
- // strip keyname
- return data[d]
- }).map(function (dat) {
- dat.author = dat.maintainers
- delete dat.maintainers
- dat.date = dat.time
- delete dat.time
- return dat
- }).map(function (dat) {
- // split keywords on whitespace or ,
- if (typeof dat.keywords === 'string') {
- dat.keywords = dat.keywords.split(/[,\s]+/)
- }
- if (Array.isArray(dat.keywords)) {
- dat.keywords = dat.keywords.join(' ')
- }
-
- // split author on whitespace or ,
- if (typeof dat.author === 'string') {
- dat.author = dat.author.split(/[,\s]+/)
- }
- if (Array.isArray(dat.author)) {
- dat.author = dat.author.join(' ')
- }
- return dat
+ // Grab a stream that filters those packages according to given params.
+ var searchSection = (npm.config.get('unicode') ? '🤔 ' : '') + 'search'
+ var filterStream = streamFilter(function (pkg) {
+ log.gauge.pulse('search')
+ log.gauge.show({section: searchSection, logline: 'scanning ' + pkg.name})
+ // Simply 'true' if the package matches search parameters.
+ var match = packageFilter(pkg, include, exclude, {
+ description: npm.config.get('description')
+ })
+ if (match) { anyOutput = true }
+ return match
})
- lines.sort(function (a, b) {
- var aa = a[sortField].toLowerCase()
- var bb = b[sortField].toLowerCase()
- return aa === bb ? 0
- : aa < bb ? -1 : 1
+ // Grab a configured output stream that will spit out packages in the
+ // desired format.
+ var outputStream = formatPackageStream({
+ args: args, // --searchinclude options are not highlighted
+ long: npm.config.get('long'),
+ description: npm.config.get('description'),
+ json: npm.config.get('json'),
+ parseable: npm.config.get('parseable'),
+ color: npm.color
+ })
+ outputStream.on('data', function (chunk) {
+ output(chunk.toString('utf8'))
})
- if (searchRev) lines.reverse()
-
- var columns = npm.config.get('description')
- ? ['name', 'description', 'author', 'date', 'version', 'keywords']
- : ['name', 'author', 'date', 'version', 'keywords']
-
- var output = columnify(
- lines,
- {
- include: columns,
- truncate: truncate,
- config: {
- name: { maxWidth: 40, truncate: false, truncateMarker: '' },
- description: { maxWidth: 60 },
- author: { maxWidth: 20 },
- date: { maxWidth: 11 },
- version: { maxWidth: 11 },
- keywords: { maxWidth: Infinity }
- }
+ log.silly('search', 'searching packages')
+ ms.pipe(allEntriesStream, filterStream, outputStream, function (er) {
+ if (er) return cb(er)
+ if (!anyOutput && !npm.config.get('json') && !npm.config.get('parseable')) {
+ output('No matches found for ' + (args.map(JSON.stringify).join(' ')))
}
- )
- output = trimToMaxWidth(output)
- output = highlightSearchTerms(output, args)
-
- return output
+ log.silly('search', 'index search completed')
+ log.clearProgress()
+ cb(null, {})
+ })
}
-var colors = [31, 33, 32, 36, 34, 35]
-var cl = colors.length
-
-function addColorMarker (str, arg, i) {
- var m = i % cl + 1
- var markStart = String.fromCharCode(m)
- var markEnd = String.fromCharCode(0)
-
- if (arg.charAt(0) === '/') {
- return str.replace(
- new RegExp(arg.substr(1, arg.length - 2), 'gi'),
- function (bit) { return markStart + bit + markEnd }
- )
- }
-
- // just a normal string, do the split/map thing
- var pieces = str.toLowerCase().split(arg.toLowerCase())
- var p = 0
-
- return pieces.map(function (piece) {
- piece = str.substr(p, piece.length)
- var mark = markStart +
- str.substr(p + piece.length, arg.length) +
- markEnd
- p += piece.length + arg.length
- return piece + mark
- }).join('')
+function prepareIncludes (args, searchopts) {
+ if (typeof searchopts !== 'string') searchopts = ''
+ return searchopts.split(/\s+/).concat(args).map(function (s) {
+ return s.toLowerCase()
+ }).filter(function (s) { return s })
}
-function colorize (line) {
- for (var i = 0; i < cl; i++) {
- var m = i + 1
- var color = npm.color ? '\u001B[' + colors[i] + 'm' : ''
- line = line.split(String.fromCharCode(m)).join(color)
+function prepareExcludes (searchexclude) {
+ var exclude
+ if (typeof searchexclude === 'string') {
+ exclude = searchexclude.split(/\s+/)
+ } else {
+ exclude = []
}
- var uncolor = npm.color ? '\u001B[0m' : ''
- return line.split('\u0000').join(uncolor)
-}
-
-function getMaxWidth () {
- var cols
- try {
- var tty = require('tty')
- var stdout = process.stdout
- cols = !tty.isatty(stdout.fd) ? Infinity : process.stdout.getWindowSize()[0]
- cols = (cols === 0) ? Infinity : cols
- } catch (ex) { cols = Infinity }
- return cols
-}
-
-function trimToMaxWidth (str) {
- var maxWidth = getMaxWidth()
- return str.split('\n').map(function (line) {
- return line.slice(0, maxWidth)
- }).join('\n')
+ return exclude.map(function (s) {
+ return s.toLowerCase()
+ })
}
-function highlightSearchTerms (str, terms) {
- terms.forEach(function (arg, i) {
- str = addColorMarker(str, arg, i)
+function streamFilter (filter) {
+ return ms.through.obj(function (chunk, enc, cb) {
+ if (filter(chunk)) {
+ this.push(chunk)
+ }
+ cb()
})
-
- return colorize(str).trim()
}
diff --git a/deps/npm/lib/search/all-package-metadata.js b/deps/npm/lib/search/all-package-metadata.js
new file mode 100644
index 0000000000..ce917f5ef0
--- /dev/null
+++ b/deps/npm/lib/search/all-package-metadata.js
@@ -0,0 +1,311 @@
+'use strict'
+
+var fs = require('graceful-fs')
+var path = require('path')
+var mkdir = require('mkdirp')
+var chownr = require('chownr')
+var npm = require('../npm.js')
+var log = require('npmlog')
+var cacheFile = require('npm-cache-filename')
+var getCacheStat = require('../cache/get-stat.js')
+var mapToRegistry = require('../utils/map-to-registry.js')
+var jsonstream = require('JSONStream')
+var writeStreamAtomic = require('fs-write-stream-atomic')
+var ms = require('mississippi')
+var sortedUnionStream = require('sorted-union-stream')
+var once = require('once')
+
+// Returns a sorted stream of all package metadata. Internally, takes care of
+// maintaining its metadata cache and making partial or full remote requests,
+// according to staleness, validity, etc.
+//
+// The local cache must hold certain invariants:
+// 1. It must be a proper JSON object
+// 2. It must have its keys lexically sorted
+// 3. The first entry must be `_updated` with a millisecond timestamp as a val.
+// 4. It must include all entries that exist in the metadata endpoint as of
+// the value in `_updated`
+module.exports = allPackageMetadata
+function allPackageMetadata (staleness) {
+ var stream = ms.through.obj()
+
+ mapToRegistry('-/all', npm.config, function (er, uri, auth) {
+ if (er) return stream.emit('error', er)
+
+ var cacheBase = cacheFile(npm.config.get('cache'))(uri)
+ var cachePath = path.join(cacheBase, '.cache.json')
+
+ createEntryStream(cachePath, uri, auth, staleness, function (err, entryStream, latest, newEntries) {
+ if (err) return stream.emit('error', err)
+ log.silly('all-package-metadata', 'entry stream created')
+ if (entryStream && newEntries) {
+ createCacheWriteStream(cachePath, latest, function (err, writeStream) {
+ if (err) return stream.emit('error', err)
+ log.silly('all-package-metadata', 'output stream created')
+ ms.pipeline.obj(entryStream, writeStream, stream)
+ })
+ } else if (entryStream) {
+ ms.pipeline.obj(entryStream, stream)
+ } else {
+ stream.emit('error', new Error('No search sources available'))
+ }
+ })
+ })
+ return stream
+}
+
+// Creates a stream of the latest available package metadata.
+// Metadata will come from a combination of the local cache and remote data.
+module.exports._createEntryStream = createEntryStream
+function createEntryStream (cachePath, uri, auth, staleness, cb) {
+ createCacheEntryStream(cachePath, function (err, cacheStream, cacheLatest) {
+ cacheLatest = cacheLatest || 0
+ if (err) {
+ log.warn('', 'Failed to read search cache. Rebuilding')
+ log.silly('all-package-metadata', 'cache read error: ', err)
+ }
+ createEntryUpdateStream(uri, auth, staleness, cacheLatest, function (err, updateStream, updatedLatest) {
+ updatedLatest = updatedLatest || 0
+ var latest = updatedLatest || cacheLatest
+ if (!cacheStream && !updateStream) {
+ return cb(new Error('No search sources available'))
+ }
+ if (err) {
+ log.warn('', 'Search data request failed, search might be stale')
+ log.silly('all-package-metadata', 'update request error: ', err)
+ }
+ if (cacheStream && updateStream) {
+ // Deduped, unioned, sorted stream from the combination of both.
+ cb(null,
+ createMergedStream(cacheStream, updateStream),
+ latest,
+ !!updatedLatest)
+ } else {
+ // Either one works if one or the other failed
+ cb(null, cacheStream || updateStream, latest, !!updatedLatest)
+ }
+ })
+ })
+}
+
+// Merges `a` and `b` into one stream, dropping duplicates in favor of entries
+// in `b`. Both input streams should already be individually sorted, and the
+// returned output stream will have semantics resembling the merge step of a
+// plain old merge sort.
+module.exports._createMergedStream = createMergedStream
+function createMergedStream (a, b) {
+ linkStreams(a, b)
+ return sortedUnionStream(b, a, function (pkg) { return pkg.name })
+}
+
+// Reads the local index and returns a stream that spits out package data.
+module.exports._createCacheEntryStream = createCacheEntryStream
+function createCacheEntryStream (cacheFile, cb) {
+ log.verbose('all-package-metadata', 'creating entry stream from local cache')
+ log.verbose('all-package-metadata', cacheFile)
+ fs.stat(cacheFile, function (err, stat) {
+ if (err) return cb(err)
+ // TODO - This isn't very helpful if `cacheFile` is empty or just `{}`
+ var entryStream = ms.pipeline.obj(
+ fs.createReadStream(cacheFile),
+ jsonstream.parse('*'),
+ // I believe this passthrough is necessary cause `jsonstream` returns
+ // weird custom streams that behave funny sometimes.
+ ms.through.obj()
+ )
+ extractUpdated(entryStream, 'cached-entry-stream', cb)
+ })
+}
+
+// Stream of entry updates from the server. If `latest` is `0`, streams the
+// entire metadata object from the registry.
+module.exports._createEntryUpdateStream = createEntryUpdateStream
+function createEntryUpdateStream (all, auth, staleness, latest, cb) {
+ log.verbose('all-package-metadata', 'creating remote entry stream')
+ var params = {
+ timeout: 600,
+ follow: true,
+ staleOk: true,
+ auth: auth,
+ streaming: true
+ }
+ var partialUpdate = false
+ if (latest && (Date.now() - latest < (staleness * 1000))) {
+ // Skip the request altogether if our `latest` isn't stale.
+ log.verbose('all-package-metadata', 'Local data up to date, skipping update')
+ return cb(null)
+ } else if (latest === 0) {
+ log.warn('', 'Building the local index for the first time, please be patient')
+ log.verbose('all-package-metadata', 'No cached data: requesting full metadata db')
+ } else {
+ log.verbose('all-package-metadata', 'Cached data present with timestamp:', latest, 'requesting partial index update')
+ all += '/since?stale=update_after&startkey=' + latest
+ partialUpdate = true
+ }
+ npm.registry.request(all, params, function (er, res) {
+ if (er) return cb(er)
+ log.silly('all-package-metadata', 'request stream opened, code:', res.statusCode)
+ // NOTE - The stream returned by `request` seems to be very persnickety
+ // and this is almost a magic incantation to get it to work.
+ // Modify how `res` is used here at your own risk.
+ var entryStream = ms.pipeline.obj(
+ res,
+ ms.through(function (chunk, enc, cb) {
+ cb(null, chunk)
+ }),
+ jsonstream.parse('*', function (pkg, key) {
+ if (key[0] === '_updated' || key[0][0] !== '_') {
+ return pkg
+ }
+ })
+ )
+ if (partialUpdate) {
+ // The `/all/since` endpoint doesn't return `_updated`, so we
+ // just use the request's own timestamp.
+ cb(null, entryStream, Date.parse(res.headers.date))
+ } else {
+ extractUpdated(entryStream, 'entry-update-stream', cb)
+ }
+ })
+}
+
+// Both the (full) remote requests and the local index have `_updated` as their
+// first returned entries. This is the "latest" unix timestamp for the metadata
+// in question. This code does a bit of juggling with the data streams
+// so that we can pretend that field doesn't exist, but still extract `latest`
+function extractUpdated (entryStream, label, cb) {
+ cb = once(cb)
+ log.silly('all-package-metadata', 'extracting latest')
+ function nope (msg) {
+ return function () {
+ log.warn('all-package-metadata', label, msg)
+ entryStream.removeAllListeners()
+ entryStream.destroy()
+ cb(new Error(msg))
+ }
+ }
+ var onErr = nope('Failed to read stream')
+ var onEnd = nope('Empty or invalid stream')
+ entryStream.on('error', onErr)
+ entryStream.on('end', onEnd)
+ entryStream.once('data', function (latest) {
+ log.silly('all-package-metadata', 'got first stream entry for', label, latest)
+ entryStream.removeListener('error', onErr)
+ entryStream.removeListener('end', onEnd)
+ // Because `.once()` unpauses the stream, we re-pause it after the first
+ // entry so we don't vomit entries into the void.
+ entryStream.pause()
+ if (typeof latest === 'number') {
+ // The extra pipeline is to return a stream that will implicitly unpause
+ // after having an `.on('data')` listener attached, since using this
+ // `data` event broke its initial state.
+ cb(null, ms.pipeline.obj(entryStream, ms.through.obj()), latest)
+ } else {
+ cb(new Error('expected first entry to be _updated'))
+ }
+ })
+}
+
+// Creates a stream that writes input metadata to the current cache.
+// Cache updates are atomic, and the stream closes when *everything* is done.
+// The stream is also passthrough, so entries going through it will also
+// be output from it.
+module.exports._createCacheWriteStream = createCacheWriteStream
+function createCacheWriteStream (cacheFile, latest, cb) {
+ _ensureCacheDirExists(cacheFile, function (err) {
+ if (err) return cb(err)
+ log.silly('all-package-metadata', 'creating output stream')
+ var outStream = _createCacheOutStream()
+ var cacheFileStream = writeStreamAtomic(cacheFile)
+ var inputStream = _createCacheInStream(cacheFileStream, outStream, latest)
+
+ // Glue together the various streams so they fail together.
+ // `cacheFileStream` errors are already handled by the `inputStream`
+ // pipeline
+ var errEmitted = false
+ linkStreams(inputStream, outStream, function () { errEmitted = true })
+
+ cacheFileStream.on('close', function () { !errEmitted && outStream.end() })
+
+ cb(null, ms.duplex.obj(inputStream, outStream))
+ })
+}
+
+function _ensureCacheDirExists (cacheFile, cb) {
+ var cacheBase = path.dirname(cacheFile)
+ log.silly('all-package-metadata', 'making sure cache dir exists at', cacheBase)
+ getCacheStat(function (er, st) {
+ if (er) return cb(er)
+ mkdir(cacheBase, function (er, made) {
+ if (er) return cb(er)
+ chownr(made || cacheBase, st.uid, st.gid, cb)
+ })
+ })
+}
+
+function _createCacheOutStream () {
+ return ms.pipeline.obj(
+ // These two passthrough `through` streams compensate for some
+ // odd behavior with `jsonstream`.
+ ms.through(),
+ jsonstream.parse('*', function (obj, key) {
+ // This stream happens to get _updated passed through it, for
+ // implementation reasons. We make sure to filter it out cause
+ // the fact that it comes t
+ if (typeof obj === 'object') {
+ return obj
+ }
+ }),
+ ms.through.obj()
+ )
+}
+
+function _createCacheInStream (writer, outStream, latest) {
+ var updatedWritten = false
+ var inStream = ms.pipeline.obj(
+ ms.through.obj(function (pkg, enc, cb) {
+ if (!updatedWritten && typeof pkg === 'number') {
+ // This is the `_updated` value getting sent through.
+ updatedWritten = true
+ return cb(null, ['_updated', pkg])
+ } else if (typeof pkg !== 'object') {
+ this.emit('error', new Error('invalid value written to input stream'))
+ } else {
+ // The [key, val] format is expected by `jsonstream` for object writing
+ cb(null, [pkg.name, pkg])
+ }
+ }),
+ jsonstream.stringifyObject('{', ',', '}'),
+ ms.through(function (chunk, enc, cb) {
+ // This tees off the buffer data to `outStream`, and then continues
+ // the pipeline as usual
+ outStream.write(chunk, enc, function () {
+ cb(null, chunk)
+ })
+ }),
+ // And finally, we write to the cache file.
+ writer
+ )
+ inStream.write(latest)
+ return inStream
+}
+
+// Links errors between `a` and `b`, preventing cycles, and calls `cb` if
+// an error happens, once per error.
+function linkStreams (a, b, cb) {
+ var lastError = null
+ a.on('error', function (err) {
+ if (err !== lastError) {
+ lastError = err
+ b.emit('error', err)
+ cb(err)
+ }
+ })
+ b.on('error', function (err) {
+ if (err !== lastError) {
+ lastError = err
+ a.emit('error', err)
+ cb(err)
+ }
+ })
+}
diff --git a/deps/npm/lib/search/format-package-stream.js b/deps/npm/lib/search/format-package-stream.js
new file mode 100644
index 0000000000..d4333dc1a6
--- /dev/null
+++ b/deps/npm/lib/search/format-package-stream.js
@@ -0,0 +1,175 @@
+'use strict'
+
+var ms = require('mississippi')
+var jsonstream = require('JSONStream')
+var columnify = require('columnify')
+
module.exports = formatPackageStream

// Returns a stream that formats incoming search-result package objects
// either as a JSON array (`opts.json`) or as human-readable text rows.
function formatPackageStream (opts) {
  var options = opts || {}
  return options.json ? jsonOutputStream() : textOutputStream(options)
}
+
// Serializes a stream of package objects into a single JSON array.
function jsonOutputStream () {
  var input = ms.through.obj()
  var serialize = jsonstream.stringify('[', ',', ']')
  var output = ms.through()
  return ms.pipeline.obj(input, serialize, output)
}
+
// Formats each incoming package object as one prettified text row,
// numbering rows so headers are only printed for the first one.
function textOutputStream (opts) {
  var lineNumber = 0
  return ms.through.obj(function (pkg, enc, cb) {
    lineNumber += 1
    cb(null, prettify(pkg, lineNumber, opts))
  })
}
+
// Renders a single search result: a tab-separated record when
// `opts.parseable`, otherwise a columnified row (optionally colorized).
// `num` is the 1-based row number; headers are shown only for row 1.
function prettify (data, num, opts) {
  var options = opts || {}

  var row = stripData(data, options)
  row.author = row.maintainers
  delete row.maintainers
  row.date = row.time
  delete row.time

  // Normalize keywords and author into single space-separated strings,
  // first splitting on commas/whitespace if they arrived as strings.
  ;['keywords', 'author'].forEach(function (field) {
    if (typeof row[field] === 'string') {
      row[field] = row[field].split(/[,\s]+/)
    }
    if (Array.isArray(row[field])) {
      row[field] = row[field].join(' ')
    }
  })

  var columns = ['name', 'author', 'date', 'version', 'keywords']
  if (options.description) columns.splice(1, 0, 'description')

  if (options.parseable) {
    // Machine-readable: tab-separated, with embedded tabs flattened.
    return columns.map(function (col) {
      return row[col] && ('' + row[col]).replace(/\t/g, ' ')
    }).join('\t')
  }

  var rendered = columnify(
    [row],
    {
      include: columns,
      showHeaders: num <= 1,
      columnSplitter: ' | ',
      truncate: !options.long,
      config: {
        name: { minWidth: 25, maxWidth: 25, truncate: false, truncateMarker: '' },
        description: { minWidth: 20, maxWidth: 20 },
        author: { minWidth: 15, maxWidth: 15 },
        date: { maxWidth: 11 },
        version: { minWidth: 8, maxWidth: 8 },
        keywords: { maxWidth: Infinity }
      }
    }
  )
  rendered = trimToMaxWidth(rendered)
  return options.color ? highlightSearchTerms(rendered, options.args) : rendered
}
+
// Rotating ANSI foreground color codes used to highlight search terms.
var colors = [31, 33, 32, 36, 34, 35]
var cl = colors.length

// Wraps every occurrence of `arg` in `str` with private marker characters
// (char code `i % cl + 1` opens, NUL closes) that `colorize` later turns
// into ANSI escape sequences. `arg` may be a "/pattern/"-style string.
function addColorMarker (str, arg, i) {
  var open = String.fromCharCode(i % cl + 1)
  var close = String.fromCharCode(0)

  if (arg.charAt(0) === '/') {
    // Treat "/pattern/" as a case-insensitive regex over the text.
    var source = arg.slice(1, arg.length - 1)
    return str.replace(new RegExp(source, 'gi'), function (hit) {
      return open + hit + close
    })
  }

  // Case-insensitive literal search: split the lowercased text, then map
  // the offsets back onto the original string so its casing is preserved.
  var segments = str.toLowerCase().split(arg.toLowerCase())
  var pos = 0

  return segments.map(function (segment) {
    var original = str.slice(pos, pos + segment.length)
    var hit = str.slice(pos + segment.length, pos + segment.length + arg.length)
    pos += segment.length + arg.length
    return original + open + hit + close
  }).join('')
}

// Replaces the private marker characters inserted by `addColorMarker`
// with real ANSI escape sequences.
function colorize (line) {
  colors.forEach(function (code, idx) {
    var marker = String.fromCharCode(idx + 1)
    line = line.split(marker).join('\u001B[' + code + 'm')
  })
  return line.split('\u0000').join('\u001B[0m')
}
+
// Best-effort terminal width: the tty column count when stdout is a tty,
// otherwise Infinity (meaning "do not trim").
function getMaxWidth () {
  var cols
  try {
    var tty = require('tty')
    var out = process.stdout
    if (tty.isatty(out.fd)) {
      cols = out.getWindowSize()[0]
      if (cols === 0) cols = Infinity
    } else {
      cols = Infinity
    }
  } catch (ex) {
    // Any failure probing the terminal means no trimming.
    cols = Infinity
  }
  return cols
}

// Truncates every line of `str` to the current terminal width.
function trimToMaxWidth (str) {
  var width = getMaxWidth()
  var trimmed = []
  str.split('\n').forEach(function (line) {
    trimmed.push(line.slice(0, width))
  })
  return trimmed.join('\n')
}
+
// Marks every search term in `str`, converts the markers to ANSI color
// sequences, and trims surrounding whitespace.
function highlightSearchTerms (str, terms) {
  var marked = str
  terms.forEach(function (term, idx) {
    marked = addColorMarker(marked, term, idx)
  })
  return colorize(marked).trim()
}
+
// Reduces a raw registry document to the handful of fields search output
// cares about. `url` is only kept when the document lists no versions.
function stripData (data, opts) {
  var options = opts || {}
  var versionKeys = Object.keys(data.versions || {})

  var modified = data.time && data.time.modified
  var date = 'prehistoric'
  if (modified) {
    // e.g. "2016-12-18T20:22:09.000Z" -> "2016-12-18 " (date portion only)
    date = new Date(modified).toISOString()
      .split('T').join(' ')
      .replace(/:[0-9]{2}\.[0-9]{3}Z$/, '')
      .slice(0, -5)
  }

  return {
    name: data.name,
    description: options.description ? data.description : '',
    maintainers: (data.maintainers || []).map(function (m) {
      return '=' + m.name
    }),
    url: versionKeys.length ? null : data.url,
    keywords: data.keywords || [],
    version: versionKeys[0] || [],
    time: date
  }
}
diff --git a/deps/npm/lib/search/package-filter.js b/deps/npm/lib/search/package-filter.js
new file mode 100644
index 0000000000..ac2950f46b
--- /dev/null
+++ b/deps/npm/lib/search/package-filter.js
@@ -0,0 +1,41 @@
+'use strict'
+
module.exports = filter

// True when `data` is a package document that matches every `include`
// word and none of the `exclude` words; non-objects never match.
function filter (data, include, exclude, opts) {
  if (typeof data !== 'object') return false
  return filterWords(data, include, exclude, opts)
}
+
// Flattens the searchable fields of a package document into one lowercase,
// space-separated string.
function getWords (data, opts) {
  var words = [data.name]
  if (opts && opts.description) words = words.concat(data.description)
  words = words.concat((data.maintainers || []).map(function (m) {
    return '=' + m.name
  }))
  // Falsy when there are no versions or no url; filtered out below.
  words = words.concat(
    data.versions && data.versions.length && data.url && ('<' + data.url + '>')
  )
  words = words.concat(data.keywords || [])
  return words
    .map(function (w) { return w && w.trim && w.trim() })
    .filter(function (w) { return w })
    .join(' ')
    .toLowerCase()
}

// True when the document's word blob matches every include pattern and
// no exclude pattern.
function filterWords (data, include, exclude, opts) {
  var words = getWords(data, opts)
  var matchesAll = include.every(function (pattern) {
    return match(words, pattern)
  })
  if (!matchesAll) return false
  return !exclude.some(function (pattern) {
    return match(words, pattern)
  })
}

// Matches `pattern` against `words`: a "/pattern/"-style string becomes a
// RegExp, anything else is a plain substring test.
function match (words, pattern) {
  if (pattern.charAt(0) !== '/') {
    return words.indexOf(pattern) !== -1
  }
  var trimmed = pattern.replace(/\/$/, '')
  return words.match(new RegExp(trimmed.slice(1)))
}
diff --git a/deps/npm/lib/shrinkwrap.js b/deps/npm/lib/shrinkwrap.js
index 2e9daf0d07..57ad0ab5d9 100644
--- a/deps/npm/lib/shrinkwrap.js
+++ b/deps/npm/lib/shrinkwrap.js
@@ -36,10 +36,10 @@ function shrinkwrap (args, silent, cb) {
}
var packagePath = path.join(npm.localPrefix, 'package.json')
- var dev = !!npm.config.get('dev') || /^dev(elopment)?$/.test(npm.config.get('also'))
+ var prod = npm.config.get('production') || /^prod/.test(npm.config.get('only'))
readPackageJson(packagePath, iferr(cb, function (pkg) {
- createShrinkwrap(npm.localPrefix, pkg, dev, silent, cb)
+ createShrinkwrap(npm.localPrefix, pkg, !prod, silent, cb)
}))
}
diff --git a/deps/npm/lib/tag.js b/deps/npm/lib/tag.js
deleted file mode 100644
index 01db4d8ea6..0000000000
--- a/deps/npm/lib/tag.js
+++ /dev/null
@@ -1,42 +0,0 @@
-// turns out tagging isn't very complicated
-// all the smarts are in the couch.
-module.exports = tag
-tag.usage = '[DEPRECATED] npm tag <name>@<version> [<tag>]' +
- '\nSee `dist-tag`'
-
-tag.completion = require('./unpublish.js').completion
-
-var npm = require('./npm.js')
-var mapToRegistry = require('./utils/map-to-registry.js')
-var npa = require('npm-package-arg')
-var semver = require('semver')
-var log = require('npmlog')
-
-function tag (args, cb) {
- var thing = npa(args.shift() || '')
- var project = thing.name
- var version = thing.rawSpec
- var t = args.shift() || npm.config.get('tag')
-
- t = t.trim()
-
- if (!project || !version || !t) return cb('Usage:\n' + tag.usage)
-
- if (semver.validRange(t)) {
- var er = new Error('Tag name must not be a valid SemVer range: ' + t)
- return cb(er)
- }
-
- log.warn('tag', 'This command is deprecated. Use `npm dist-tag` instead.')
-
- mapToRegistry(project, npm.config, function (er, uri, auth) {
- if (er) return cb(er)
-
- var params = {
- version: version,
- tag: t,
- auth: auth
- }
- npm.registry.tag(uri, params, cb)
- })
-}
diff --git a/deps/npm/lib/utils/error-handler.js b/deps/npm/lib/utils/error-handler.js
index 6eac7e9577..6b2bf1c72d 100644
--- a/deps/npm/lib/utils/error-handler.js
+++ b/deps/npm/lib/utils/error-handler.js
@@ -38,7 +38,7 @@ process.on('exit', function (code) {
wroteLogFile = false
}
if (code) {
- log.error('code', code)
+ log.verbose('code', code)
}
}
diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js
index adf534d882..a1d1625153 100644
--- a/deps/npm/lib/utils/lifecycle.js
+++ b/deps/npm/lib/utils/lifecycle.js
@@ -142,13 +142,37 @@ function lifecycle_ (pkg, stage, wd, env, unsafe, failOk, cb) {
}
function shouldPrependCurrentNodeDirToPATH () {
+ var cfgsetting = npm.config.get('scripts-prepend-node-path')
+ if (cfgsetting === false) return false
+ if (cfgsetting === true) return true
+
+ var isDifferentNodeInPath
+
var isWindows = process.platform === 'win32'
+ var foundExecPath
try {
- var foundExecPath = which.sync(path.basename(process.execPath), {pathExt: isWindows ? ';' : ':'})
- return process.execPath.toUpperCase() !== foundExecPath.toUpperCase()
+ foundExecPath = which.sync(path.basename(process.execPath), {pathExt: isWindows ? ';' : ':'})
+ // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable.
+ isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !==
+ fs.realpathSync(foundExecPath).toUpperCase()
} catch (e) {
- return true
+ isDifferentNodeInPath = true
+ }
+
+ if (cfgsetting === 'warn-only') {
+ if (isDifferentNodeInPath && !shouldPrependCurrentNodeDirToPATH.hasWarned) {
+ if (foundExecPath) {
+ log.warn('lifecycle', 'The node binary used for scripts is', foundExecPath, 'but npm is using', process.execPath, 'itself. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
+ } else {
+ log.warn('lifecycle', 'npm is using', process.execPath, 'but there is no node binary in the current PATH. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
+ }
+ shouldPrependCurrentNodeDirToPATH.hasWarned = true
+ }
+
+ return false
}
+
+ return isDifferentNodeInPath
}
function validWd (d, cb) {
@@ -308,9 +332,6 @@ function makeEnv (data, prefix, env) {
}
}
- // npat asks for tap output
- if (npm.config.get('npat')) env.TAP = 1
-
// express and others respect the NODE_ENV value.
if (npm.config.get('production')) env.NODE_ENV = 'production'
} else if (!data.hasOwnProperty('_lifecycleEnv')) {
diff --git a/deps/npm/lib/utils/unsupported.js b/deps/npm/lib/utils/unsupported.js
new file mode 100644
index 0000000000..5e74e3490f
--- /dev/null
+++ b/deps/npm/lib/utils/unsupported.js
@@ -0,0 +1,33 @@
+'use strict'
+var semver = require('semver')
+var supportedNode = '0.12 || >= 4'
+var knownBroken = '>=0.1 <=0.7'
+
// Classifies a node version string: `broken` versions are known unable to
// run npm at all; `unsupported` versions fall outside the supported range.
var checkVersion = exports.checkVersion = function (version) {
  // Strip any prerelease suffix (e.g. "8.0.0-pre") so the semver range
  // checks see a plain x.y.z version.
  var bare = version.replace(/-.*$/, '')
  return {
    broken: semver.satisfies(bare, knownBroken),
    unsupported: !semver.satisfies(bare, supportedNode)
  }
}
+
+exports.checkForBrokenNode = function () {
+ var nodejs = checkVersion(process.version)
+ if (nodejs.broken) {
+ console.error('ERROR: npm is known not to run on Node.js ' + process.version)
+ console.error("You'll need to upgrade to a newer version in order to use this")
+ console.error('version of npm. You can find the latest version at https://nodejs.org/')
+ process.exit(1)
+ }
+}
+
+exports.checkForUnsupportedNode = function () {
+ var nodejs = checkVersion(process.version)
+ if (nodejs.unsupported) {
+ var log = require('npmlog')
+ log.warn('npm', 'npm does not support Node.js ' + process.version)
+ log.warn('npm', 'You should probably upgrade to a newer version of node as we')
+ log.warn('npm', "can't make any promises that npm will work with this version.")
+ log.warn('npm', 'You can find the latest version at https://nodejs.org/')
+ }
+}