diff options
Diffstat (limited to 'deps/node/deps/npm/test/tap/anon-cli-metrics.js')
-rw-r--r-- | deps/node/deps/npm/test/tap/anon-cli-metrics.js | 151 |
1 file changed, 151 insertions, 0 deletions
'use strict'
// Integration test for npm's anonymous CLI metrics: installs three local
// fixture packages (one that always fails, one that always succeeds, one
// that is merely slow) and verifies that success/failure tallies land in
// the cache's anonymous-cli-metrics.json and are reported to the mocked
// metrics registry.
var path = require('path')
var fs = require('graceful-fs')
var rimraf = require('rimraf')
var test = require('tap').test
var mr = require('npm-registry-mock')
var Tacks = require('tacks')
var File = Tacks.File
var Dir = Tacks.Dir
var common = require('../common-tap.js')

// Scratch layout: everything lives under a directory named after this file.
var basedir = path.join(__dirname, path.basename(__filename, '.js'))
var testdir = path.join(basedir, 'testdir')
var cachedir = path.join(basedir, 'cache')
var globaldir = path.join(basedir, 'global')
var tmpdir = path.join(basedir, 'tmp')
var metricsFile = path.join(cachedir, 'anonymous-cli-metrics.json')

// Shared spawn configuration for every `common.npm` invocation below.
var conf = {
  cwd: testdir,
  env: Object.assign({}, process.env, {
    npm_config_cache: cachedir,
    npm_config_tmp: tmpdir,
    npm_config_prefix: globaldir,
    npm_config_registry: common.registry,
    npm_config_metrics_registry: null,
    npm_config_loglevel: 'warn'
  })
}

var server

var fixture = new Tacks(Dir({
  cache: Dir(),
  global: Dir(),
  tmp: Dir(),
  testdir: Dir({
    failure: Dir({
      'package.json': File({
        name: 'failure',
        version: '1.0.0',
        scripts: {
          preinstall: 'false'
        }
      })
    }),
    success: Dir({
      'package.json': File({
        name: 'success',
        version: '1.0.0'
      })
    }),
    slow: Dir({
      'package.json': File({
        name: 'slow',
        version: '1.0.0',
        scripts: {
          preinstall: "node -e 'setTimeout(function(){}, 500)'"
        }
      })
    }),
    'package.json': File({
      name: 'anon-cli-metrics-test',
      version: '1.0.0'
    })
  })
}))

// Create the fixture tree from scratch.
function setup () {
  cleanup()
  fixture.create(basedir)
}

// Remove the fixture tree entirely.
function cleanup () {
  fixture.remove(basedir)
}

// Drop node_modules between installs so each test starts from a clean slate.
function reset () {
  rimraf.sync(testdir + '/node_modules')
}

// Parse the on-disk metrics tally written by npm into the cache dir.
function readMetrics () {
  return JSON.parse(fs.readFileSync(metricsFile))
}

test('setup', function (t) {
  setup()
  mr({port: common.port, throwOnUnmatched: true}, function (mockErr, mockServer) {
    if (mockErr) throw mockErr
    server = mockServer
    // Collapse the randomly generated metrics id so request expectations
    // can match on a stable ':id' placeholder.
    server.filteringPathRegEx(/([/]-[/]npm[/]anon-metrics[/]v1[/]).*/, '$1:id')
    // Strip the volatile from/to time window out of the reported payload.
    server.filteringRequestBody(function (body) {
      var payload = typeof body === 'string' ? JSON.parse(body) : body
      delete payload.from
      delete payload.to
      return JSON.stringify(payload)
    })
    t.done()
  })
})

test('record success', function (t) {
  // --no-send-metrics: the tally is still recorded locally, just not sent.
  var args = ['install', '--no-save', '--no-send-metrics', 'file:success']
  common.npm(args, conf, function (spawnErr, code, stdout, stderr) {
    if (spawnErr) throw spawnErr
    t.is(code, 0, 'always succeeding install succeeded')
    t.comment(stdout.trim())
    t.comment(stderr.trim())
    var recorded = readMetrics()
    t.is(recorded.metrics.successfulInstalls, 1, 'successes')
    t.is(recorded.metrics.failedInstalls, 0, 'failures')
    t.done()
  })
})

test('record failure', function (t) {
  reset()
  // The send attempt gets a 500, so the previous tally stays on disk and
  // this run's failure is added to it.
  server.put('/-/npm/anon-metrics/v1/:id', {
    successfulInstalls: 1,
    failedInstalls: 0
  }).reply(500, {ok: false})
  var args = ['install', '--no-save', '--send-metrics', 'file:failure']
  common.npm(args, conf, function (spawnErr, code, stdout, stderr) {
    if (spawnErr) throw spawnErr
    t.notEqual(code, 0, 'always failing install fails')
    t.comment(stdout.trim())
    t.comment(stderr.trim())
    var recorded = readMetrics()
    t.is(recorded.metrics.successfulInstalls, 1, 'successes')
    t.is(recorded.metrics.failedInstalls, 1, 'failures')
    t.done()
  })
})

test('report', function (t) {
  reset()
  // This time the send succeeds (200), so the accumulated tally is flushed
  // and the on-disk file restarts from this run's single success.
  server.put('/-/npm/anon-metrics/v1/:id', {
    successfulInstalls: 1,
    failedInstalls: 1
  }).reply(200, {ok: true})
  var args = ['install', '--no-save', '--send-metrics', 'file:slow']
  common.npm(args, conf, function (spawnErr, code, stdout, stderr) {
    if (spawnErr) throw spawnErr
    t.is(code, 0, 'command ran ok')
    t.comment(stdout.trim())
    t.comment(stderr.trim())
    // todo check mock registry for post
    var recorded = readMetrics()
    t.is(recorded.metrics.successfulInstalls, 1, 'successes')
    t.is(recorded.metrics.failedInstalls, 0, 'failures')
    t.done()
  })
})

test('cleanup', function (t) {
  server.close()
  cleanup()
  t.done()
})