summaryrefslogtreecommitdiff
path: root/deps/node/deps/npm/node_modules/lockfile/speedtest.js
blob: 4433dce1d96169fc0701b0928a5fd9221d8571d1 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
const path = require('path');
const async = require('async');
const lf = require('lockfile');
const fs = require('fs');

// Number of lock/unlock cycles to run; third CLI arg, defaulting to 300.
const n = +process.argv[3] || 300;
// Work queue: the integers 0 .. n-1, one lock/unlock cycle per element.
// Array.from with a mapper is the idiomatic ES6 range, replacing the old
// Array.apply(null, {length: n}).map(...) hack.
const a = Array.from({ length: n }, (_, i) => i);
// Single shared lock file that every iteration contends for.
const file = path.resolve(__dirname, 'speed-test.lock');

// Remove any stale lock left behind by a previous run so the benchmark
// starts clean. A missing file (ENOENT) is the normal case and is ignored;
// any other failure (e.g. a permissions error) would silently skew or break
// the benchmark, so rethrow it instead of swallowing every error.
try {
    fs.unlinkSync(file);
}
catch (e) {
    if (e.code !== 'ENOENT') {
        throw e;
    }
}


/// NOTE: this should run in about 30ms on a SSD Ubuntu 16.04, that is fast, because we are locking/unlocking 300 locks
/// *HOWEVER* if we change async.eachSeries to async.each, lockfile will barf immediately, and I can't get lockfile
/// to not barf, using any of the options {} available to lockfile#lock.


// Choose the iteration strategy from the second CLI argument:
// 'parallel' fires every lock attempt at once (async.each), anything
// else takes them one at a time (async.eachSeries).
const parallel = process.argv[2] === 'parallel';

const msg = parallel ? 'parallel' : 'series';
const fn = parallel ? async.each : async.eachSeries;

// Timestamp taken just before the benchmark loop starts.
const start = Date.now();
console.log(' => locking/unlocking ' + a.length + ' times, in ' + msg);

// Run one lock/unlock cycle per element of `a`, using whichever strategy
// (series or parallel) was selected above, then report the elapsed time.
fn(a, function (val, cb) {
    console.log('try %d', val)

    // Generous retry budget so contention in parallel mode has a chance
    // to resolve rather than failing immediately.
    lf.lock(file, { retries: n * 3 }, function (err) {
        if (err) {
            return cb(err);
        }
        console.log('complete %d', val)
        lf.unlock(file, cb);
    });

}, function complete(err) {
    // Any single failed cycle aborts the whole benchmark.
    if (err) {
        throw err;
    }

    console.log(' => Time required for lockfile => ', Date.now() - start, 'ms');
    process.exit(0);
});