path: root/test/pummel/test-stream-pipe-multi.js
blob: 478b61c822d4e05949a0d2f27ba15ab105458d17
'use strict';
// Test that having a bunch of streams piping in parallel
// doesn't break anything.

require('../common');
const assert = require('assert');
const Stream = require('stream').Stream;
const rr = [];
const ww = [];
const cnt = 100;
const chunks = 1000;
const chunkSize = 250;
const data = Buffer.allocUnsafe(chunkSize);
let wclosed = 0;
let rclosed = 0;

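// A minimal old-style Stream that is both readable and writable, used as
// the source and the destination of each pipe.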
function FakeStream() {
  Stream.apply(this);
  this.wait = false;
  this.writable = true;
  this.readable = true;
}

FakeStream.prototype = Object.create(Stream.prototype);

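// Every other write() returns false to signal backpressure; on that same
// call a 'drain' is scheduled for the next tick so pipe() can resume the
// source.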
FakeStream.prototype.write = function(chunk) {
  console.error(this.ID, 'write', this.wait);
  if (this.wait) {
    process.nextTick(this.emit.bind(this, 'drain'));
  }
  this.wait = !this.wait;
  return this.wait;
};

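// Finish the stream: emit 'end' now and 'close' on the next tick.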
FakeStream.prototype.end = function() {
  this.emit('end');
  process.nextTick(this.close.bind(this));
};

// close() just emits 'close'; end() schedules it automatically, so closes
// happen without an explicit call.
FakeStream.prototype.close = function() {
  this.emit('close');
};


// expect all streams to close properly.
process.on('exit', function() {
  assert.strictEqual(wclosed, cnt, 'writable streams closed');
  assert.strictEqual(rclosed, cnt, 'readable streams closed');
});

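// Fill the shared chunk with predictable byte values.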
for (let i = 0; i < chunkSize; i++) {
  data[i] = i;
}

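// Create cnt readable/writable pairs, count their 'close' events, and
// pipe each reader into its matching writer.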
for (let i = 0; i < cnt; i++) {
  const r = new FakeStream();
  r.on('close', function() {
    console.error(this.ID, 'read close');
    rclosed++;
  });
  rr.push(r);

  const w = new FakeStream();
  w.on('close', function() {
    console.error(this.ID, 'write close');
    wclosed++;
  });
  ww.push(w);

  r.ID = w.ID = i;
  r.pipe(w);
}

// Now start passing data through, simulating a relatively fast
// async stream.
rr.forEach(function(r) {
  let remaining = chunks;
  let paused = false;

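  // Track pause/resume so step() stops scheduling more data while the
  // destination has us paused and picks up again on 'resume'.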
  r.on('pause', function() {
    paused = true;
  });

  r.on('resume', function() {
    paused = false;
    step();
  });

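  // Emit one chunk per tick until every chunk has been sent, then end the
  // reader; while paused, wait for 'resume' before continuing.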
  function step() {
    r.emit('data', data);
    if (--remaining === 0) {
      r.end();
      return;
    }
    if (paused) return;
    process.nextTick(step);
  }

  process.nextTick(step);
});