summaryrefslogtreecommitdiff
path: root/test/parallel/test-zlib-flush-write-sync-interleaved.js
blob: 9fed592a34bb1b5cdc0d881324db0b74c71ab0bb (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
'use strict';
// `common` supplies Node-core test helpers; common.mustCall() wraps a
// callback and fails the test if it is not invoked (exactly once) by exit.
const common = require('../common');
const assert = require('assert');
const { createGzip, createGunzip, Z_PARTIAL_FLUSH } = require('zlib');

// Verify that .flush() behaves like .write() in terms of ordering, e.g. in
// a sequence like .write() + .flush() + .write() + .flush() each .flush() call
// only affects the data written before it.
// Refs: https://github.com/nodejs/node/issues/28478

const compress = createGzip();
const decompress = createGunzip();
decompress.setEncoding('utf8'); // so decompress.read() yields strings, not Buffers

// Chronological log of write/flush/read events, asserted against on exit.
const events = [];
// Compressed output captured after each flush, replayed into `decompress` later.
const compressedChunks = [];

// Interleave .write() and .flush() so each flush should cover exactly the
// one chunk written immediately before it.
['abc', 'def', 'ghi'].forEach((chunk) => {
  compress.write(chunk, common.mustCall(() => events.push({ written: chunk })));
  compress.flush(Z_PARTIAL_FLUSH, common.mustCall(() => {
    events.push('flushed');
    // Grab whatever compressed output the flush produced (if any) so it can
    // be decompressed chunk-by-chunk later.
    const compressed = compress.read();
    if (compressed !== null)
      compressedChunks.push(compressed);
  }));
});

// Finish the compressed stream; once its end callback fires, start replaying
// the captured compressed chunks through the decompressor.
compress.end(common.mustCall(() => {
  events.push('compress end');
  writeToDecompress();
}));

function writeToDecompress() {
  // Write the compressed chunks to a decompressor, one by one, in order to
  // verify that the flushes actually worked: each write should immediately
  // make one original plaintext chunk readable.
  const next = compressedChunks.shift();
  if (next === undefined) {
    // All chunks replayed; finish the decompressor.
    return decompress.end();
  }
  decompress.write(next, common.mustCall(() => {
    events.push({ read: decompress.read() });
    writeToDecompress(); // recurse to handle the remaining chunks in order
  }));
}

// On exit, verify the exact interleaving: each chunk's write callback ran
// before the corresponding flush callback (flush only covers prior writes),
// and each decompressor write yielded exactly one original chunk, proving
// every flush emitted an independently decodable unit.
process.on('exit', () => {
  assert.deepStrictEqual(events, [
    { written: 'abc' },
    'flushed',
    { written: 'def' },
    'flushed',
    { written: 'ghi' },
    'flushed',
    'compress end',
    { read: 'abc' },
    { read: 'def' },
    { read: 'ghi' }
  ]);
});