summaryrefslogtreecommitdiff
path: root/test/parallel/test-fs-read-stream-concurrent-reads.js
diff options
context:
space:
mode:
authorAnna Henningsen <anna@addaleax.net>2018-07-25 14:00:02 +0200
committerAnna Henningsen <anna@addaleax.net>2018-07-29 17:11:30 +0200
commite3a47025ac0c8e89b73b91b137bb70f6b2f3d73a (patch)
tree47d8145f855fd4e3a3838d3943c07325b6591412 /test/parallel/test-fs-read-stream-concurrent-reads.js
parent07cb69720bec251a1c450b3770db2136ac009778 (diff)
downloadandroid-node-v8-e3a47025ac0c8e89b73b91b137bb70f6b2f3d73a.tar.gz
android-node-v8-e3a47025ac0c8e89b73b91b137bb70f6b2f3d73a.tar.bz2
android-node-v8-e3a47025ac0c8e89b73b91b137bb70f6b2f3d73a.zip
fs: reduce memory retention when streaming small files
Fixes: https://github.com/nodejs/node/issues/21967 PR-URL: https://github.com/nodejs/node/pull/21968 Reviewed-By: Сковорода Никита Андреевич <chalkerx@gmail.com> Reviewed-By: Matteo Collina <matteo.collina@gmail.com> Reviewed-By: Benjamin Gruenbaum <benjamingr@gmail.com> Reviewed-By: Trivikram Kamat <trivikr.dev@gmail.com> Reviewed-By: James M Snell <jasnell@gmail.com>
Diffstat (limited to 'test/parallel/test-fs-read-stream-concurrent-reads.js')
-rw-r--r--test/parallel/test-fs-read-stream-concurrent-reads.js47
1 files changed, 47 insertions, 0 deletions
diff --git a/test/parallel/test-fs-read-stream-concurrent-reads.js b/test/parallel/test-fs-read-stream-concurrent-reads.js
new file mode 100644
index 0000000000..32a6cd6236
--- /dev/null
+++ b/test/parallel/test-fs-read-stream-concurrent-reads.js
@@ -0,0 +1,47 @@
+'use strict';
+const common = require('../common');
+const fixtures = require('../common/fixtures');
+const assert = require('assert');
+const fs = require('fs');
+
+// Test that concurrent file read streams don’t interfere with each other’s
+// contents, and that the chunks generated by the reads only retain a
+// 'reasonable' amount of memory.
+
+// Refs: https://github.com/nodejs/node/issues/21967
+
// Use a small fixture whose bytes are non-uniform, so corruption between
// overlapping reads would be detectable in the final comparison.
const filename = fixtures.path('loop.js');
const content = fs.readFileSync(filename);

const N = 1000;          // Total number of read streams to run.
let done = 0;            // Streams that have fully ended.
let started = 0;         // Streams that have been created so far.

// Distinct backing ArrayBuffers seen across all chunks; used at the end to
// measure how much memory the retained chunks actually pin.
const arrayBuffers = new Set();
+
// Start one streaming read of the fixture file. Every 'data' chunk is
// collected for a final content check, and its backing ArrayBuffer is
// recorded so the total retained memory can be measured once all reads
// finish.
function startRead() {
  ++started;
  const chunks = [];
  fs.createReadStream(filename)
    .on('data', (chunk) => {
      chunks.push(chunk);
      arrayBuffers.add(chunk.buffer);
      // Chain the next read off this one so only a few streams are in
      // flight at any given time (see the startup loop below).
      if (started < N)
        startRead();
    })
    .on('end', common.mustCall(() => {
      // Concurrent reads must not interfere with each other’s contents.
      assert.deepStrictEqual(Buffer.concat(chunks), content);
      if (++done === N) {
        // Sum the sizes of all distinct backing ArrayBuffers. Pass an
        // explicit initial value so reduce() cannot throw on an empty set.
        const retainedMemory =
          [...arrayBuffers].map((ab) => ab.byteLength)
            .reduce((a, b) => a + b, 0);
        // Chunks of small files should not each pin a full pool-sized
        // allocation; allow at most ~3x the logical data size.
        assert(retainedMemory / (N * content.length) <= 3,
               `Retaining ${retainedMemory} bytes in ABs for ${N} ` +
               `chunks of size ${content.length}`);
      }
    }));
}
+
// Kick off only a handful of reads up front — starting all N at once would
// require allocating a large amount of memory immediately.
let launches = 4;
while (launches-- > 0)
  startRead();