• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1'use strict';
2const common = require('../common');
3const fixtures = require('../common/fixtures');
4const assert = require('assert');
5const fs = require('fs');

// Test that concurrent file read streams don’t interfere with each other’s
// contents, and that the chunks generated by the reads only retain a
// 'reasonable' amount of memory.

// Refs: https://github.com/nodejs/node/issues/21967

const filename = fixtures.path('loop.js');  // Some small non-homogeneous file.
// Expected contents of each completed read; every stream's concatenated
// chunks must deep-equal this buffer.
const content = fs.readFileSync(filename);

const N = 2000;      // Total number of sequential-ish reads to perform.
let started = 0;     // Reads kicked off so far (chained from 'end' handlers).
let done = 0;        // Reads fully completed and verified.

// Distinct ArrayBuffers backing the received chunks. A Set is used so a
// pooled allocation shared by several Buffer views is only counted once
// when summing retained memory at the end.
const arrayBuffers = new Set();
// Start one complete read of `filename`. On 'end', chain the next read
// (keeping the initial level of concurrency until `started` reaches N),
// verify the assembled contents, and — once all N reads are done — assert
// that the ArrayBuffers backing the chunks retain at most ~3x the total
// amount of data actually read.
function startRead() {
  ++started;
  const chunks = [];
  fs.createReadStream(filename)
    .on('data', (chunk) => {
      chunks.push(chunk);
      // Track the backing ArrayBuffer (not the Buffer view): multiple
      // chunks may share one pooled allocation, and the Set dedupes it.
      arrayBuffers.add(chunk.buffer);
    })
    .on('end', common.mustCall(() => {
      // Chain the next read before asserting so one failing read does not
      // stall the remaining scheduled reads.
      if (started < N)
        startRead();
      assert.deepStrictEqual(Buffer.concat(chunks), content);
      if (++done === N) {
        // Sum the sizes of all distinct backing ArrayBuffers. Pass an
        // initial value of 0 so reduce() cannot throw a TypeError if the
        // set were ever empty.
        const retainedMemory =
          [...arrayBuffers].map((ab) => ab.byteLength)
            .reduce((a, b) => a + b, 0);
        assert(retainedMemory / (N * content.length) <= 3,
               `Retaining ${retainedMemory} bytes in ABs for ${N} ` +
               `chunks of size ${content.length}`);
      }
    }));
}

// Don’t start the reads all at once – that way we would have to allocate
// a large amount of memory upfront. Seed a small pool of concurrent reads;
// each one chains its successor from its own 'end' handler.
const initialConcurrency = 6;
for (let i = 0; i < initialConcurrency; i++)
  startRead();
48