'use strict';
const common = require('../common');

// Verify that fs.readFile returns the exact contents of files across a
// range of sizes chosen to straddle internal buffer boundaries (below,
// at, and just past 64K and 1M).

const tmpdir = require('../../test/common/tmpdir');
const assert = require('assert');
const fs = require('fs');
const path = require('path');

const prefix = `.removeme-fs-readfile-${process.pid}`;

tmpdir.refresh();

// Table of test-file sizes; each entry becomes one file on disk.
const sizes = [
  { suffix: '1K', len: 1024 },
  { suffix: '64K', len: 64 * 1024 },
  { suffix: '64KLessOne', len: (64 * 1024) - 1 },
  { suffix: '1M', len: 1 * 1024 * 1024 },
  { suffix: '1MPlusOne', len: (1 * 1024 * 1024) + 1 },
];

const fileInfo = sizes.map(({ suffix, len }) => ({
  name: path.join(tmpdir.path, `${prefix}-${suffix}.txt`),
  len,
}));

// Give every file unique, per-sector fill so that implementation bugs in
// readFile (e.g. reading the same region twice instead of two separate
// regions — which uniform 'aa...aa' contents would hide) are caught by
// the buffer comparison below.
const sectorSize = 512;
for (const info of fileInfo) {
  info.contents = Buffer.allocUnsafe(info.len);

  for (let offset = 0; offset < info.len; offset += sectorSize) {
    const fillByte = 256 * Math.random();
    // Clamp the final (possibly partial) sector to the file length.
    const end = Math.min(offset + sectorSize, info.len);
    info.contents.fill(fillByte, offset, end);
  }

  fs.writeFileSync(info.name, info.contents);
}
// All files are now populated.

// Read each file back asynchronously and verify an exact round-trip.
for (const info of fileInfo) {
  fs.readFile(info.name, common.mustCall((err, buf) => {
    console.log(`Validating readFile on file ${info.name} of length ${info.len}`);
    assert.ifError(err);
    assert.deepStrictEqual(buf, info.contents);
  }));
}