'use strict';

const common = require('../common');
const assert = require('assert');
const { Readable } = require('stream');

{
  // Check that strings are saved as Buffers
  const readable = new Readable({ read() {} });

  const string = 'abc';

  readable.on('data', common.mustCall((chunk) => {
    assert(Buffer.isBuffer(chunk));
    assert.strictEqual(chunk.toString('utf8'), string);
  }, 1));

  readable.unshift(string);

}

{
  // Check that unshifted data goes to the front of the internal buffer
  const readable = new Readable({ read() {} });
  const unshift = 'front';
  const push = 'back';

  const expected = [unshift, push];
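  // The unshifted chunk must be emitted first, followed by the pushed one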
  readable.on('data', common.mustCall((chunk) => {
    assert.strictEqual(chunk.toString('utf8'), expected.shift());
  }, 2));

  readable.push(push);
  readable.unshift(unshift);
}

{
  // Check that a string is decoded into a Buffer using the given encoding
  const readable = new Readable({ read() {} });

  const encoding = 'base64';
  const string = Buffer.from('abc').toString(encoding);

  readable.on('data', common.mustCall((chunk) => {
    assert.strictEqual(chunk.toString(encoding), string);
  }, 1));

  readable.unshift(string, encoding);

}

{
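  // Check that chunks are converted to the stream's encoding when unshifted
  // with a different encoding, and saved as-is when the encodings match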

  const streamEncoding = 'base64';

  function checkEncoding(readable) {

    // Chunk encodings to test
    const encodings = ['utf8', 'binary', 'hex', 'base64'];
    const expected = [];

    readable.on('data', common.mustCall((chunk) => {
      const { encoding, string } = expected.pop();
      assert.strictEqual(chunk.toString(encoding), string);
    }, encodings.length));

    for (const encoding of encodings) {
      const string = 'abc';

      // If the chunk encoding matches the stream's encoding the string is
      // saved as-is, otherwise it is converted to the stream's encoding
      const expect = encoding !== streamEncoding ?
        Buffer.from(string, encoding).toString(streamEncoding) : string;

      expected.push({ encoding, string: expect });

      readable.unshift(string, encoding);
    }
  }

  const r1 = new Readable({ read() {} });
  r1.setEncoding(streamEncoding);
  checkEncoding(r1);

  const r2 = new Readable({ read() {}, encoding: streamEncoding });
  checkEncoding(r2);

}

{
  // Both .push() and .unshift() should have the same behaviour:
  // when an encoding is set, each chunk should be emitted in that encoding
  const encoding = 'base64';

  function checkEncoding(readable) {
    const string = 'abc';
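    // With an encoding set on the stream, chunks are emitted as strings
    // already converted to that encoding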
    readable.on('data', common.mustCall((chunk) => {
      assert.strictEqual(chunk, Buffer.from(string).toString(encoding));
    }, 2));

    readable.push(string);
    readable.unshift(string);
  }

  const r1 = new Readable({ read() {} });
  r1.setEncoding(encoding);
  checkEncoding(r1);

  const r2 = new Readable({ read() {}, encoding });
  checkEncoding(r2);

}

{
  // Check that an error is thrown for invalid chunks

  const readable = new Readable({ read() {} });
  function checkError(fn) {
    assert.throws(fn, {
      code: 'ERR_INVALID_ARG_TYPE',
      name: 'TypeError'
    });
  }

  checkError(() => readable.unshift([]));
  checkError(() => readable.unshift({}));
  checkError(() => readable.unshift(0));

}

{
  // Check that object mode works
  const readable = new Readable({ objectMode: true, read() {} });

  const chunks = ['a', 1, {}, []];
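  // unshift() prepends each chunk, so they are emitted in reverse insertion
  // order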

  readable.on('data', common.mustCall((chunk) => {
    assert.strictEqual(chunk, chunks.pop());
  }, chunks.length));

  for (const chunk of chunks) {
    readable.unshift(chunk);
  }
}

{

  // Should not throw: https://github.com/nodejs/node/issues/27192
  const highWaterMark = 50;
  class ArrayReader extends Readable {
    constructor() {
      super({ highWaterMark });
      // The error only happened when buffering more data than the
      // highWaterMark
      this.buffer = new Array(highWaterMark * 2).fill(0).map(String);
    }
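    // Push chunks until backpressure is signalled; emit EOF right after the
    // last buffered chunk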
    _read(size) {
      while (this.buffer.length) {
        const chunk = this.buffer.shift();
        if (!this.buffer.length) {
          this.push(chunk);
          this.push(null);
          return;
        }
        if (!this.push(chunk))
          return;
      }
    }
  }

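  // Read one chunk, then unshift a new one and attach a 'data' listener;
  // per the issue linked above, this must not throw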
  function onRead() {
    while (null !== (stream.read())) {
      // Remove the 'readable' listener before unshifting
      stream.removeListener('readable', onRead);
      stream.unshift('a');
      stream.on('data', (chunk) => {
        console.log(chunk.length);
      });
      break;
    }
  }

  const stream = new ArrayReader();
  stream.once('readable', common.mustCall(onRead));
  stream.on('end', common.mustCall(() => {}));

}