// META: global=window,worker
// META: script=../resources/test-utils.js
// META: script=../resources/rs-utils.js
'use strict';

test(() => {
  new TransformStream({ transform() { } });
}, 'TransformStream can be constructed with a transform function');

test(() => {
  new TransformStream();
  new TransformStream({});
}, 'TransformStream can be constructed with no transform function');

test(() => {
  const ts = new TransformStream({ transform() { } });

  const writer = ts.writable.getWriter();
  assert_equals(writer.desiredSize, 1, 'writer.desiredSize should be 1');
}, 'TransformStream writable starts in the writable state');

promise_test(() => {
  const ts = new TransformStream();

  const writer = ts.writable.getWriter();
  writer.write('a');
  assert_equals(writer.desiredSize, 0, 'writer.desiredSize should be 0 after write()');

  return ts.readable.getReader().read().then(result => {
    assert_equals(result.value, 'a',
      'result from reading the readable is the same as was written to writable');
    assert_false(result.done, 'stream should not be done');

    return delay(0).then(() => assert_equals(writer.desiredSize, 1, 'desiredSize should be 1 again'));
  });
}, 'Identity TransformStream: can read from readable what is put into writable');

promise_test(() => {
  let c;
  const ts = new TransformStream({
    start(controller) {
      c = controller;
    },
    transform(chunk) {
      c.enqueue(chunk.toUpperCase());
    }
  });

  const writer = ts.writable.getWriter();
  writer.write('a');

  return ts.readable.getReader().read().then(result => {
    assert_equals(result.value, 'A',
      'result from reading the readable is the transformation of what was written to writable');
    assert_false(result.done, 'stream should not be done');
  });
}, 'Uppercaser sync TransformStream: can read from readable transformed version of what is put into writable');

promise_test(() => {
  let c;
  const ts = new TransformStream({
    start(controller) {
      c = controller;
    },
    transform(chunk) {
      c.enqueue(chunk.toUpperCase());
      c.enqueue(chunk.toUpperCase());
    }
  });

  const writer = ts.writable.getWriter();
  writer.write('a');

  const reader = ts.readable.getReader();

  return reader.read().then(result1 => {
    assert_equals(result1.value, 'A',
      'the first chunk read is the transformation of the single chunk written');
    assert_false(result1.done, 'stream should not be done');

    return reader.read().then(result2 => {
      assert_equals(result2.value, 'A',
        'the second chunk read is also the transformation of the single chunk written');
      assert_false(result2.done, 'stream should not be done');
    });
  });
}, 'Uppercaser-doubler sync TransformStream: can read both chunks put into the readable');

promise_test(() => {
  let c;
  const ts = new TransformStream({
    start(controller) {
      c = controller;
    },
    transform(chunk) {
      return delay(0).then(() => c.enqueue(chunk.toUpperCase()));
    }
  });

  const writer = ts.writable.getWriter();
  writer.write('a');

  return ts.readable.getReader().read().then(result => {
    assert_equals(result.value, 'A',
      'result from reading the readable is the transformation of what was written to writable');
    assert_false(result.done, 'stream should not be done');
  });
}, 'Uppercaser async TransformStream: can read from readable transformed version of what is put into writable');

promise_test(() => {
  let doSecondEnqueue;
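  // doSecondEnqueue enqueues the second chunk on demand; returnFromTransform
  // resolves the promise returned from transform(), so the transform stays
  // pending until both chunks have been read.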
  let returnFromTransform;
  const ts = new TransformStream({
    transform(chunk, controller) {
      delay(0).then(() => controller.enqueue(chunk.toUpperCase()));
      doSecondEnqueue = () => controller.enqueue(chunk.toUpperCase());
      return new Promise(resolve => {
        returnFromTransform = resolve;
      });
    }
  });

  const reader = ts.readable.getReader();

  const writer = ts.writable.getWriter();
  writer.write('a');

  return reader.read().then(result1 => {
    assert_equals(result1.value, 'A',
      'the first chunk read is the transformation of the single chunk written');
    assert_false(result1.done, 'stream should not be done');
    doSecondEnqueue();

    return reader.read().then(result2 => {
      assert_equals(result2.value, 'A',
        'the second chunk read is also the transformation of the single chunk written');
      assert_false(result2.done, 'stream should not be done');
      returnFromTransform();
    });
  });
}, 'Uppercaser-doubler async TransformStream: can read both chunks put into the readable');

promise_test(() => {
  const ts = new TransformStream({ transform() { } });

  const writer = ts.writable.getWriter();
  writer.close();

  return Promise.all([writer.closed, ts.readable.getReader().closed]);
}, 'TransformStream: by default, closing the writable closes the readable (when there are no queued writes)');

promise_test(() => {
  let transformResolve;
  const transformPromise = new Promise(resolve => {
    transformResolve = resolve;
  });
  const ts = new TransformStream({
    transform() {
      return transformPromise;
    }
  }, undefined, { highWaterMark: 1 });

  const writer = ts.writable.getWriter();
  writer.write('a');
  writer.close();

  let rsClosed = false;
  ts.readable.getReader().closed.then(() => {
    rsClosed = true;
  });

  return delay(0).then(() => {
    assert_equals(rsClosed, false, 'readable is not closed after a tick');
    transformResolve();

    return writer.closed.then(() => {
      // TODO: Is this expectation correct?
      assert_equals(rsClosed, true, 'readable is closed at that point');
    });
  });
}, 'TransformStream: by default, closing the writable waits for transforms to finish before closing both');

promise_test(() => {
  let c;
  const ts = new TransformStream({
    start(controller) {
      c = controller;
    },
    transform() {
      c.enqueue('x');
      c.enqueue('y');
      return delay(0);
    }
  });

  const writer = ts.writable.getWriter();
  writer.write('a');
  writer.close();

  const readableChunks = readableStreamToArray(ts.readable);

  return writer.closed.then(() => {
    return readableChunks.then(chunks => {
      assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable');
    });
  });
}, 'TransformStream: by default, closing the writable closes the readable after sync enqueues and async done');

promise_test(() => {
  let c;
  const ts = new TransformStream({
    start(controller) {
      c = controller;
    },
    transform() {
      return delay(0)
        .then(() => c.enqueue('x'))
        .then(() => c.enqueue('y'))
        .then(() => delay(0));
    }
  });

  const writer = ts.writable.getWriter();
  writer.write('a');
  writer.close();

  const readableChunks = readableStreamToArray(ts.readable);

  return writer.closed.then(() => {
    return readableChunks.then(chunks => {
      assert_array_equals(chunks, ['x', 'y'], 'both enqueued chunks can be read from the readable');
    });
  });
}, 'TransformStream: by default, closing the writable closes the readable after async enqueues and async done');

promise_test(() => {
  let c;
  const ts = new TransformStream({
    suffix: '-suffix',

    start(controller) {
      c = controller;
      c.enqueue('start' + this.suffix);
    },

    transform(chunk) {
      c.enqueue(chunk + this.suffix);
    },

    flush() {
      c.enqueue('flushed' + this.suffix);
    }
  });

  const writer = ts.writable.getWriter();
  writer.write('a');
  writer.close();

  const readableChunks = readableStreamToArray(ts.readable);

  return writer.closed.then(() => {
    return readableChunks.then(chunks => {
      assert_array_equals(chunks, ['start-suffix', 'a-suffix', 'flushed-suffix'], 'all enqueued chunks have suffixes');
    });
  });
}, 'Transform stream should call transformer methods as methods');

promise_test(() => {
  function functionWithOverloads() {}
  functionWithOverloads.apply = () => assert_unreached('apply() should not be called');
  functionWithOverloads.call = () => assert_unreached('call() should not be called');
  const ts = new TransformStream({
    start: functionWithOverloads,
    transform: functionWithOverloads,
    flush: functionWithOverloads
  });
  const writer = ts.writable.getWriter();
  writer.write('a');
  writer.close();

  return readableStreamToArray(ts.readable);
}, 'methods should not have .apply() or .call() called');

promise_test(t => {
  let startCalled = false;
  let startDone = false;
  let transformDone = false;
  let flushDone = false;
  const ts = new TransformStream({
    start() {
      startCalled = true;
      return flushAsyncEvents().then(() => {
        startDone = true;
      });
    },
    transform() {
      return t.step(() => {
        assert_true(startDone, 'transform() should not be called until the promise returned from start() has resolved');
        return flushAsyncEvents().then(() => {
          transformDone = true;
        });
      });
    },
    flush() {
      return t.step(() => {
        assert_true(transformDone,
          'flush() should not be called until the promise returned from transform() has resolved');
        return flushAsyncEvents().then(() => {
          flushDone = true;
        });
      });
    }
  }, undefined, { highWaterMark: 1 });

  assert_true(startCalled, 'start() should be called synchronously');

  const writer = ts.writable.getWriter();
  const writePromise = writer.write('a');
  return writer.close().then(() => {
    assert_true(flushDone, 'promise returned from flush() should have resolved');
    return writePromise;
  });
}, 'TransformStream start, transform, and flush should be strictly ordered');

promise_test(() => {
  let transformCalled = false;
  const ts = new TransformStream({
    transform() {
      transformCalled = true;
    }
  }, undefined, { highWaterMark: Infinity });
  // transform() is only called synchronously when there is no backpressure and all microtasks have run.
  return delay(0).then(() => {
    const writePromise = ts.writable.getWriter().write();
    assert_true(transformCalled, 'transform() should have been called');
    return writePromise;
  });
}, 'it should be possible to call transform() synchronously');

promise_test(() => {
  const ts = new TransformStream({}, undefined, { highWaterMark: 0 });

  const writer = ts.writable.getWriter();
  writer.close();

  return Promise.all([writer.closed, ts.readable.getReader().closed]);
}, 'closing the writable should close the readable when there are no queued chunks, even with backpressure');

test(() => {
  new TransformStream({
    start(controller) {
      controller.terminate();
      assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw');
    }
  });
}, 'enqueue() should throw after controller.terminate()');

promise_test(() => {
  let controller;
  const ts = new TransformStream({
    start(c) {
      controller = c;
    }
  });
  const cancelPromise = ts.readable.cancel();
  assert_throws_js(TypeError, () => controller.enqueue(), 'enqueue should throw');
  return cancelPromise;
}, 'enqueue() should throw after readable.cancel()');

test(() => {
  new TransformStream({
    start(controller) {
      controller.terminate();
      controller.terminate();
    }
  });
}, 'controller.terminate() should do nothing the second time it is called');

promise_test(t => {
  let controller;
  const ts = new TransformStream({
    start(c) {
      controller = c;
    }
  });
  const cancelReason = { name: 'cancelReason' };
  const cancelPromise = ts.readable.cancel(cancelReason);
  controller.terminate();
  return Promise.all([
    cancelPromise,
    promise_rejects_exactly(t, cancelReason, ts.writable.getWriter().closed, 'closed should reject with cancelReason')
  ]);
}, 'terminate() should do nothing after readable.cancel()');

promise_test(() => {
  let calls = 0;
  new TransformStream({
    start() {
      ++calls;
    }
  });
  return flushAsyncEvents().then(() => {
    assert_equals(calls, 1, 'start() should have been called exactly once');
  });
}, 'start() should not be called twice');

test(() => {
  assert_throws_js(RangeError, () => new TransformStream({ readableType: 'bytes' }), 'constructor should throw');
}, 'specifying a defined readableType should throw');

test(() => {
  assert_throws_js(RangeError, () => new TransformStream({ writableType: 'bytes' }), 'constructor should throw');
}, 'specifying a defined writableType should throw');

test(() => {
  class Subclass extends TransformStream {
    extraFunction() {
      return true;
    }
  }
  assert_equals(
      Object.getPrototypeOf(Subclass.prototype), TransformStream.prototype,
      'Subclass.prototype\'s prototype should be TransformStream.prototype');
  assert_equals(Object.getPrototypeOf(Subclass), TransformStream,
      'Subclass\'s prototype should be TransformStream');
  const sub = new Subclass();
  assert_true(sub instanceof TransformStream,
      'Subclass object should be an instance of TransformStream');
  assert_true(sub instanceof Subclass,
      'Subclass object should be an instance of Subclass');
  const readableGetter = Object.getOwnPropertyDescriptor(
      TransformStream.prototype, 'readable').get;
  assert_equals(readableGetter.call(sub), sub.readable,
      'Subclass object should pass brand check');
  assert_true(sub.extraFunction(),
      'extraFunction() should be present on Subclass object');
}, 'Subclassing TransformStream should work');