Skip to content
7 changes: 7 additions & 0 deletions lib/internal/webstreams/adapters.js
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,10 @@ const {
Buffer,
} = require('buffer');

const {
isArrayBuffer,
} = require('internal/util/types');

const {
AbortError,
ErrnoException,
Expand Down Expand Up @@ -213,6 +217,9 @@ function newWritableStreamFromStreamWritable(streamWritable) {
start(c) { controller = c; },

write(chunk) {
if (isArrayBuffer(chunk)) {
Comment thread
MattiasBuelens marked this conversation as resolved.
Outdated
chunk = new Uint8Array(chunk);
}
if (streamWritable.writableNeedDrain || !streamWritable.write(chunk)) {
backpressurePromise = PromiseWithResolvers();
return SafePromisePrototypeFinally(
Expand Down
71 changes: 71 additions & 0 deletions test/parallel/test-webstreams-compression-buffer-source.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
'use strict';
const common = require('../common');
const assert = require('assert');
const { DecompressionStream, CompressionStream } = require('stream/web');

// Minimal gzip-compressed bytes for "hello"
const compressedGzip = new Uint8Array([
31, 139, 8, 0, 0, 0, 0, 0, 0, 3,
203, 72, 205, 201, 201, 7, 0, 134, 166, 16, 54, 5, 0, 0, 0,
]);

async function testDecompressionAcceptsArrayBuffer() {
const ds = new DecompressionStream('gzip');
const reader = ds.readable.getReader();
const writer = ds.writable.getWriter();

const writePromise = writer.write(compressedGzip.buffer);
writer.close();

const chunks = [];
let done = false;
while (!done) {
const { value, done: d } = await reader.read();
if (value) chunks.push(value);
done = d;
}
Comment thread
Renegade334 marked this conversation as resolved.
Outdated
await writePromise;
const out = Buffer.concat(chunks.map((c) => Buffer.from(c)));
assert.strictEqual(out.toString(), 'hello');
}

async function testCompressionRoundTripWithArrayBuffer() {
const cs = new CompressionStream('gzip');
const ds = new DecompressionStream('gzip');

const csWriter = cs.writable.getWriter();
const csReader = cs.readable.getReader();
const dsWriter = ds.writable.getWriter();
const dsReader = ds.readable.getReader();

const input = new TextEncoder().encode('hello').buffer;

await csWriter.write(input);
csWriter.close();

const compressed = [];
let done = false;
while (!done) {
const { value, done: d } = await csReader.read();
if (value) compressed.push(value);
done = d;
}

for (const chunk of compressed) await dsWriter.write(chunk);
dsWriter.close();
Comment thread
MattiasBuelens marked this conversation as resolved.
Outdated

const out = [];
done = false;
while (!done) {
const { value, done: d } = await dsReader.read();
if (value) out.push(value);
done = d;
}
const result = Buffer.concat(out.map((c) => Buffer.from(c)));
assert.strictEqual(result.toString(), 'hello');
}

// Run both tests concurrently; common.mustCall() asserts the combined
// promise fulfills before the process exits (a rejection would surface as
// an unhandled rejection and fail the test).
Promise.all([
  testDecompressionAcceptsArrayBuffer(),
  testCompressionRoundTripWithArrayBuffer(),
]).then(common.mustCall());
3 changes: 0 additions & 3 deletions test/wpt/status/compression.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,6 @@
"decompression-bad-chunks.tentative.any.js": {
"skip": "Execution \"hangs\": ArrayBuffer and TypedArray are not accepted and throw, instead of rejecting, during writer.write"
},
"decompression-buffersource.tentative.any.js": {
"skip": "ArrayBuffer and TypedArray is not accepted and throws, instead of rejects during writer.write"
},
"compression-with-detach.tentative.window.js": {
"requires": ["crypto"]
},
Expand Down
Loading