Skip to content

Commit d81adf8

Browse files
committed
stream: reject SharedArrayBuffer-backed chunks in web compression
Per the Compression Streams spec, chunks must be BufferSource, which excludes SharedArrayBuffer-backed views. Add validation to both CompressionStream and DecompressionStream that rejects chunks backed by SharedArrayBuffer with a typed TypeError.
1 parent 46c309e commit d81adf8

3 files changed

Lines changed: 55 additions & 23 deletions

File tree

lib/internal/webstreams/compression.js

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
'use strict';
22

33
const {
4+
FunctionPrototypeCall,
45
ObjectDefineProperties,
56
SymbolToStringTag,
67
} = primordials;
@@ -11,11 +12,22 @@ const {
1112

1213
const { customInspect } = require('internal/webstreams/util');
1314

15+
const {
16+
isArrayBufferView,
17+
isSharedArrayBuffer,
18+
} = require('internal/util/types');
19+
1420
const {
1521
customInspectSymbol: kInspect,
1622
kEnumerableProperty,
1723
} = require('internal/util');
1824

25+
const {
26+
codes: {
27+
ERR_INVALID_ARG_TYPE,
28+
},
29+
} = require('internal/errors');
30+
1931
const { createEnumConverter } = require('internal/webidl');
2032

2133
let zlib;
@@ -24,6 +36,25 @@ function lazyZlib() {
2436
return zlib;
2537
}
2638

39+
// Per the Compression Streams spec, chunks must be BufferSource
40+
// (ArrayBuffer or ArrayBufferView not backed by SharedArrayBuffer).
41+
// Override the handle's write method to reject shared buffer views
42+
// synchronously which the web stream adapter's try/catch will
43+
// propagate as a write rejection.
44+
// Per the Compression Streams spec, chunks must be BufferSource
// (ArrayBuffer or ArrayBufferView not backed by SharedArrayBuffer).
// Wrap the handle's write method so that a shared-buffer-backed view
// throws synchronously; the web stream adapter's try/catch turns the
// throw into a write rejection.
function addBufferSourceValidation(handle) {
  const originalWrite = handle.write;
  handle.write = function(chunk, encoding, callback) {
    // Guard: an ArrayBufferView whose backing store is a
    // SharedArrayBuffer is not a valid BufferSource per the spec.
    const isSharedBackedView =
      isArrayBufferView(chunk) && isSharedArrayBuffer(chunk.buffer);
    if (isSharedBackedView) {
      throw new ERR_INVALID_ARG_TYPE(
        'chunk',
        ['Buffer', 'TypedArray', 'DataView'],
        chunk,
      );
    }
    // Delegate to the original write via the primordial to avoid
    // user-tamperable Function.prototype.call.
    return FunctionPrototypeCall(originalWrite, this, chunk, encoding, callback);
  };
}
57+
2758
const formatConverter = createEnumConverter('CompressionFormat', [
2859
'deflate',
2960
'deflate-raw',
@@ -62,6 +93,7 @@ class CompressionStream {
6293
this.#handle = lazyZlib().createBrotliCompress();
6394
break;
6495
}
96+
addBufferSourceValidation(this.#handle);
6597
this.#transform = newReadableWritablePairFromDuplex(this.#handle);
6698
}
6799

@@ -123,6 +155,7 @@ class DecompressionStream {
123155
});
124156
break;
125157
}
158+
addBufferSourceValidation(this.#handle);
126159
this.#transform = newReadableWritablePairFromDuplex(this.#handle);
127160

128161
this.#handle.on('error', (err) => {

test/parallel/test-webstreams-compression-bad-chunks.js

Lines changed: 22 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,15 +9,27 @@ const { CompressionStream, DecompressionStream } = require('stream/web');
99
// on both the write and the read side, instead of hanging.
1010

1111
const badChunks = [
12-
{ name: 'undefined', value: undefined },
13-
{ name: 'null', value: null },
14-
{ name: 'number', value: 3.14 },
15-
{ name: 'object', value: {} },
16-
{ name: 'array', value: [65] },
12+
{ name: 'undefined', value: undefined, code: 'ERR_INVALID_ARG_TYPE' },
13+
{ name: 'null', value: null, code: 'ERR_STREAM_NULL_VALUES' },
14+
{ name: 'number', value: 3.14, code: 'ERR_INVALID_ARG_TYPE' },
15+
{ name: 'object', value: {}, code: 'ERR_INVALID_ARG_TYPE' },
16+
{ name: 'array', value: [65], code: 'ERR_INVALID_ARG_TYPE' },
17+
{
18+
name: 'SharedArrayBuffer',
19+
value: new SharedArrayBuffer(1),
20+
code: 'ERR_INVALID_ARG_TYPE',
21+
},
22+
{
23+
name: 'Uint8Array backed by SharedArrayBuffer',
24+
value: new Uint8Array(new SharedArrayBuffer(1)),
25+
code: 'ERR_INVALID_ARG_TYPE',
26+
},
1727
];
1828

1929
for (const format of ['deflate', 'deflate-raw', 'gzip', 'brotli']) {
20-
for (const { name, value } of badChunks) {
30+
for (const { name, value, code } of badChunks) {
31+
const expected = { name: 'TypeError', code };
32+
2133
test(`CompressionStream rejects bad chunk (${name}) for ${format}`, async () => {
2234
const cs = new CompressionStream(format);
2335
const writer = cs.writable.getWriter();
@@ -26,8 +38,8 @@ for (const format of ['deflate', 'deflate-raw', 'gzip', 'brotli']) {
2638
const writePromise = writer.write(value);
2739
const readPromise = reader.read();
2840

29-
await assert.rejects(writePromise, { name: 'TypeError' });
30-
await assert.rejects(readPromise, { name: 'TypeError' });
41+
await assert.rejects(writePromise, expected);
42+
await assert.rejects(readPromise, expected);
3143
});
3244

3345
test(`DecompressionStream rejects bad chunk (${name}) for ${format}`, async () => {
@@ -38,8 +50,8 @@ for (const format of ['deflate', 'deflate-raw', 'gzip', 'brotli']) {
3850
const writePromise = writer.write(value);
3951
const readPromise = reader.read();
4052

41-
await assert.rejects(writePromise, { name: 'TypeError' });
42-
await assert.rejects(readPromise, { name: 'TypeError' });
53+
await assert.rejects(writePromise, expected);
54+
await assert.rejects(readPromise, expected);
4355
});
4456
}
4557
}

test/wpt/status/compression.json

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,7 @@
11
{
2-
"compression-bad-chunks.any.js": {
3-
"fail": {
4-
"note": "Node.js accepts SharedArrayBuffer-backed TypedArrays",
5-
"expected": [
6-
"chunk of type shared Uint8Array should error the stream for deflate",
7-
"chunk of type shared Uint8Array should error the stream for deflate-raw",
8-
"chunk of type shared Uint8Array should error the stream for gzip",
9-
"chunk of type shared Uint8Array should error the stream for brotli"
10-
]
11-
}
12-
},
132
"decompression-bad-chunks.any.js": {
143
"fail": {
15-
"note": "Node.js accepts SharedArrayBuffer-backed TypedArrays and rejects Error instead of TypeError",
164
"expected": [
17-
"chunk of type shared Uint8Array should error the stream for brotli",
185
"chunk of type invalid deflate bytes should error the stream for brotli",
196
"chunk of type invalid gzip bytes should error the stream for brotli"
207
]

0 commit comments

Comments
 (0)