// test-webstreams-compression-buffer-source.js (forked from nodejs/node)
// 56 lines (46 loc) · 1.59 KB
'use strict';
require('../common');
const assert = require('assert');
const test = require('node:test');
const { DecompressionStream, CompressionStream } = require('stream/web');
// Minimal gzip-compressed bytes for "hello":
// - 10-byte gzip header (magic 31,139; method 8 = deflate; OS byte 3 = Unix)
// - raw-deflate payload for "hello" (203, 72, 205, 201, 201, 7, 0)
// - 8-byte trailer: CRC32 of "hello" (0x3610A686, little-endian: 134,166,16,54)
//   followed by the uncompressed length (5) as a little-endian uint32.
const compressedGzip = new Uint8Array([
31, 139, 8, 0, 0, 0, 0, 0, 0, 3,
203, 72, 205, 201, 201, 7, 0, 134, 166, 16, 54, 5, 0, 0, 0,
]);
// Verifies DecompressionStream accepts a plain ArrayBuffer (not just a
// typed-array view) as a BufferSource chunk and decodes it correctly.
test('DecompressionStream accepts ArrayBuffer chunks', async () => {
  const ds = new DecompressionStream('gzip');
  const reader = ds.readable.getReader();
  const writer = ds.writable.getWriter();
  // Pass the underlying ArrayBuffer to exercise BufferSource handling.
  const writePromise = writer.write(compressedGzip.buffer);
  // close() returns a promise; keep it so a rejection cannot become an
  // unhandled rejection (the original discarded it).
  const closePromise = writer.close();
  const chunks = [];
  let done = false;
  while (!done) {
    const { value, done: d } = await reader.read();
    if (value) chunks.push(value);
    done = d;
  }
  // Surface any write/close failure after the stream has drained.
  await Promise.all([writePromise, closePromise]);
  const out = Buffer.concat(chunks.map((c) => Buffer.from(c)));
  assert.strictEqual(out.toString(), 'hello');
});
test('CompressionStream round-trip with ArrayBuffer input', async () => {
const cs = new CompressionStream('gzip');
const ds = new DecompressionStream('gzip');
const csWriter = cs.writable.getWriter();
const dsReader = ds.readable.getReader();
const input = new TextEncoder().encode('hello').buffer;
await csWriter.write(input);
csWriter.close();
await cs.readable.pipeTo(ds.writable);
const out = [];
let done = false;
while (!done) {
const { value, done: d } = await dsReader.read();
if (value) out.push(value);
done = d;
}
const result = Buffer.concat(out.map((c) => Buffer.from(c)));
assert.strictEqual(result.toString(), 'hello');
});