Skip to content

Commit 02e76e9

Browse files
committed
stream: fix up `from()` memory issue via bounded batch yielding
1 parent e1e1911 commit 02e76e9

1 file changed

Lines changed: 29 additions & 4 deletions

File tree

lib/internal/streams/new/from.js

Lines changed: 29 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,11 @@ const {
3535
// Shared TextEncoder instance for string conversion.
3636
const encoder = new TextEncoder();
3737

38+
// Maximum number of chunks to yield per batch from from(Uint8Array[]).
39+
// Bounds peak memory when arrays flow through transforms, which must
40+
// allocate output for the entire batch at once.
41+
const FROM_BATCH_SIZE = 128;
42+
3843
// =============================================================================
3944
// Type Guards and Detection
4045
// =============================================================================
@@ -458,7 +463,11 @@ function fromSync(input) {
458463
};
459464
}
460465

461-
// Fast path: Uint8Array[] - yield as a single batch
466+
// Fast path: Uint8Array[] - yield in bounded sub-batches.
467+
// Yielding the entire array as one batch forces downstream transforms
468+
// to process all data at once, causing peak memory proportional to total
469+
// data volume. Sub-batching keeps peak memory bounded while preserving
470+
// the throughput benefit of batched processing.
462471
if (ArrayIsArray(input)) {
463472
if (input.length === 0) {
464473
return {
@@ -475,7 +484,13 @@ function fromSync(input) {
475484
const batch = input;
476485
return {
477486
*[SymbolIterator]() {
478-
yield batch;
487+
if (batch.length <= FROM_BATCH_SIZE) {
488+
yield batch;
489+
} else {
490+
for (let i = 0; i < batch.length; i += FROM_BATCH_SIZE) {
491+
yield ArrayPrototypeSlice(batch, i, i + FROM_BATCH_SIZE);
492+
}
493+
}
479494
},
480495
};
481496
}
@@ -514,7 +529,11 @@ function from(input) {
514529
};
515530
}
516531

517-
// Fast path: Uint8Array[] - yield as a single batch
532+
// Fast path: Uint8Array[] - yield in bounded sub-batches.
533+
// Yielding the entire array as one batch forces downstream transforms
534+
// to process all data at once, causing peak memory proportional to total
535+
// data volume. Sub-batching keeps peak memory bounded while preserving
536+
// the throughput benefit of batched processing.
518537
if (ArrayIsArray(input)) {
519538
if (input.length === 0) {
520539
return {
@@ -530,7 +549,13 @@ function from(input) {
530549
const batch = input;
531550
return {
532551
async *[SymbolAsyncIterator]() {
533-
yield batch;
552+
if (batch.length <= FROM_BATCH_SIZE) {
553+
yield batch;
554+
} else {
555+
for (let i = 0; i < batch.length; i += FROM_BATCH_SIZE) {
556+
yield ArrayPrototypeSlice(batch, i, i + FROM_BATCH_SIZE);
557+
}
558+
}
534559
},
535560
};
536561
}

0 commit comments

Comments
 (0)