fix(ext/node): avoid stack overflow in node:zlib's gunzip (#30865)

Fixes #30829

## Problem

The zlib polyfill had recursive calls between `processCallback` and
`handle.write()` that caused a stack overflow when decompressing large
data.

## Solution

Defer `handle.write()` calls with `process.nextTick()` to break the
synchronous call chain.
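
As a rough illustration of the pattern (a minimal sketch, not the polyfill's
actual code: `fakeHandle` and `pump` are hypothetical stand-ins for the zlib
handle and `processCallback`), the difference is whether the next `write()`
runs on the current stack or on a fresh one:

```ts
import process from "node:process";

// Hypothetical stand-in (for illustration only) for a zlib handle whose
// write() invokes its callback synchronously whenever more input remains.
const fakeHandle = {
  remaining: 1_000_000,
  write(chunkSize: number, cb: () => void) {
    this.remaining -= chunkSize;
    if (this.remaining > 0) cb(); // synchronous callback, as in processCallback
  },
};

function pump() {
  fakeHandle.write(16, () => {
    // Calling pump() directly here would recurse: write -> cb -> write -> ...,
    // one stack frame per chunk. Deferring breaks the synchronous call chain.
    process.nextTick(pump);
  });
}

pump();
```

`process.nextTick()` callbacks run before the event loop moves on to I/O, so
each chunk starts from a fresh stack without changing the processing order.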

## Changes
- `ext/node/polyfills/zlib.js`: Wrapped `handle.write()` in
`process.nextTick()`
- `tests/unit_node/zlib_test.ts`: Added test for 64MiB data
decompression
Yusuke Tanaka, 2025-10-03 17:57:23 +09:00, committed by GitHub
commit 415acdd462, parent 926bcbc4c2
2 changed files with 46 additions and 13 deletions

`ext/node/polyfills/zlib.js`:

```diff
@@ -608,19 +608,7 @@ function processCallback() {
     handle.availInBefore = availInAfter;
 
     if (!streamBufferIsFull) {
-      this.write(
-        handle.flushFlag,
-        this.buffer, // in
-        handle.inOff, // in_off
-        handle.availInBefore, // in_len
-        self._outBuffer, // out
-        self._outOffset, // out_off
-        self._chunkSize,
-      ); // out_len
-    } else {
-      const oldRead = self._read;
-      self._read = (n) => {
-        self._read = oldRead;
+      process.nextTick(() => {
         this.write(
           handle.flushFlag,
           this.buffer, // in
@@ -630,6 +618,22 @@ function processCallback() {
           self._outOffset, // out_off
           self._chunkSize,
         ); // out_len
+      });
+    } else {
+      const oldRead = self._read;
+      self._read = (n) => {
+        self._read = oldRead;
+        process.nextTick(() => {
+          this.write(
+            handle.flushFlag,
+            this.buffer, // in
+            handle.inOff, // in_off
+            handle.availInBefore, // in_len
+            self._outBuffer, // out
+            self._outOffset, // out_off
+            self._chunkSize,
+          ); // out_len
+        });
         self._read(n);
       };
     }
```

`tests/unit_node/zlib_test.ts`:

```diff
@@ -16,6 +16,7 @@ import {
   createBrotliDecompress,
   createDeflate,
   deflateSync,
+  gunzip,
   gzip,
   gzipSync,
   unzipSync,
@@ -286,3 +287,31 @@ Deno.test("ERR_BUFFER_TOO_LARGE works correctly", () => {
     "Cannot create a Buffer larger than 1 bytes",
   );
 });
+
+// https://github.com/denoland/deno/issues/30829
+Deno.test("gunzip doesn't cause stack overflow with 64MiB data", async () => {
+  const data = Buffer.alloc(64 * 1024 * 1024);
+  const compressed = gzipSync(data);
+  const { promise, resolve, reject } = Promise.withResolvers<void>();
+  gunzip(compressed, (err, result) => {
+    if (err) {
+      reject(err);
+      return;
+    }
+    if (!result) {
+      reject(new Error("expected gunzip to return a Buffer"));
+      return;
+    }
+    if (result.length !== data.length) {
+      reject(
+        new Error(`expected ${data.length} bytes, got ${result.length}`),
+      );
+      return;
+    }
+    resolve();
+  });
+  await promise;
+});
```