fix(ext/node): upgrade node:stream (#28855)

Ref https://github.com/denoland/deno/issues/28836

This PR replaces the _stream.mjs bundle with a file-by-file port. A codemod transpiles the Node.js internals, performing three tasks: translating CJS to ESM, remapping internal dependencies, and hoisting lazy requires into top-level imports.

The process is fully automated through the `update_node_stream.ts` script, simplifying future internal updates. The script checks out Node.js at a specific tag defined in `tests/node_compat/runner`.

Additionally, the update enables new tests in our Node test runner and adds features (like compose()) that were missing from the outdated bundle.

## Performance

There is a 140KB+ binary size increase on aarch64-apple-darwin, and no-op startup time stays the same.
This commit is contained in:
Divy Srivastava 2025-04-14 09:05:34 -07:00 committed by GitHub
parent 6e49a4b3bd
commit 01b6da9d9b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
76 changed files with 11145 additions and 7032 deletions

View file

@ -541,7 +541,6 @@ deno_core::extension!(deno_node,
"_process/process.ts",
"_process/streams.mjs",
"_readline.mjs",
"_stream.mjs",
"_util/_util_callbackify.js",
"_util/asserts.ts",
"_util/async.ts",
@ -634,12 +633,24 @@ deno_core::extension!(deno_node,
"internal/readline/symbols.mjs",
"internal/readline/utils.mjs",
"internal/stream_base_commons.ts",
"internal/streams/add-abort-signal.mjs",
"internal/streams/destroy.mjs",
"internal/streams/end-of-stream.mjs",
"internal/streams/lazy_transform.mjs",
"internal/streams/state.mjs",
"internal/streams/utils.mjs",
"internal/streams/add-abort-signal.js",
"internal/streams/compose.js",
"internal/streams/destroy.js",
"internal/streams/duplex.js",
"internal/streams/duplexify.js",
"internal/streams/duplexpair.js",
"internal/streams/end-of-stream.js",
"internal/streams/from.js",
"internal/streams/lazy_transform.js",
"internal/streams/legacy.js",
"internal/streams/operators.js",
"internal/streams/passthrough.js",
"internal/streams/pipeline.js",
"internal/streams/readable.js",
"internal/streams/state.js",
"internal/streams/transform.js",
"internal/streams/utils.js",
"internal/streams/writable.js",
"internal/test/binding.ts",
"internal/timers.mjs",
"internal/url.ts",
@ -651,6 +662,7 @@ deno_core::extension!(deno_node,
"internal/util/parse_args/utils.js",
"internal/util/types.ts",
"internal/validators.mjs",
"internal/webstreams/adapters.js",
"path/_constants.ts",
"path/_interface.ts",
"path/_util.ts",
@ -664,11 +676,11 @@ deno_core::extension!(deno_node,
"node:_http_common" = "_http_common.ts",
"node:_http_outgoing" = "_http_outgoing.ts",
"node:_http_server" = "_http_server.ts",
"node:_stream_duplex" = "internal/streams/duplex.mjs",
"node:_stream_passthrough" = "internal/streams/passthrough.mjs",
"node:_stream_readable" = "internal/streams/readable.mjs",
"node:_stream_transform" = "internal/streams/transform.mjs",
"node:_stream_writable" = "internal/streams/writable.mjs",
"node:_stream_duplex" = "internal/streams/duplex.js",
"node:_stream_passthrough" = "internal/streams/passthrough.js",
"node:_stream_readable" = "internal/streams/readable.js",
"node:_stream_transform" = "internal/streams/transform.js",
"node:_stream_writable" = "internal/streams/writable.js",
"node:_tls_common" = "_tls_common.ts",
"node:_tls_wrap" = "_tls_wrap.ts",
"node:assert" = "assert.ts",
@ -708,9 +720,9 @@ deno_core::extension!(deno_node,
"node:repl" = "repl.ts",
"node:sqlite" = "sqlite.ts",
"node:stream" = "stream.ts",
"node:stream/consumers" = "stream/consumers.mjs",
"node:stream/promises" = "stream/promises.mjs",
"node:stream/web" = "stream/web.ts",
"node:stream/consumers" = "stream/consumers.js",
"node:stream/promises" = "stream/promises.js",
"node:stream/web" = "stream/web.js",
"node:string_decoder" = "string_decoder.ts",
"node:sys" = "sys.ts",
"node:test" = "testing.ts",

View file

@ -121,9 +121,9 @@ import internalEventTarget from "ext:deno_node/internal/event_target.mjs";
import internalFsUtils from "ext:deno_node/internal/fs/utils.mjs";
import internalHttp from "ext:deno_node/internal/http.ts";
import internalReadlineUtils from "ext:deno_node/internal/readline/utils.mjs";
import internalStreamsAddAbortSignal from "ext:deno_node/internal/streams/add-abort-signal.mjs";
import internalStreamsLazyTransform from "ext:deno_node/internal/streams/lazy_transform.mjs";
import internalStreamsState from "ext:deno_node/internal/streams/state.mjs";
import internalStreamsAddAbortSignal from "ext:deno_node/internal/streams/add-abort-signal.js";
import internalStreamsLazyTransform from "ext:deno_node/internal/streams/lazy_transform.js";
import internalStreamsState from "ext:deno_node/internal/streams/state.js";
import internalTestBinding from "ext:deno_node/internal/test/binding.ts";
import internalTimers from "ext:deno_node/internal/timers.mjs";
import internalUtil from "ext:deno_node/internal/util.mjs";

View file

@ -4,7 +4,7 @@
// TODO(petamoriken): enable prefer-primordials for node polyfills
// deno-lint-ignore-file prefer-primordials
import { getDefaultHighWaterMark } from "ext:deno_node/internal/streams/state.mjs";
import { getDefaultHighWaterMark } from "ext:deno_node/internal/streams/state.js";
import assert from "ext:deno_node/internal/assert.mjs";
import EE from "node:events";
import { Stream } from "node:stream";

File diff suppressed because it is too large Load diff

View file

@ -26,7 +26,7 @@ import {
import { Buffer } from "node:buffer";
import { notImplemented } from "ext:deno_node/_utils.ts";
import type { TransformOptions } from "ext:deno_node/_stream.d.ts";
import { Transform } from "ext:deno_node/_stream.mjs";
import { Transform } from "node:stream";
import {
getArrayBufferOrView,
KeyObject,

View file

@ -41,7 +41,7 @@ import {
ERR_INVALID_ARG_TYPE,
NodeError,
} from "ext:deno_node/internal/errors.ts";
import LazyTransform from "ext:deno_node/internal/streams/lazy_transform.mjs";
import LazyTransform from "ext:deno_node/internal/streams/lazy_transform.js";
import {
getDefaultEncoding,
toBuf,

View file

@ -2675,6 +2675,11 @@ export class NodeAggregateError extends AggregateError {
}
codes.ERR_IPC_CHANNEL_CLOSED = ERR_IPC_CHANNEL_CLOSED;
codes.ERR_METHOD_NOT_IMPLEMENTED = ERR_METHOD_NOT_IMPLEMENTED;
codes.ERR_INVALID_RETURN_VALUE = ERR_INVALID_RETURN_VALUE;
codes.ERR_MISSING_ARGS = ERR_MISSING_ARGS;
codes.ERR_MULTIPLE_CALLBACK = ERR_MULTIPLE_CALLBACK;
codes.ERR_STREAM_WRITE_AFTER_END = ERR_STREAM_WRITE_AFTER_END;
codes.ERR_INVALID_ARG_TYPE = ERR_INVALID_ARG_TYPE;
codes.ERR_INVALID_ARG_VALUE = ERR_INVALID_ARG_VALUE;
codes.ERR_OUT_OF_RANGE = ERR_OUT_OF_RANGE;
@ -2686,6 +2691,15 @@ codes.ERR_PARSE_ARGS_INVALID_OPTION_VALUE = ERR_PARSE_ARGS_INVALID_OPTION_VALUE;
codes.ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL =
ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL;
codes.ERR_PARSE_ARGS_UNKNOWN_OPTION = ERR_PARSE_ARGS_UNKNOWN_OPTION;
codes.ERR_STREAM_ALREADY_FINISHED = ERR_STREAM_ALREADY_FINISHED;
codes.ERR_STREAM_CANNOT_PIPE = ERR_STREAM_CANNOT_PIPE;
codes.ERR_STREAM_DESTROYED = ERR_STREAM_DESTROYED;
codes.ERR_STREAM_NULL_VALUES = ERR_STREAM_NULL_VALUES;
codes.ERR_STREAM_PREMATURE_CLOSE = ERR_STREAM_PREMATURE_CLOSE;
codes.ERR_STREAM_PUSH_AFTER_EOF = ERR_STREAM_PUSH_AFTER_EOF;
codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT = ERR_STREAM_UNSHIFT_AFTER_END_EVENT;
codes.ERR_STREAM_WRAP = ERR_STREAM_WRAP;
codes.ERR_STREAM_WRITE_AFTER_END = ERR_STREAM_WRITE_AFTER_END;
// TODO(kt3k): assign all error classes here.

View file

@ -43,6 +43,7 @@ const kStop = Symbol("kStop");
const kTarget = Symbol("kTarget");
const kHandlers = Symbol("khandlers");
const kWeakHandler = Symbol("kWeak");
const kResistStopPropagation = Symbol("kResistStopPropagation");
const kHybridDispatch = Symbol.for("nodejs.internal.kHybridDispatch");
const kCreateEvent = Symbol("kCreateEvent");
@ -1096,6 +1097,7 @@ export {
kEvents,
kNewListener,
kRemoveListener,
kResistStopPropagation,
kTrustEvent,
kWeakHandler,
NodeEventTarget,
@ -1116,5 +1118,6 @@ export default {
kRemoveListener,
kEvents,
kWeakHandler,
kResistStopPropagation,
isEventTarget,
};

View file

@ -41,4 +41,6 @@ function addAbortListener(signal, listener) {
};
}
export default { addAbortListener };
export { addAbortListener };

View file

@ -14,7 +14,7 @@ import {
validateFunction,
validateInteger,
} from "ext:deno_node/internal/validators.mjs";
import { errorOrDestroy } from "ext:deno_node/internal/streams/destroy.mjs";
import { errorOrDestroy } from "ext:deno_node/internal/streams/destroy.js";
import { open as fsOpen } from "ext:deno_node/_fs/_fs_open.ts";
import { read as fsRead } from "ext:deno_node/_fs/_fs_read.ts";
import { write as fsWrite } from "ext:deno_node/_fs/_fs_write.mjs";

View file

@ -52,7 +52,7 @@ import {
cursorTo,
moveCursor,
} from "ext:deno_node/internal/readline/callbacks.mjs";
import { Readable } from "ext:deno_node/_stream.mjs";
import { Readable } from "node:stream";
import process from "node:process";
import { StringDecoder } from "node:string_decoder";

View file

@ -14,7 +14,7 @@ import {
validateBoolean,
validateInteger,
} from "ext:deno_node/internal/validators.mjs";
import { isWritable } from "ext:deno_node/internal/streams/utils.mjs";
import { isWritable } from "ext:deno_node/internal/streams/utils.js";
import { ERR_INVALID_ARG_TYPE } from "ext:deno_node/internal/errors.ts";
const {

View file

@ -0,0 +1,83 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
import imported1 from "ext:deno_node/internal/errors.ts";
import {
isNodeStream,
isWebStream,
kControllerErrorFunction,
} from "ext:deno_node/internal/streams/utils.js";
import eos from "ext:deno_node/internal/streams/end-of-stream.js";
import * as _mod2 from "ext:deno_node/internal/events/abort_listener.mjs";
const {
AbortError,
codes: {
ERR_INVALID_ARG_TYPE,
},
} = imported1;
"use strict";
const {
SymbolDispose,
} = primordials;
let addAbortListener;
// This method is inlined here for readable-stream
// It also does not allow for signal to not exist on the stream
// https://github.com/nodejs/node/pull/36061#discussion_r533718029
// Duck-type check for an AbortSignal: any object exposing an `aborted`
// property is accepted. Throws ERR_INVALID_ARG_TYPE otherwise.
// NOTE(review): a `null` signal reaches the `in` operator and throws a plain
// TypeError rather than ERR_INVALID_ARG_TYPE — deliberately preserved here.
const validateAbortSignal = (signal, name) => {
  const isObjectLike = typeof signal === "object";
  if (!isObjectLike || !("aborted" in signal)) {
    throw new ERR_INVALID_ARG_TYPE(name, "AbortSignal", signal);
  }
};
// Ties a stream's lifetime to an AbortSignal: once the signal aborts, the
// stream is destroyed (see addAbortSignalNoValidate). Accepts Node streams
// and web streams; anything else is rejected.
//
// @param {AbortSignal} signal - validated via validateAbortSignal above
// @param {object} stream - Node stream, ReadableStream, or WritableStream
// @returns the same stream, for chaining
// @throws ERR_INVALID_ARG_TYPE when `stream` is neither a Node nor web stream
const addAbortSignal = function addAbortSignal(signal, stream) {
  validateAbortSignal(signal, "signal");
  if (!isNodeStream(stream) && !isWebStream(stream)) {
    throw new ERR_INVALID_ARG_TYPE("stream", [
      "ReadableStream",
      "WritableStream",
      "Stream",
    ], stream);
  }
  // Validation done; delegate the actual wiring.
  return addAbortSignalNoValidate(signal, stream);
};
export { addAbortSignal };
// Unvalidated variant of addAbortSignal: silently returns the stream
// unchanged when `signal` does not look like an AbortSignal.
// On abort, Node streams are destroy()ed with an AbortError carrying the
// signal's reason; web streams are errored through their controller via
// kControllerErrorFunction.
const addAbortSignalNoValidate = function (signal, stream) {
  if (typeof signal !== "object" || !("aborted" in signal)) {
    return stream;
  }
  const onAbort = isNodeStream(stream)
    ? () => {
      stream.destroy(new AbortError(undefined, { cause: signal.reason }));
    }
    : () => {
      stream[kControllerErrorFunction](
        new AbortError(undefined, { cause: signal.reason }),
      );
    };
  if (signal.aborted) {
    // Already aborted: destroy immediately, no listener needed.
    onAbort();
  } else {
    // Lazily bind the shared events.addAbortListener helper (hoisted
    // `let addAbortListener` above), then make sure the listener is
    // disposed once the stream finishes, to avoid leaking it.
    addAbortListener ??= _mod2.addAbortListener;
    const disposable = addAbortListener(signal, onAbort);
    eos(stream, disposable[SymbolDispose]);
  }
  return stream;
};
export { addAbortSignalNoValidate };
export default {
  addAbortSignal,
  addAbortSignalNoValidate,
};

View file

@ -1,51 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
import {
AbortError,
ERR_INVALID_ARG_TYPE,
} from "ext:deno_node/internal/errors.ts";
import eos from "ext:deno_node/internal/streams/end-of-stream.mjs";
// This method is inlined here for readable-stream
// It also does not allow for signal to not exist on the stream
// https://github.com/nodejs/node/pull/36061#discussion_r533718029
const validateAbortSignal = (signal, name) => {
if (
typeof signal !== "object" ||
!("aborted" in signal)
) {
throw new ERR_INVALID_ARG_TYPE(name, "AbortSignal", signal);
}
};
function isStream(obj) {
return !!(obj && typeof obj.pipe === "function");
}
function addAbortSignal(signal, stream) {
validateAbortSignal(signal, "signal");
if (!isStream(stream)) {
throw new ERR_INVALID_ARG_TYPE("stream", "stream.Stream", stream);
}
return addAbortSignalNoValidate(signal, stream);
}
function addAbortSignalNoValidate(signal, stream) {
if (typeof signal !== "object" || !("aborted" in signal)) {
return stream;
}
const onAbort = () => {
stream.destroy(new AbortError());
};
if (signal.aborted) {
onAbort();
} else {
signal.addEventListener("abort", onAbort);
eos(stream, () => signal.removeEventListener("abort", onAbort));
}
return stream;
}
export default { addAbortSignal, addAbortSignalNoValidate };
export { addAbortSignal, addAbortSignalNoValidate };

View file

@ -0,0 +1,265 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
import { pipeline } from "ext:deno_node/internal/streams/pipeline.js";
import Duplex from "ext:deno_node/internal/streams/duplex.js";
import { destroyer } from "ext:deno_node/internal/streams/destroy.js";
import {
isNodeStream,
isReadable,
isReadableStream,
isTransformStream,
isWebStream,
isWritable,
isWritableStream,
} from "ext:deno_node/internal/streams/utils.js";
import imported1 from "ext:deno_node/internal/errors.ts";
import eos from "ext:deno_node/internal/streams/end-of-stream.js";
const {
AbortError,
codes: {
ERR_INVALID_ARG_VALUE,
ERR_MISSING_ARGS,
},
} = imported1;
"use strict";
const {
ArrayPrototypeSlice,
} = primordials;
// Implements `stream.compose()`: combines a list of streams (Node streams,
// web streams, or functions convertible via `Duplex.from`) into one Duplex.
// Writes to the returned duplex go into the first stream (`head`); reads
// come from the last stream (`tail`); the chain itself is connected with
// `pipeline()`.
//
// @throws ERR_MISSING_ARGS when called with no streams
// @throws ERR_INVALID_ARG_VALUE when an inner stream is not readable/writable
export default function compose(...streams) {
  if (streams.length === 0) {
    throw new ERR_MISSING_ARGS("streams");
  }
  if (streams.length === 1) {
    // Single argument: no pipeline needed, just coerce to a Duplex.
    return Duplex.from(streams[0]);
  }
  // Keep the caller's originals for error reporting before we replace
  // function endpoints with Duplex wrappers in-place.
  const orgStreams = ArrayPrototypeSlice(streams);
  if (typeof streams[0] === "function") {
    streams[0] = Duplex.from(streams[0]);
  }
  if (typeof streams[streams.length - 1] === "function") {
    const idx = streams.length - 1;
    streams[idx] = Duplex.from(streams[idx]);
  }
  // Every stream except the last must be readable; every stream except
  // the first must be writable. Non-stream values are skipped for now.
  for (let n = 0; n < streams.length; ++n) {
    if (!isNodeStream(streams[n]) && !isWebStream(streams[n])) {
      // TODO(ronag): Add checks for non streams.
      continue;
    }
    if (
      n < streams.length - 1 &&
      !(
        isReadable(streams[n]) ||
        isReadableStream(streams[n]) ||
        isTransformStream(streams[n])
      )
    ) {
      throw new ERR_INVALID_ARG_VALUE(
        `streams[${n}]`,
        orgStreams[n],
        "must be readable",
      );
    }
    if (
      n > 0 &&
      !(
        isWritable(streams[n]) ||
        isWritableStream(streams[n]) ||
        isTransformStream(streams[n])
      )
    ) {
      throw new ERR_INVALID_ARG_VALUE(
        `streams[${n}]`,
        orgStreams[n],
        "must be writable",
      );
    }
  }
  // Callback slots shared between the outer duplex and the inner chain.
  let ondrain;
  let onfinish;
  let onreadable;
  let onclose;
  let d;
  // Called by pipeline() when the chain settles. If _destroy is already
  // pending (onclose set), complete it; otherwise propagate the error or,
  // for a non-readable/non-writable composition, tear down the duplex.
  function onfinished(err) {
    const cb = onclose;
    onclose = null;
    if (cb) {
      cb(err);
    } else if (err) {
      d.destroy(err);
    } else if (!readable && !writable) {
      d.destroy();
    }
  }
  const head = streams[0];
  const tail = pipeline(streams, onfinished);
  const writable = !!(
    isWritable(head) ||
    isWritableStream(head) ||
    isTransformStream(head)
  );
  const readable = !!(
    isReadable(tail) ||
    isReadableStream(tail) ||
    isTransformStream(tail)
  );
  // TODO(ronag): Avoid double buffering.
  // Implement Writable/Readable/Duplex traits.
  // See, https://github.com/nodejs/node/pull/33515.
  d = new Duplex({
    // TODO (ronag): highWaterMark?
    writableObjectMode: !!head?.writableObjectMode,
    readableObjectMode: !!tail?.readableObjectMode,
    writable,
    readable,
  });
  if (writable) {
    if (isNodeStream(head)) {
      // Forward writes to `head`, parking the callback until "drain"
      // when head's buffer is full.
      d._write = function (chunk, encoding, callback) {
        if (head.write(chunk, encoding)) {
          callback();
        } else {
          ondrain = callback;
        }
      };
      d._final = function (callback) {
        head.end();
        onfinish = callback;
      };
      head.on("drain", function () {
        if (ondrain) {
          const cb = ondrain;
          ondrain = null;
          cb();
        }
      });
    } else if (isWebStream(head)) {
      // Web-stream head: go through a writer; write/close rejections are
      // intentionally swallowed here — errors surface via pipeline().
      const writable = isTransformStream(head) ? head.writable : head;
      const writer = writable.getWriter();
      d._write = async function (chunk, encoding, callback) {
        try {
          await writer.ready;
          writer.write(chunk).catch(() => {});
          callback();
        } catch (err) {
          callback(err);
        }
      };
      d._final = async function (callback) {
        try {
          await writer.ready;
          writer.close().catch(() => {});
          onfinish = callback;
        } catch (err) {
          callback(err);
        }
      };
    }
    // Complete a pending _final once the readable end of the chain ends.
    const toRead = isTransformStream(tail) ? tail.readable : tail;
    eos(toRead, () => {
      if (onfinish) {
        const cb = onfinish;
        onfinish = null;
        cb();
      }
    });
  }
  if (readable) {
    if (isNodeStream(tail)) {
      tail.on("readable", function () {
        if (onreadable) {
          const cb = onreadable;
          onreadable = null;
          cb();
        }
      });
      tail.on("end", function () {
        d.push(null);
      });
      // Pull from tail until it runs dry; park _read itself as the
      // "readable" continuation when no data is buffered.
      d._read = function () {
        while (true) {
          const buf = tail.read();
          if (buf === null) {
            onreadable = d._read;
            return;
          }
          if (!d.push(buf)) {
            return;
          }
        }
      };
    } else if (isWebStream(tail)) {
      const readable = isTransformStream(tail) ? tail.readable : tail;
      const reader = readable.getReader();
      d._read = async function () {
        while (true) {
          try {
            const { value, done } = await reader.read();
            if (!d.push(value)) {
              return;
            }
            if (done) {
              d.push(null);
              return;
            }
          } catch {
            return;
          }
        }
      };
    }
  }
  d._destroy = function (err, callback) {
    // Destroyed before the pipeline settled (onclose never nulled by
    // onfinished): treat an error-less destroy as an abort.
    if (!err && onclose !== null) {
      err = new AbortError();
    }
    onreadable = null;
    ondrain = null;
    onfinish = null;
    if (isNodeStream(tail)) {
      destroyer(tail, err);
    }
    if (onclose === null) {
      // Pipeline already finished; complete destruction immediately.
      callback(err);
    } else {
      // Defer completion until onfinished fires.
      onclose = callback;
    }
  };
  return d;
}

View file

@ -0,0 +1,371 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import imported1 from "ext:deno_node/internal/errors.ts";
import {
isDestroyed,
isFinished,
isServerRequest,
kAutoDestroy,
kClosed,
kCloseEmitted,
kConstructed,
kDestroyed,
kEmitClose,
kErrored,
kErrorEmitted,
kIsDestroyed,
kState,
} from "ext:deno_node/internal/streams/utils.js";
const {
AbortError,
aggregateTwoErrors,
codes: {
ERR_MULTIPLE_CALLBACK,
},
} = imported1;
"use strict";
const {
Symbol,
} = primordials;
const kDestroy = Symbol("kDestroy");
const kConstruct = Symbol("kConstruct");
// Record `err` on the writable and readable state objects, keeping the
// first error each side observed (later errors never overwrite it).
// No-op when `err` is falsy.
function checkError(err, w, r) {
  if (!err) {
    return;
  }
  // Materialize the stack eagerly to avoid a V8 leak, see
  // https://github.com/nodejs/node/pull/34103#issuecomment-652002364
  err.stack; // eslint-disable-line no-unused-expressions
  for (const state of [w, r]) {
    if (state && !state.errored) {
      state.errored = err;
    }
  }
}
// Backwards compat. cb() is undocumented and unused in core but
// unfortunately might be used by modules.
// Installed on streams as `stream.destroy()` (called with the stream as
// `this`). State is tracked via bit flags on state[kState] (flags imported
// from internal/streams/utils.js). Idempotent: a second destroy() only
// invokes the callback.
//
// @param {Error|null} err - optional error to record/emit
// @param {Function} [cb] - legacy callback, invoked with the final error
// @returns {this} the stream, for chaining
function destroy(err, cb) {
  const r = this._readableState;
  const w = this._writableState;
  // With duplex streams we use the writable side for state.
  const s = w || r;
  if (
    (w && (w[kState] & kDestroyed) !== 0) ||
    (r && (r[kState] & kDestroyed) !== 0)
  ) {
    // Already destroyed: nothing to do beyond notifying the caller.
    if (typeof cb === "function") {
      cb();
    }
    return this;
  }
  // We set destroyed to true before firing error callbacks in order
  // to make it re-entrance safe in case destroy() is called within callbacks
  checkError(err, w, r);
  if (w) {
    w[kState] |= kDestroyed;
  }
  if (r) {
    r[kState] |= kDestroyed;
  }
  // If still constructing then defer calling _destroy.
  if ((s[kState] & kConstructed) === 0) {
    this.once(kDestroy, function (er) {
      // Combine the construction error (if any) with the destroy error.
      _destroy(this, aggregateTwoErrors(er, err), cb);
    });
  } else {
    _destroy(this, err, cb);
  }
  return this;
}
// Runs the stream's user-supplied _destroy(err, callback) hook and then
// finalizes teardown: marks both sides closed, invokes the legacy callback,
// and schedules "error"/"close" emission on the next tick. The `called`
// guard makes the completion callback single-shot.
function _destroy(self, err, cb) {
  let called = false;
  function onDestroy(err) {
    if (called) {
      // _destroy invoked its callback more than once; ignore repeats.
      return;
    }
    called = true;
    const r = self._readableState;
    const w = self._writableState;
    checkError(err, w, r);
    if (w) {
      w[kState] |= kClosed;
    }
    if (r) {
      r[kState] |= kClosed;
    }
    if (typeof cb === "function") {
      cb(err);
    }
    // Events are always emitted asynchronously, one tick later.
    if (err) {
      process.nextTick(emitErrorCloseNT, self, err);
    } else {
      process.nextTick(emitCloseNT, self);
    }
  }
  try {
    self._destroy(err || null, onDestroy);
  } catch (err) {
    // A synchronous throw from _destroy counts as its error callback.
    onDestroy(err);
  }
}
// next-tick helper: emit "error" followed by "close".
function emitErrorCloseNT(self, err) {
  emitErrorNT(self, err);
  emitCloseNT(self);
}
// Mark both sides close-emitted, then emit "close" — but only when at
// least one side has the kEmitClose flag set (the emitClose option).
function emitCloseNT(self) {
  const r = self._readableState;
  const w = self._writableState;
  if (w) {
    w[kState] |= kCloseEmitted;
  }
  if (r) {
    r[kState] |= kCloseEmitted;
  }
  if (
    (w && (w[kState] & kEmitClose) !== 0) ||
    (r && (r[kState] & kEmitClose) !== 0)
  ) {
    self.emit("close");
  }
}
// Emit "error" at most once per stream: skip if either side already has
// kErrorEmitted set, and set the flag on both sides before emitting.
function emitErrorNT(self, err) {
  const r = self._readableState;
  const w = self._writableState;
  if (
    (w && (w[kState] & kErrorEmitted) !== 0) ||
    (r && (r[kState] & kErrorEmitted) !== 0)
  ) {
    return;
  }
  if (w) {
    w[kState] |= kErrorEmitted;
  }
  if (r) {
    r[kState] |= kErrorEmitted;
  }
  self.emit("error", err);
}
// Installed on streams as `stream.undestroy()` (uses `this`): resets the
// destroy-related fields of both state objects so the stream can be reused.
// Note these assignments go through the state classes' accessors, which map
// the boolean properties onto the kState bit flags.
// A side constructed with readable:false / writable:false stays ended.
function undestroy() {
  const r = this._readableState;
  const w = this._writableState;
  if (r) {
    r.constructed = true;
    r.closed = false;
    r.closeEmitted = false;
    r.destroyed = false;
    r.errored = null;
    r.errorEmitted = false;
    r.reading = false;
    // Keep a never-readable side in its ended state.
    r.ended = r.readable === false;
    r.endEmitted = r.readable === false;
  }
  if (w) {
    w.constructed = true;
    w.destroyed = false;
    w.closed = false;
    w.closeEmitted = false;
    w.errored = null;
    w.errorEmitted = false;
    w.finalCalled = false;
    w.prefinished = false;
    // Keep a never-writable side in its finished state.
    w.ended = w.writable === false;
    w.ending = w.writable === false;
    w.finished = w.writable === false;
  }
}
// Either destroy the stream (when autoDestroy is enabled on either side)
// or just record and emit the error. `sync` defers the "error" emission to
// the next tick; otherwise it fires in the current tick.
//
// @param stream - stream whose _readableState/_writableState are consulted
// @param {Error} err - the error to report
// @param {boolean} [sync] - emit "error" on nextTick instead of inline
function errorOrDestroy(stream, err, sync) {
  // We have tests that rely on errors being emitted
  // in the same tick, so changing this is semver major.
  // For now when you opt-in to autoDestroy we allow
  // the error to be emitted nextTick. In a future
  // semver major update we should change the default to this.
  const r = stream._readableState;
  const w = stream._writableState;
  if (
    // Tolerate foreign state objects without kState by falling back to
    // the boolean `destroyed` property.
    (w && (w[kState] ? (w[kState] & kDestroyed) !== 0 : w.destroyed)) ||
    (r && (r[kState] ? (r[kState] & kDestroyed) !== 0 : r.destroyed))
  ) {
    // NOTE(review): `this` is undefined here at module scope; upstream Node
    // has the same `return this` — preserved for fidelity.
    return this;
  }
  if (
    (r && (r[kState] & kAutoDestroy) !== 0) ||
    (w && (w[kState] & kAutoDestroy) !== 0)
  ) {
    stream.destroy(err);
  } else if (err) {
    // Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
    err.stack; // eslint-disable-line no-unused-expressions
    if (w && (w[kState] & kErrored) === 0) {
      w.errored = err;
    }
    if (r && (r[kState] & kErrored) === 0) {
      r.errored = err;
    }
    if (sync) {
      process.nextTick(emitErrorNT, stream, err);
    } else {
      emitErrorNT(stream, err);
    }
  }
}
// Drives the optional user-supplied _construct(callback) hook. Clears the
// kConstructed flag on both sides, registers `cb` to run once construction
// completes, and schedules the actual call on the next tick. For a Duplex
// both sides register a kConstruct listener, so only the first caller
// (listenerCount === 1) schedules the tick.
function construct(stream, cb) {
  if (typeof stream._construct !== "function") {
    return;
  }
  const r = stream._readableState;
  const w = stream._writableState;
  if (r) {
    r[kState] &= ~kConstructed;
  }
  if (w) {
    w[kState] &= ~kConstructed;
  }
  stream.once(kConstruct, cb);
  if (stream.listenerCount(kConstruct) > 1) {
    // Duplex
    return;
  }
  process.nextTick(constructNT, stream);
}
// Invokes stream._construct and routes its outcome: on success emit
// kConstruct (running the deferred callbacks), on error destroy, and if the
// stream was destroyed while constructing, emit kDestroy so the pending
// destroy can finish.
function constructNT(stream) {
  let called = false;
  function onConstruct(err) {
    if (called) {
      // _construct called back twice — surface it as ERR_MULTIPLE_CALLBACK.
      errorOrDestroy(stream, err ?? new ERR_MULTIPLE_CALLBACK());
      return;
    }
    called = true;
    const r = stream._readableState;
    const w = stream._writableState;
    const s = w || r;
    if (r) {
      r[kState] |= kConstructed;
    }
    if (w) {
      w[kState] |= kConstructed;
    }
    if (s.destroyed) {
      stream.emit(kDestroy, err);
    } else if (err) {
      errorOrDestroy(stream, err, true);
    } else {
      stream.emit(kConstruct);
    }
  }
  try {
    stream._construct((err) => {
      // Always complete asynchronously, even if _construct calls back sync.
      process.nextTick(onConstruct, err);
    });
  } catch (err) {
    process.nextTick(onConstruct, err);
  }
}
// ClientRequest-like duck typing: a truthy `setHeader` plus a callable
// `abort()`. Returns the falsy `setHeader` value (or undefined for a
// nullish stream) rather than a strict boolean when the check fails.
function isRequest(stream) {
  const canAbort = typeof stream?.abort === "function";
  return stream?.setHeader && canAbort;
}
// Emit "close" on a legacy stream that has no destroy() of its own.
const emitCloseLegacy = (stream) => {
  stream.emit("close");
};

// Emit "error" synchronously and defer "close" by one tick, preserving
// the error-then-close ordering legacy streams historically produced.
const emitErrorCloseLegacy = (stream, err) => {
  stream.emit("error", err);
  process.nextTick(emitCloseLegacy, stream);
};
// Normalize destroy for legacy.
function destroyer(stream, err) {
if (!stream || isDestroyed(stream)) {
return;
}
if (!err && !isFinished(stream)) {
err = new AbortError();
}
// TODO: Remove isRequest branches.
if (isServerRequest(stream)) {
stream.socket = null;
stream.destroy(err);
} else if (isRequest(stream)) {
stream.abort();
} else if (isRequest(stream.req)) {
stream.req.abort();
} else if (typeof stream.destroy === "function") {
stream.destroy(err);
} else if (typeof stream.close === "function") {
// TODO: Don't lose err?
stream.close();
} else if (err) {
process.nextTick(emitErrorCloseLegacy, stream, err);
} else {
process.nextTick(emitCloseLegacy, stream);
}
if (!stream.destroyed) {
stream[kIsDestroyed] = true;
}
}
const _defaultExport2 = {
construct,
destroyer,
destroy,
undestroy,
errorOrDestroy,
};
export default _defaultExport2;
export { construct, destroy, destroyer, errorOrDestroy, undestroy };

View file

@ -1,189 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
import { aggregateTwoErrors } from "ext:deno_node/internal/errors.ts";
import * as process from "ext:deno_node/_process/process.ts";
const kDestroy = Symbol("kDestroy");
function checkError(err, w, r) {
if (err) {
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
err.stack; // eslint-disable-line no-unused-expressions
if (w && !w.errored) {
w.errored = err;
}
if (r && !r.errored) {
r.errored = err;
}
}
}
// Backwards compat. cb() is undocumented and unused in core but
// unfortunately might be used by modules.
function destroy(err, cb) {
const r = this._readableState;
const w = this._writableState;
// With duplex streams we use the writable side for state.
const s = w || r;
if ((w && w.destroyed) || (r && r.destroyed)) {
if (typeof cb === "function") {
cb();
}
return this;
}
// We set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
checkError(err, w, r);
if (w) {
w.destroyed = true;
}
if (r) {
r.destroyed = true;
}
// If still constructing then defer calling _destroy.
if (!s.constructed) {
this.once(kDestroy, function (er) {
_destroy(this, aggregateTwoErrors(er, err), cb);
});
} else {
_destroy(this, err, cb);
}
return this;
}
function _destroy(self, err, cb) {
let called = false;
function onDestroy(err) {
if (called) {
return;
}
called = true;
const r = self._readableState;
const w = self._writableState;
checkError(err, w, r);
if (w) {
w.closed = true;
}
if (r) {
r.closed = true;
}
if (typeof cb === "function") {
cb(err);
}
if (err) {
process.nextTick(emitErrorCloseNT, self, err);
} else {
process.nextTick(emitCloseNT, self);
}
}
try {
const result = self._destroy(err || null, onDestroy);
if (result != null) {
const then = result.then;
if (typeof then === "function") {
then.call(
result,
function () {
process.nextTick(onDestroy, null);
},
function (err) {
process.nextTick(onDestroy, err);
},
);
}
}
} catch (err) {
onDestroy(err);
}
}
function emitErrorCloseNT(self, err) {
emitErrorNT(self, err);
emitCloseNT(self);
}
function emitCloseNT(self) {
const r = self._readableState;
const w = self._writableState;
if (w) {
w.closeEmitted = true;
}
if (r) {
r.closeEmitted = true;
}
if ((w && w.emitClose) || (r && r.emitClose)) {
self.emit("close");
}
}
function emitErrorNT(self, err) {
const r = self._readableState;
const w = self._writableState;
if ((w && w.errorEmitted) || (r && r.errorEmitted)) {
return;
}
if (w) {
w.errorEmitted = true;
}
if (r) {
r.errorEmitted = true;
}
self.emit("error", err);
}
function errorOrDestroy(stream, err, sync) {
// We have tests that rely on errors being emitted
// in the same tick, so changing this is semver major.
// For now when you opt-in to autoDestroy we allow
// the error to be emitted nextTick. In a future
// semver major update we should change the default to this.
const r = stream._readableState;
const w = stream._writableState;
if ((w && w.destroyed) || (r && r.destroyed)) {
return this;
}
if ((r && r.autoDestroy) || (w && w.autoDestroy)) {
stream.destroy(err);
} else if (err) {
// Avoid V8 leak, https://github.com/nodejs/node/pull/34103#issuecomment-652002364
err.stack; // eslint-disable-line no-unused-expressions
if (w && !w.errored) {
w.errored = err;
}
if (r && !r.errored) {
r.errored = err;
}
if (sync) {
process.nextTick(emitErrorNT, stream, err);
} else {
emitErrorNT(stream, err);
}
}
}
export default { destroy, errorOrDestroy };
export { destroy, errorOrDestroy };

View file

@ -0,0 +1,238 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
import _mod1 from "ext:deno_node/internal/streams/legacy.js";
import Readable from "ext:deno_node/internal/streams/readable.js";
import Writable from "ext:deno_node/internal/streams/writable.js";
import { addAbortSignal } from "ext:deno_node/internal/streams/add-abort-signal.js";
import destroyImpl from "ext:deno_node/internal/streams/destroy.js";
import { kOnConstructed } from "ext:deno_node/internal/streams/utils.js";
import * as _mod2 from "ext:deno_node/internal/webstreams/adapters.js";
import _mod3 from "ext:deno_node/internal/streams/duplexify.js";
const Stream = _mod1.Stream;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototype inheritance, this class
// prototypically inherits from Readable, and then parasitically from
// Writable.
"use strict";
const {
ObjectDefineProperties,
ObjectGetOwnPropertyDescriptor,
ObjectKeys,
ObjectSetPrototypeOf,
} = primordials;
ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype);
ObjectSetPrototypeOf(Duplex, Readable);
{
const keys = ObjectKeys(Writable.prototype);
// Allow the keys array to be GC'ed.
for (let i = 0; i < keys.length; i++) {
const method = keys[i];
Duplex.prototype[method] ||= Writable.prototype[method];
}
}
// Use the `destroy` method of `Writable`.
Duplex.prototype.destroy = Writable.prototype.destroy;
// Duplex constructor: a stream that is both readable and writable. It
// prototypically inherits from Readable (see the ObjectSetPrototypeOf
// calls above) and copies Writable's methods in parasitically. Callable
// without `new`.
//
// @param {object} [options] - Readable/Writable options plus allowHalfOpen,
//   readable/writable toggles, hook overrides (read/write/writev/destroy/
//   final/construct) and an optional AbortSignal.
function Duplex(options) {
  if (!(this instanceof Duplex)) {
    return new Duplex(options);
  }
  // Pre-populate the events map with the common event keys so the object
  // keeps a stable shape (presumably a V8 hidden-class optimization —
  // mirrors upstream Node).
  this._events ??= {
    close: undefined,
    error: undefined,
    prefinish: undefined,
    finish: undefined,
    drain: undefined,
    data: undefined,
    end: undefined,
    readable: undefined,
    // Skip uncommon events...
    // pause: undefined,
    // resume: undefined,
    // pipe: undefined,
    // unpipe: undefined,
    // [destroyImpl.kConstruct]: undefined,
    // [destroyImpl.kDestroy]: undefined,
  };
  // Third argument `true` marks the states as belonging to a Duplex.
  this._readableState = new Readable.ReadableState(options, this, true);
  this._writableState = new Writable.WritableState(options, this, true);
  if (options) {
    this.allowHalfOpen = options.allowHalfOpen !== false;
    // readable:false / writable:false pre-terminate the respective side.
    if (options.readable === false) {
      this._readableState.readable = false;
      this._readableState.ended = true;
      this._readableState.endEmitted = true;
    }
    if (options.writable === false) {
      this._writableState.writable = false;
      this._writableState.ending = true;
      this._writableState.ended = true;
      this._writableState.finished = true;
    }
    // User-supplied hook overrides.
    if (typeof options.read === "function") {
      this._read = options.read;
    }
    if (typeof options.write === "function") {
      this._write = options.write;
    }
    if (typeof options.writev === "function") {
      this._writev = options.writev;
    }
    if (typeof options.destroy === "function") {
      this._destroy = options.destroy;
    }
    if (typeof options.final === "function") {
      this._final = options.final;
    }
    if (typeof options.construct === "function") {
      this._construct = options.construct;
    }
    if (options.signal) {
      // Destroy this duplex when the signal aborts.
      addAbortSignal(options.signal, this);
    }
  } else {
    this.allowHalfOpen = true;
  }
  Stream.call(this, options);
  // Kick off the async construction protocol when a _construct hook exists,
  // notifying both state objects once construction completes.
  if (this._construct != null) {
    destroyImpl.construct(this, () => {
      this._readableState[kOnConstructed](this);
      this._writableState[kOnConstructed](this);
    });
  }
}
// Mirror Writable's accessor properties onto Duplex.prototype so the
// writable side of a Duplex behaves exactly like a standalone Writable.
ObjectDefineProperties(Duplex.prototype, {
  writable: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writable"),
  },
  writableHighWaterMark: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(
      Writable.prototype,
      "writableHighWaterMark",
    ),
  },
  writableObjectMode: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableObjectMode"),
  },
  writableBuffer: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableBuffer"),
  },
  writableLength: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableLength"),
  },
  writableFinished: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableFinished"),
  },
  writableCorked: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableCorked"),
  },
  writableEnded: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableEnded"),
  },
  writableNeedDrain: {
    __proto__: null,
    ...ObjectGetOwnPropertyDescriptor(Writable.prototype, "writableNeedDrain"),
  },
  // A Duplex counts as destroyed only when BOTH sides are destroyed.
  destroyed: {
    __proto__: null,
    get() {
      // A partially torn-down instance is treated as not destroyed.
      if (
        this._readableState === undefined ||
        this._writableState === undefined
      ) {
        return false;
      }
      return this._readableState.destroyed && this._writableState.destroyed;
    },
    set(value) {
      // Backward compatibility, the user is explicitly
      // managing destroyed.
      if (this._readableState && this._writableState) {
        this._readableState.destroyed = value;
        this._writableState.destroyed = value;
      }
    },
  },
});
let webStreamsAdapters;
// Lazy to avoid circular references
function lazyWebStreams() {
if (webStreamsAdapters === undefined) {
webStreamsAdapters = _mod2;
}
return webStreamsAdapters;
}
Duplex.fromWeb = function (pair, options) {
return lazyWebStreams().newStreamDuplexFromReadableWritablePair(
pair,
options,
);
};
Duplex.toWeb = function (duplex) {
return lazyWebStreams().newReadableWritablePairFromDuplex(duplex);
};
let duplexify;
Duplex.from = function (body) {
duplexify ??= _mod3;
return duplexify(body, "body");
};
export default Duplex;
export { Duplex };

View file

@ -1,9 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
// Thin re-export shim: node:stream/duplex is just the Duplex class (and
// its static helpers) pulled out of the bundled stream implementation.
import { Duplex } from "ext:deno_node/_stream.mjs";
const { from, fromWeb, toWeb } = Duplex;
export default Duplex;
export { from, fromWeb, toWeb };

View file

@ -0,0 +1,408 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import {
isDuplexNodeStream,
isIterable,
isNodeStream,
isReadable,
isReadableNodeStream,
isReadableStream,
isWritable,
isWritableNodeStream,
isWritableStream,
} from "ext:deno_node/internal/streams/utils.js";
import eos from "ext:deno_node/internal/streams/end-of-stream.js";
import imported1 from "ext:deno_node/internal/errors.ts";
import { destroyer } from "ext:deno_node/internal/streams/destroy.js";
import Duplex from "ext:deno_node/internal/streams/duplex.js";
import Readable from "ext:deno_node/internal/streams/readable.js";
import Writable from "ext:deno_node/internal/streams/writable.js";
import from from "ext:deno_node/internal/streams/from.js";
import { isBlob } from "ext:deno_web/09_file.js";
import { AbortController } from "ext:deno_web/03_abort_signal.js";
const {
AbortError,
codes: {
ERR_INVALID_ARG_TYPE,
ERR_INVALID_RETURN_VALUE,
},
} = imported1;
"use strict";
const {
FunctionPrototypeCall,
PromiseWithResolvers,
} = primordials;
let _Duplexify;
/**
 * Coerces `body` — a Node stream, web stream, (async) iterable, async
 * generator function, Blob, promise, or `{ readable, writable }` pair —
 * into a Duplex stream. Backs `Duplex.from()`.
 * @param {*} body value to adapt.
 * @param {string} name argument name used in thrown errors.
 * @throws {ERR_INVALID_ARG_TYPE|ERR_INVALID_RETURN_VALUE}
 */
export default function duplexify(body, name) {
  // This is needed for pre node 17.
  class Duplexify extends Duplex {
    constructor(options) {
      super(options);
      // https://github.com/nodejs/node/pull/34385
      if (options?.readable === false) {
        this._readableState.readable = false;
        this._readableState.ended = true;
        this._readableState.endEmitted = true;
      }
      if (options?.writable === false) {
        this._writableState.writable = false;
        this._writableState.ending = true;
        this._writableState.ended = true;
        this._writableState.finished = true;
      }
    }
  }
  // Share the subclass with _duplexify via module state.
  _Duplexify = Duplexify;
  // Already a duplex: pass through untouched.
  if (isDuplexNodeStream(body)) {
    return body;
  }
  // One-sided Node streams: wrap the available side.
  if (isReadableNodeStream(body)) {
    return _duplexify({ readable: body });
  }
  if (isWritableNodeStream(body)) {
    return _duplexify({ writable: body });
  }
  if (isNodeStream(body)) {
    return _duplexify({ writable: false, readable: false });
  }
  // WHATWG streams: convert to the Node equivalent first.
  if (isReadableStream(body)) {
    return _duplexify({ readable: Readable.fromWeb(body) });
  }
  if (isWritableStream(body)) {
    return _duplexify({ writable: Writable.fromWeb(body) });
  }
  if (typeof body === "function") {
    const { value, write, final, destroy } = fromAsyncGen(body);
    // Body might be a constructor function instead of an async generator function.
    if (isDuplexNodeStream(value)) {
      return value;
    }
    if (isIterable(value)) {
      return from(Duplexify, value, {
        // TODO (ronag): highWaterMark?
        objectMode: true,
        write,
        final,
        destroy,
      });
    }
    // Plain async function: its promise must resolve to null/undefined.
    const then = value?.then;
    if (typeof then === "function") {
      let d;
      const promise = FunctionPrototypeCall(
        then,
        value,
        (val) => {
          if (val != null) {
            throw new ERR_INVALID_RETURN_VALUE("nully", "body", val);
          }
        },
        (err) => {
          destroyer(d, err);
        },
      );
      return d = new Duplexify({
        // TODO (ronag): highWaterMark?
        objectMode: true,
        readable: false,
        write,
        final(cb) {
          // Finish only after the async function's promise settles.
          final(async () => {
            try {
              await promise;
              process.nextTick(cb, null);
            } catch (err) {
              process.nextTick(cb, err);
            }
          });
        },
        destroy,
      });
    }
    throw new ERR_INVALID_RETURN_VALUE(
      "Iterable, AsyncIterable or AsyncFunction",
      name,
      value,
    );
  }
  if (isBlob(body)) {
    // Recurse on the Blob's contents promise.
    return duplexify(body.arrayBuffer());
  }
  if (isIterable(body)) {
    return from(Duplexify, body, {
      // TODO (ronag): highWaterMark?
      objectMode: true,
      writable: false,
    });
  }
  if (
    isReadableStream(body?.readable) &&
    isWritableStream(body?.writable)
  ) {
    return Duplexify.fromWeb(body);
  }
  if (
    typeof body?.writable === "object" ||
    typeof body?.readable === "object"
  ) {
    // Adapt each side independently, then pair them up.
    const readable = body?.readable
      ? isReadableNodeStream(body?.readable)
        ? body?.readable
        : duplexify(body.readable)
      : undefined;
    const writable = body?.writable
      ? isWritableNodeStream(body?.writable)
        ? body?.writable
        : duplexify(body.writable)
      : undefined;
    return _duplexify({ readable, writable });
  }
  // A promise: its resolved value becomes the single readable chunk.
  const then = body?.then;
  if (typeof then === "function") {
    let d;
    FunctionPrototypeCall(
      then,
      body,
      (val) => {
        if (val != null) {
          d.push(val);
        }
        d.push(null);
      },
      (err) => {
        destroyer(d, err);
      },
    );
    return d = new Duplexify({
      objectMode: true,
      writable: false,
      read() {},
    });
  }
  throw new ERR_INVALID_ARG_TYPE(
    name,
    [
      "Blob",
      "ReadableStream",
      "WritableStream",
      "Stream",
      "Iterable",
      "AsyncIterable",
      "Function",
      "{ readable, writable } pair",
      "Promise",
    ],
    body,
  );
}
/**
 * Bridges a user-supplied async generator function into Writable hooks.
 * The generator passed to `fn` consumes chunks handed over one at a time
 * via the returned write()/final() callbacks; destroy() aborts the
 * generator through an AbortSignal.
 * @param {Function} fn async generator function receiving (iterable, { signal }).
 * @returns {{ value: *, write: Function, final: Function, destroy: Function }}
 */
function fromAsyncGen(fn) {
  let { promise, resolve } = PromiseWithResolvers();
  const ac = new AbortController();
  const signal = ac.signal;
  // Each write() resolves the pending promise with one chunk; the
  // generator awaits it, acks via cb, then re-arms for the next chunk.
  const value = fn(
    async function* () {
      while (true) {
        const _promise = promise;
        promise = null;
        const { chunk, done, cb } = await _promise;
        process.nextTick(cb);
        if (done) return;
        if (signal.aborted) {
          throw new AbortError(undefined, { cause: signal.reason });
        }
        // Re-arm the handshake before yielding control to the consumer.
        ({ promise, resolve } = PromiseWithResolvers());
        yield chunk;
      }
    }(),
    { signal },
  );
  return {
    value,
    write(chunk, encoding, cb) {
      const _resolve = resolve;
      resolve = null;
      _resolve({ chunk, done: false, cb });
    },
    final(cb) {
      const _resolve = resolve;
      resolve = null;
      _resolve({ done: true, cb });
    },
    destroy(err, cb) {
      ac.abort();
      cb(err);
    },
  };
}
/**
 * Combines a `{ readable, writable }` pair of Node streams into a single
 * Duplex that proxies writes to `writable` and reads from `readable`.
 * @param {{ readable?: *, writable?: * }} pair
 * @returns {Duplex}
 */
function _duplexify(pair) {
  // Wrap old-style readables that lack a read() method.
  const r = pair.readable && typeof pair.readable.read !== "function"
    ? Readable.wrap(pair.readable)
    : pair.readable;
  const w = pair.writable;
  let readable = !!isReadable(r);
  let writable = !!isWritable(w);
  let ondrain;
  let onfinish;
  let onreadable;
  let onclose;
  let d;
  // Shared completion handler for both underlying streams.
  function onfinished(err) {
    const cb = onclose;
    onclose = null;
    if (cb) {
      cb(err);
    } else if (err) {
      d.destroy(err);
    }
  }
  // TODO(ronag): Avoid double buffering.
  // Implement Writable/Readable/Duplex traits.
  // See, https://github.com/nodejs/node/pull/33515.
  d = new _Duplexify({
    // TODO (ronag): highWaterMark?
    readableObjectMode: !!r?.readableObjectMode,
    writableObjectMode: !!w?.writableObjectMode,
    readable,
    writable,
  });
  if (writable) {
    eos(w, (err) => {
      writable = false;
      if (err) {
        destroyer(r, err);
      }
      onfinished(err);
    });
    // Forward writes, propagating backpressure via the drain callback.
    d._write = function (chunk, encoding, callback) {
      if (w.write(chunk, encoding)) {
        callback();
      } else {
        ondrain = callback;
      }
    };
    d._final = function (callback) {
      w.end();
      onfinish = callback;
    };
    w.on("drain", function () {
      if (ondrain) {
        const cb = ondrain;
        ondrain = null;
        cb();
      }
    });
    w.on("finish", function () {
      if (onfinish) {
        const cb = onfinish;
        onfinish = null;
        cb();
      }
    });
  }
  if (readable) {
    eos(r, (err) => {
      readable = false;
      if (err) {
        destroyer(r, err);
      }
      onfinished(err);
    });
    r.on("readable", function () {
      if (onreadable) {
        const cb = onreadable;
        onreadable = null;
        cb();
      }
    });
    r.on("end", function () {
      d.push(null);
    });
    // Pull from the inner readable until it runs dry or d's buffer fills.
    d._read = function () {
      while (true) {
        const buf = r.read();
        if (buf === null) {
          // Resume pulling on the next 'readable' event.
          onreadable = d._read;
          return;
        }
        if (!d.push(buf)) {
          return;
        }
      }
    };
  }
  d._destroy = function (err, callback) {
    // Destroying before close counts as an abort.
    if (!err && onclose !== null) {
      err = new AbortError();
    }
    onreadable = null;
    ondrain = null;
    onfinish = null;
    if (onclose === null) {
      callback(err);
    } else {
      onclose = callback;
      destroyer(w, err);
      destroyer(r, err);
    }
  };
  return d;
}

View file

@ -0,0 +1,67 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import { Duplex } from "node:stream";
import assert from "ext:deno_node/internal/assert.mjs";
"use strict";
const {
Symbol,
} = primordials;
const kCallback = Symbol("Callback");
const kInitOtherSide = Symbol("InitOtherSide");
/**
 * One half of an entangled duplex pair: everything written here becomes
 * readable on the peer side, with backpressure mediated through the
 * peer's stored write callback (kCallback).
 */
class DuplexSide extends Duplex {
  #otherSide = null;
  constructor(options) {
    super(options);
    this[kCallback] = null;
    this.#otherSide = null;
  }
  /** Wires up the peer; may only be done once (enforces encapsulation). */
  [kInitOtherSide](otherSide) {
    assert(this.#otherSide === null);
    this.#otherSide = otherSide;
  }
  /** Reading drains backpressure: release the pending write ack, if any. */
  _read() {
    const pending = this[kCallback];
    if (!pending) {
      return;
    }
    this[kCallback] = null;
    pending();
  }
  /** Push the chunk into the peer and park the ack until the peer reads. */
  _write(chunk, encoding, callback) {
    const peer = this.#otherSide;
    assert(peer !== null);
    assert(peer[kCallback] === null);
    if (chunk.length !== 0) {
      peer.push(chunk);
      peer[kCallback] = callback;
    } else {
      // Empty chunks carry no data; ack asynchronously without pushing.
      process.nextTick(callback);
    }
  }
  /** Ending this side ends the peer's readable side. */
  _final(callback) {
    this.#otherSide.on("end", callback);
    this.#otherSide.push(null);
  }
}
/**
 * Creates two entangled Duplex streams: data written to either side is
 * readable from the other, in both directions.
 * @param {object} [options] passed through to each DuplexSide.
 * @returns {[DuplexSide, DuplexSide]}
 */
function duplexPair(options) {
  const sides = [new DuplexSide(options), new DuplexSide(options)];
  sides[0][kInitOtherSide](sides[1]);
  sides[1][kInitOtherSide](sides[0]);
  return sides;
}
export default duplexPair;
export { duplexPair };

View file

@ -0,0 +1,341 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import imported1 from "ext:deno_node/internal/errors.ts";
import { kEmptyObject, once } from "ext:deno_node/internal/util.mjs";
import {
validateAbortSignal,
validateBoolean,
validateFunction,
validateObject,
} from "ext:deno_node/internal/validators.mjs";
import {
isClosed,
isNodeStream,
isReadable,
isReadableErrored,
isReadableFinished,
isReadableNodeStream,
isReadableStream,
isWritable,
isWritableErrored,
isWritableFinished,
isWritableNodeStream,
isWritableStream,
kIsClosedPromise,
willEmitClose as _willEmitClose,
} from "ext:deno_node/internal/streams/utils.js";
import * as _mod2 from "ext:deno_node/internal/events/abort_listener.mjs";
const {
AbortError,
codes: {
ERR_INVALID_ARG_TYPE,
ERR_STREAM_PREMATURE_CLOSE,
},
} = imported1;
// Ported from https://github.com/mafintosh/end-of-stream with
// permission from the author, Mathias Buus (@mafintosh).
"use strict";
const {
Promise,
PromisePrototypeThen,
SymbolDispose,
} = primordials;
let addAbortListener;
/**
 * Heuristic for http.ClientRequest-like objects: they expose `setHeader`
 * plus an `abort()` method. Preserves the original truthiness contract:
 * a falsy `setHeader` is returned as-is, otherwise a boolean.
 */
function isRequest(stream) {
  const hasAbort = typeof stream.abort === "function";
  return stream.setHeader && hasAbort;
}
// Shared no-op used to disarm callbacks after cleanup.
const nop = () => {};
/**
 * Core "end of stream" watcher. Invokes `callback` exactly once when
 * `stream` terminates (end/finish/close/error), honoring the
 * readable/writable/error/signal options. Supports Node streams,
 * WHATWG streams (delegated to eosWeb) and legacy/request-like objects.
 * @returns {Function} cleanup function detaching every installed listener.
 */
function eos(stream, options, callback) {
  // Support the eos(stream, callback) call shape.
  if (arguments.length === 2) {
    callback = options;
    options = kEmptyObject;
  } else if (options == null) {
    options = kEmptyObject;
  } else {
    validateObject(options, "options");
  }
  validateFunction(callback, "callback");
  validateAbortSignal(options.signal, "options.signal");
  callback = once(callback);
  // Web streams are tracked through their closed promise instead.
  if (isReadableStream(stream) || isWritableStream(stream)) {
    return eosWeb(stream, options, callback);
  }
  if (!isNodeStream(stream)) {
    throw new ERR_INVALID_ARG_TYPE("stream", [
      "ReadableStream",
      "WritableStream",
      "Stream",
    ], stream);
  }
  // Which sides we must wait on; options can override detection.
  const readable = options.readable ?? isReadableNodeStream(stream);
  const writable = options.writable ?? isWritableNodeStream(stream);
  const wState = stream._writableState;
  const rState = stream._readableState;
  const onlegacyfinish = () => {
    if (!stream.writable) {
      onfinish();
    }
  };
  // TODO (ronag): Improve soft detection to include core modules and
  // common ecosystem modules that do properly emit 'close' but fail
  // this generic check.
  let willEmitClose = _willEmitClose(stream) &&
    isReadableNodeStream(stream) === readable &&
    isWritableNodeStream(stream) === writable;
  let writableFinished = isWritableFinished(stream, false);
  const onfinish = () => {
    writableFinished = true;
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) {
      willEmitClose = false;
    }
    // If 'close' is coming, wait for it rather than firing now.
    if (willEmitClose && (!stream.readable || readable)) {
      return;
    }
    if (!readable || readableFinished) {
      callback.call(stream);
    }
  };
  let readableFinished = isReadableFinished(stream, false);
  const onend = () => {
    readableFinished = true;
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) {
      willEmitClose = false;
    }
    if (willEmitClose && (!stream.writable || writable)) {
      return;
    }
    if (!writable || writableFinished) {
      callback.call(stream);
    }
  };
  const onerror = (err) => {
    callback.call(stream, err);
  };
  let closed = isClosed(stream);
  const onclose = () => {
    closed = true;
    const errored = isWritableErrored(stream) || isReadableErrored(stream);
    if (errored && typeof errored !== "boolean") {
      return callback.call(stream, errored);
    }
    // 'close' before the expected 'end'/'finish' is a premature close.
    if (readable && !readableFinished && isReadableNodeStream(stream, true)) {
      if (!isReadableFinished(stream, false)) {
        return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE());
      }
    }
    if (writable && !writableFinished) {
      if (!isWritableFinished(stream, false)) {
        return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE());
      }
    }
    callback.call(stream);
  };
  // Like onclose, but for streams already known to be finished/errored,
  // so no premature-close checks apply.
  const onclosed = () => {
    closed = true;
    const errored = isWritableErrored(stream) || isReadableErrored(stream);
    if (errored && typeof errored !== "boolean") {
      return callback.call(stream, errored);
    }
    callback.call(stream);
  };
  const onrequest = () => {
    stream.req.on("finish", onfinish);
  };
  if (isRequest(stream)) {
    stream.on("complete", onfinish);
    if (!willEmitClose) {
      stream.on("abort", onclose);
    }
    if (stream.req) {
      onrequest();
    } else {
      stream.on("request", onrequest);
    }
  } else if (writable && !wState) { // legacy streams
    stream.on("end", onlegacyfinish);
    stream.on("close", onlegacyfinish);
  }
  // Not all streams will emit 'close' after 'aborted'.
  if (!willEmitClose && typeof stream.aborted === "boolean") {
    stream.on("aborted", onclose);
  }
  stream.on("end", onend);
  stream.on("finish", onfinish);
  if (options.error !== false) {
    stream.on("error", onerror);
  }
  stream.on("close", onclose);
  // The stream may already be in a terminal state; fire asynchronously
  // so the callback never runs synchronously from eos().
  if (closed) {
    process.nextTick(onclose);
  } else if (wState?.errorEmitted || rState?.errorEmitted) {
    if (!willEmitClose) {
      process.nextTick(onclosed);
    }
  } else if (
    !readable &&
    (!willEmitClose || isReadable(stream)) &&
    (writableFinished || isWritable(stream) === false) &&
    (wState == null || wState.pendingcb === undefined || wState.pendingcb === 0)
  ) {
    process.nextTick(onclosed);
  } else if (
    !writable &&
    (!willEmitClose || isWritable(stream)) &&
    (readableFinished || isReadable(stream) === false)
  ) {
    process.nextTick(onclosed);
  } else if ((rState && stream.req && stream.aborted)) {
    process.nextTick(onclosed);
  }
  const cleanup = () => {
    callback = nop;
    stream.removeListener("aborted", onclose);
    stream.removeListener("complete", onfinish);
    stream.removeListener("abort", onclose);
    stream.removeListener("request", onrequest);
    if (stream.req) stream.req.removeListener("finish", onfinish);
    stream.removeListener("end", onlegacyfinish);
    stream.removeListener("close", onlegacyfinish);
    stream.removeListener("finish", onfinish);
    stream.removeListener("end", onend);
    stream.removeListener("error", onerror);
    stream.removeListener("close", onclose);
  };
  if (options.signal && !closed) {
    const abort = () => {
      // Keep it because cleanup removes it.
      const endCallback = callback;
      cleanup();
      endCallback.call(
        stream,
        new AbortError(undefined, { cause: options.signal.reason }),
      );
    };
    if (options.signal.aborted) {
      process.nextTick(abort);
    } else {
      addAbortListener ??= _mod2.addAbortListener;
      const disposable = addAbortListener(options.signal, abort);
      const originalCallback = callback;
      // Dispose the abort listener once the callback fires normally.
      callback = once((...args) => {
        disposable[SymbolDispose]();
        originalCallback.apply(stream, args);
      });
    }
  }
  return cleanup;
}
/**
 * eos() for WHATWG streams: settles via the stream's internal closed
 * promise (kIsClosedPromise) and supports abort via options.signal.
 * @returns {Function} no-op — web streams install no removable listeners.
 */
function eosWeb(stream, options, callback) {
  let isAborted = false;
  let abort = nop;
  if (options.signal) {
    abort = () => {
      isAborted = true;
      callback.call(
        stream,
        new AbortError(undefined, { cause: options.signal.reason }),
      );
    };
    if (options.signal.aborted) {
      process.nextTick(abort);
    } else {
      addAbortListener ??= _mod2.addAbortListener;
      const disposable = addAbortListener(options.signal, abort);
      const originalCallback = callback;
      // Dispose the abort listener once the callback fires normally.
      callback = once((...args) => {
        disposable[SymbolDispose]();
        originalCallback.apply(stream, args);
      });
    }
  }
  // Same handler for fulfillment and rejection: either way the stream is
  // closed; suppress it if the abort path already fired.
  const resolverFn = (...args) => {
    if (!isAborted) {
      process.nextTick(() => callback.apply(stream, args));
    }
  };
  PromisePrototypeThen(
    stream[kIsClosedPromise].promise,
    resolverFn,
    resolverFn,
  );
  return nop;
}
function finished(stream, opts) {
let autoCleanup = false;
if (opts === null) {
opts = kEmptyObject;
}
if (opts?.cleanup) {
validateBoolean(opts.cleanup, "cleanup");
autoCleanup = opts.cleanup;
}
return new Promise((resolve, reject) => {
const cleanup = eos(stream, opts, (err) => {
if (autoCleanup) {
cleanup();
}
if (err) {
reject(err);
} else {
resolve();
}
});
});
}
export { finished };
export default eos;
export { eos };

View file

@ -1,232 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
import {
AbortError,
ERR_STREAM_PREMATURE_CLOSE,
} from "ext:deno_node/internal/errors.ts";
import { once } from "ext:deno_node/internal/util.mjs";
import {
validateAbortSignal,
validateFunction,
validateObject,
} from "ext:deno_node/internal/validators.mjs";
import * as process from "ext:deno_node/_process/process.ts";
// --- Stream-kind and completion predicates (legacy heuristics) ---

// http.ClientRequest-like: exposes `setHeader` and an `abort()` method.
// Preserves truthiness semantics: a falsy `setHeader` is returned as-is.
function isRequest(stream) {
  const hasAbort = typeof stream.abort === "function";
  return stream.setHeader && hasAbort;
}
// http.ServerResponse is recognized by its characteristic private flags.
function isServerResponse(stream) {
  const flags = [
    "_sent100",
    "_removedConnection",
    "_removedContLen",
    "_removedTE",
    "_closed",
  ];
  return flags.every((flag) => typeof stream[flag] === "boolean");
}
// Looks like a readable stream (public flag or internal state present).
function isReadable(stream) {
  if (typeof stream.readable === "boolean") return true;
  if (typeof stream.readableEnded === "boolean") return true;
  return !!stream._readableState;
}
// Looks like a writable stream (public flag or internal state present).
function isWritable(stream) {
  if (typeof stream.writable === "boolean") return true;
  if (typeof stream.writableEnded === "boolean") return true;
  return !!stream._writableState;
}
// The writable side has flushed everything (finished, or ended with an
// empty buffer) and has not errored.
function isWritableFinished(stream) {
  if (stream.writableFinished) return true;
  const wState = stream._writableState;
  if (!wState) return false;
  if (wState.errored) return false;
  return wState.finished || (wState.ended && wState.length === 0);
}
// Shared no-op used to disarm callbacks after cleanup.
const nop = () => {};
// The readable side has been fully consumed ('end' emitted, or ended
// with an empty buffer) and has not errored.
function isReadableEnded(stream) {
  if (stream.readableEnded) return true;
  const rState = stream._readableState;
  if (!rState) return false;
  if (rState.errored) return false;
  return rState.endEmitted || (rState.ended && rState.length === 0);
}
/**
 * Legacy "end of stream" watcher (superseded by the ported Node version).
 * Invokes `callback` once when `stream` terminates, honoring the
 * readable/writable/error/signal options.
 * @returns {Function} cleanup function detaching all installed listeners.
 */
function eos(stream, options, callback) {
  // Support the eos(stream, callback) call shape.
  if (arguments.length === 2) {
    callback = options;
    options = {};
  } else if (options == null) {
    options = {};
  } else {
    validateObject(options, "options");
  }
  validateFunction(callback, "callback");
  validateAbortSignal(options.signal, "options.signal");
  callback = once(callback);
  // Which sides we must wait on; options can override detection.
  const readable = options.readable ||
    (options.readable !== false && isReadable(stream));
  const writable = options.writable ||
    (options.writable !== false && isWritable(stream));
  const wState = stream._writableState;
  const rState = stream._readableState;
  const state = wState || rState;
  const onlegacyfinish = () => {
    if (!stream.writable) onfinish();
  };
  // TODO (ronag): Improve soft detection to include core modules and
  // common ecosystem modules that do properly emit 'close' but fail
  // this generic check.
  let willEmitClose = isServerResponse(stream) || (
    state &&
    state.autoDestroy &&
    state.emitClose &&
    state.closed === false &&
    isReadable(stream) === readable &&
    isWritable(stream) === writable
  );
  let writableFinished = stream.writableFinished ||
    (wState && wState.finished);
  const onfinish = () => {
    writableFinished = true;
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) willEmitClose = false;
    // If 'close' is coming, wait for it rather than firing now.
    if (willEmitClose && (!stream.readable || readable)) return;
    if (!readable || readableEnded) callback.call(stream);
  };
  let readableEnded = stream.readableEnded ||
    (rState && rState.endEmitted);
  const onend = () => {
    readableEnded = true;
    // Stream should not be destroyed here. If it is that
    // means that user space is doing something differently and
    // we cannot trust willEmitClose.
    if (stream.destroyed) willEmitClose = false;
    if (willEmitClose && (!stream.writable || writable)) return;
    if (!writable || writableFinished) callback.call(stream);
  };
  const onerror = (err) => {
    callback.call(stream, err);
  };
  // 'close' before the expected 'end'/'finish' is a premature close.
  const onclose = () => {
    if (readable && !readableEnded) {
      if (!isReadableEnded(stream)) {
        return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE());
      }
    }
    if (writable && !writableFinished) {
      if (!isWritableFinished(stream)) {
        return callback.call(stream, new ERR_STREAM_PREMATURE_CLOSE());
      }
    }
    callback.call(stream);
  };
  const onrequest = () => {
    stream.req.on("finish", onfinish);
  };
  if (isRequest(stream)) {
    stream.on("complete", onfinish);
    if (!willEmitClose) {
      stream.on("abort", onclose);
    }
    if (stream.req) onrequest();
    else stream.on("request", onrequest);
  } else if (writable && !wState) { // legacy streams
    stream.on("end", onlegacyfinish);
    stream.on("close", onlegacyfinish);
  }
  // Not all streams will emit 'close' after 'aborted'.
  if (!willEmitClose && typeof stream.aborted === "boolean") {
    stream.on("aborted", onclose);
  }
  stream.on("end", onend);
  stream.on("finish", onfinish);
  if (options.error !== false) stream.on("error", onerror);
  stream.on("close", onclose);
  // _closed is for OutgoingMessage which is not a proper Writable.
  const closed = (!wState && !rState && stream._closed === true) || (
    (wState && wState.closed) ||
    (rState && rState.closed) ||
    (wState && wState.errorEmitted) ||
    (rState && rState.errorEmitted) ||
    (rState && stream.req && stream.aborted) ||
    (
      (!wState || !willEmitClose || typeof wState.closed !== "boolean") &&
      (!rState || !willEmitClose || typeof rState.closed !== "boolean") &&
      (!writable || (wState && wState.finished)) &&
      (!readable || (rState && rState.endEmitted))
    )
  );
  if (closed) {
    // TODO(ronag): Re-throw error if errorEmitted?
    // TODO(ronag): Throw premature close as if finished was called?
    // before being closed? i.e. if closed but not errored, ended or finished.
    // TODO(ronag): Throw some kind of error? Does it make sense
    // to call finished() on a "finished" stream?
    // TODO(ronag): willEmitClose?
    process.nextTick(() => {
      callback();
    });
  }
  const cleanup = () => {
    callback = nop;
    stream.removeListener("aborted", onclose);
    stream.removeListener("complete", onfinish);
    stream.removeListener("abort", onclose);
    stream.removeListener("request", onrequest);
    if (stream.req) stream.req.removeListener("finish", onfinish);
    stream.removeListener("end", onlegacyfinish);
    stream.removeListener("close", onlegacyfinish);
    stream.removeListener("finish", onfinish);
    stream.removeListener("end", onend);
    stream.removeListener("error", onerror);
    stream.removeListener("close", onclose);
  };
  if (options.signal && !closed) {
    const abort = () => {
      // Keep it because cleanup removes it.
      const endCallback = callback;
      cleanup();
      endCallback.call(stream, new AbortError());
    };
    if (options.signal.aborted) {
      process.nextTick(abort);
    } else {
      const originalCallback = callback;
      // Remove the abort listener once the callback fires normally.
      callback = once((...args) => {
        options.signal.removeEventListener("abort", abort);
        originalCallback.apply(stream, args);
      });
      options.signal.addEventListener("abort", abort);
    }
  }
  return cleanup;
}
export default eos;

View file

@ -0,0 +1,218 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import { Buffer } from "node:buffer";
import _mod1 from "ext:deno_node/internal/errors.ts";
const {
ERR_INVALID_ARG_TYPE,
ERR_STREAM_NULL_VALUES,
} = _mod1.codes;
"use strict";
const {
PromisePrototypeThen,
SymbolAsyncIterator,
SymbolIterator,
} = primordials;
/**
 * Builds a Readable that emits the values of `iterable` (sync or async).
 * Strings and Buffers are emitted as a single chunk. Backs
 * `Readable.from()` and the object-mode side of `Duplex.from()`.
 * @param {Function} Readable the Readable (or Duplex) constructor to use.
 * @param {*} iterable source of chunks.
 * @param {object} [opts] stream options merged over the defaults.
 * @throws {ERR_INVALID_ARG_TYPE|ERR_STREAM_NULL_VALUES}
 */
function from(Readable, iterable, opts) {
  let iterator;
  // Strings/Buffers are a single chunk, not iterated char-by-char.
  if (typeof iterable === "string" || iterable instanceof Buffer) {
    return new Readable({
      objectMode: true,
      ...opts,
      read() {
        this.push(iterable);
        this.push(null);
      },
    });
  }
  let isAsync;
  if (iterable?.[SymbolAsyncIterator]) {
    isAsync = true;
    iterator = iterable[SymbolAsyncIterator]();
  } else if (iterable?.[SymbolIterator]) {
    isAsync = false;
    iterator = iterable[SymbolIterator]();
  } else {
    throw new ERR_INVALID_ARG_TYPE("iterable", ["Iterable"], iterable);
  }
  const readable = new Readable({
    objectMode: true,
    highWaterMark: 1,
    // TODO(ronag): What options should be allowed?
    ...opts,
  });
  // Flag to protect against _read
  // being called before last iteration completion.
  let reading = false;
  // Set once a sync iterator starts producing promise values.
  let isAsyncValues = false;
  readable._read = function () {
    if (!reading) {
      reading = true;
      if (isAsync) {
        nextAsync();
      } else if (isAsyncValues) {
        nextSyncWithAsyncValues();
      } else {
        nextSyncWithSyncValues();
      }
    }
  };
  readable._destroy = function (error, cb) {
    PromisePrototypeThen(
      close(error),
      () => process.nextTick(cb, error), // nextTick is here in case cb throws
      (e) => process.nextTick(cb, e || error),
    );
  };
  // Propagate destruction into the iterator via throw()/return().
  async function close(error) {
    const hadError = (error !== undefined) && (error !== null);
    const hasThrow = typeof iterator.throw === "function";
    if (hadError && hasThrow) {
      const { value, done } = await iterator.throw(error);
      await value;
      if (done) {
        return;
      }
    }
    if (typeof iterator.return === "function") {
      const { value } = await iterator.return();
      await value;
    }
  }
  // There are a lot of duplication here, it's done on purpose for performance
  // reasons - avoid await when not needed.
  function nextSyncWithSyncValues() {
    for (;;) {
      try {
        const { value, done } = iterator.next();
        if (done) {
          readable.push(null);
          return;
        }
        // A sync iterator may still yield promises; switch strategies.
        if (
          value &&
          typeof value.then === "function"
        ) {
          return changeToAsyncValues(value);
        }
        if (value === null) {
          reading = false;
          throw new ERR_STREAM_NULL_VALUES();
        }
        if (readable.push(value)) {
          continue;
        }
        reading = false;
      } catch (err) {
        readable.destroy(err);
      }
      break;
    }
  }
  // One-shot transition: awaits the first promise value, then continues
  // in async-values mode.
  async function changeToAsyncValues(value) {
    isAsyncValues = true;
    try {
      const res = await value;
      if (res === null) {
        reading = false;
        throw new ERR_STREAM_NULL_VALUES();
      }
      if (readable.push(res)) {
        nextSyncWithAsyncValues();
        return;
      }
      reading = false;
    } catch (err) {
      readable.destroy(err);
    }
  }
  // Sync iterator whose values are (possibly) promises.
  async function nextSyncWithAsyncValues() {
    for (;;) {
      try {
        const { value, done } = iterator.next();
        if (done) {
          readable.push(null);
          return;
        }
        const res = (value &&
            typeof value.then === "function")
          ? await value
          : value;
        if (res === null) {
          reading = false;
          throw new ERR_STREAM_NULL_VALUES();
        }
        if (readable.push(res)) {
          continue;
        }
        reading = false;
      } catch (err) {
        readable.destroy(err);
      }
      break;
    }
  }
  // Fully async iterator.
  async function nextAsync() {
    for (;;) {
      try {
        const { value, done } = await iterator.next();
        if (done) {
          readable.push(null);
          return;
        }
        if (value === null) {
          reading = false;
          throw new ERR_STREAM_NULL_VALUES();
        }
        if (readable.push(value)) {
          continue;
        }
        reading = false;
      } catch (err) {
        readable.destroy(err);
      }
      break;
    }
  }
  return readable;
}
const _defaultExport = from;
export default _defaultExport;
export { from };

View file

@ -1,32 +1,37 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { getDefaultEncoding } from "ext:deno_node/internal/crypto/util.ts";
import { primordials } from "ext:core/mod.js";
import stream from "node:stream";
// LazyTransform is a special type of Transform stream that is lazily loaded.
// This is used for performance with bi-API-ship: when two APIs are available
// for the stream, one conventional and one non-conventional.
"use strict";
const {
ObjectDefineProperties,
ObjectDefineProperty,
ObjectSetPrototypeOf,
} = primordials;
/**
 * LazyTransform defers Transform initialization until a stream property
 * is first touched; until then it only remembers the constructor options.
 * @param {object} [options] stored for the deferred Transform.call().
 */
function LazyTransform(options) {
  this._options = options;
}
Object.setPrototypeOf(LazyTransform.prototype, stream.Transform.prototype);
Object.setPrototypeOf(LazyTransform, stream.Transform);
ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype);
ObjectSetPrototypeOf(LazyTransform, stream.Transform);
/**
 * Builds the lazy accessor for `name` (_readableState/_writableState):
 * on first access it performs the real Transform initialization with the
 * stored options, then returns the now-materialized property.
 * @param {string} name property being lazily initialized.
 * @returns {Function} getter installed on LazyTransform.prototype.
 */
function makeGetter(name) {
  return function () {
    stream.Transform.call(this, this._options);
    this._writableState.decodeStrings = false;
    const opts = this._options;
    if (!opts?.defaultEncoding) {
      this._writableState.defaultEncoding = getDefaultEncoding();
    }
    return this[name];
  };
}
function makeSetter(name) {
return function (val) {
Object.defineProperty(this, name, {
ObjectDefineProperty(this, name, {
__proto__: null,
value: val,
enumerable: true,
configurable: true,
@ -35,19 +40,21 @@ function makeSetter(name) {
};
}
Object.defineProperties(LazyTransform.prototype, {
ObjectDefineProperties(LazyTransform.prototype, {
_readableState: {
__proto__: null,
get: makeGetter("_readableState"),
set: makeSetter("_readableState"),
configurable: true,
enumerable: true,
},
_writableState: {
__proto__: null,
get: makeGetter("_writableState"),
set: makeSetter("_writableState"),
configurable: true,
enumerable: true,
},
});
export default LazyTransform;
export { LazyTransform };

View file

@ -0,0 +1,134 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
import EE from "node:events";
"use strict";
const {
ArrayIsArray,
ObjectSetPrototypeOf,
ReflectOwnKeys,
} = primordials;
// Legacy base Stream: an EventEmitter with a `pipe` method (added below).
function Stream(opts) {
  EE.call(this, opts);
}
ObjectSetPrototypeOf(Stream.prototype, EE.prototype);
ObjectSetPrototypeOf(Stream, EE);
// Legacy pipe: forwards 'data' from `source` (this) into `dest`, applying
// backpressure via pause()/resume(), ending/destroying the destination on
// 'end'/'close' (unless options.end === false), and tearing every handler
// down on error or close. Returns `dest` to allow A.pipe(B).pipe(C).
Stream.prototype.pipe = function (dest, options) {
  const source = this;
  // Forward chunks; pause the source when the destination reports backpressure.
  function ondata(chunk) {
    if (dest.writable && dest.write(chunk) === false && source.pause) {
      source.pause();
    }
  }
  source.on("data", ondata);
  // Resume the source once the destination has drained.
  function ondrain() {
    if (source.readable && source.resume) {
      source.resume();
    }
  }
  dest.on("drain", ondrain);
  // If the 'end' option is not supplied, dest.end() will be called when
  // source gets the 'end' or 'close' events. Only dest.end() once.
  if (!dest._isStdio && (!options || options.end !== false)) {
    source.on("end", onend);
    source.on("close", onclose);
  }
  let didOnEnd = false;
  function onend() {
    if (didOnEnd) return;
    didOnEnd = true;
    dest.end();
  }
  function onclose() {
    if (didOnEnd) return;
    didOnEnd = true;
    if (typeof dest.destroy === "function") dest.destroy();
  }
  // Don't leave dangling pipes when there are errors.
  function onerror(er) {
    cleanup();
    // Re-emit only when no other 'error' listener exists, mirroring
    // EventEmitter's unhandled-'error' behavior.
    if (EE.listenerCount(this, "error") === 0) {
      this.emit("error", er);
    }
  }
  prependListener(source, "error", onerror);
  prependListener(dest, "error", onerror);
  // Remove all the event listeners that were added.
  function cleanup() {
    source.removeListener("data", ondata);
    dest.removeListener("drain", ondrain);
    source.removeListener("end", onend);
    source.removeListener("close", onclose);
    source.removeListener("error", onerror);
    dest.removeListener("error", onerror);
    source.removeListener("end", cleanup);
    source.removeListener("close", cleanup);
    dest.removeListener("close", cleanup);
  }
  source.on("end", cleanup);
  source.on("close", cleanup);
  dest.on("close", cleanup);
  dest.emit("pipe", source);
  // Allow for unix-like usage: A.pipe(B).pipe(C)
  return dest;
};
// List the event names that currently have at least one listener attached.
Stream.prototype.eventNames = function eventNames() {
  const result = [];
  for (const eventKey of ReflectOwnKeys(this._events)) {
    const handlers = this._events[eventKey];
    // A single listener is stored as a function; multiple as an array.
    const hasListener = typeof handlers === "function" ||
      (ArrayIsArray(handlers) && handlers.length > 0);
    if (hasListener) {
      result.push(eventKey);
    }
  }
  return result;
};
// Attach `fn` so it runs before any already-registered listener for `event`.
function prependListener(emitter, event, fn) {
  // Use the emitter's own prependListener when available. Sadly this is not
  // cacheable as some libraries bundle their own event emitter
  // implementation with them.
  if (typeof emitter.prependListener === "function") {
    return emitter.prependListener(event, fn);
  }
  // This is a hack to make sure that our error handler is attached before any
  // userland ones. NEVER DO THIS. This is here only because this code needs
  // to continue to work with older versions of Node.js that do not include
  // the prependListener() method. The goal is to eventually remove this hack.
  const events = emitter._events;
  if (!events || !events[event]) {
    emitter.on(event, fn);
  } else if (ArrayIsArray(events[event])) {
    events[event].unshift(fn);
  } else {
    events[event] = [fn, events[event]];
  }
}
const _defaultExport1 = { Stream, prependListener };
export default _defaultExport1;
export { prependListener, Stream };

View file

@ -0,0 +1,473 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
import { AbortController, AbortSignal } from "ext:deno_web/03_abort_signal.js";
import imported1 from "ext:deno_node/internal/errors.ts";
import {
validateAbortSignal,
validateInteger,
validateObject,
} from "ext:deno_node/internal/validators.mjs";
import {
kResistStopPropagation,
kWeakHandler,
} from "ext:deno_node/internal/event_target.mjs";
import { finished } from "ext:deno_node/internal/streams/end-of-stream.js";
import staticCompose from "ext:deno_node/internal/streams/compose.js";
import { addAbortSignalNoValidate } from "ext:deno_node/internal/streams/add-abort-signal.js";
import {
isNodeStream,
isWritable,
} from "ext:deno_node/internal/streams/utils.js";
const {
AbortError,
codes: {
ERR_INVALID_ARG_TYPE,
ERR_INVALID_ARG_VALUE,
ERR_MISSING_ARGS,
ERR_OUT_OF_RANGE,
},
} = imported1;
"use strict";
const {
ArrayPrototypePush,
Boolean,
MathFloor,
Number,
NumberIsNaN,
Promise,
PromisePrototypeThen,
PromiseReject,
PromiseResolve,
Symbol,
} = primordials;
const kEmpty = Symbol("kEmpty");
const kEof = Symbol("kEof");
// Operator form of stream composition: pipe this stream into `stream` and
// return the composed stream, optionally tied to options.signal for abort.
function compose(stream, options) {
  if (options != null) {
    validateObject(options, "options");
  }
  const signal = options?.signal;
  if (signal != null) {
    validateAbortSignal(signal, "options.signal");
  }
  if (isNodeStream(stream) && !isWritable(stream)) {
    throw new ERR_INVALID_ARG_VALUE("stream", stream, "must be writable");
  }
  const composedStream = staticCompose(this, stream);
  if (signal) {
    // Already validated above, so skip re-validation here.
    addAbortSignalNoValidate(signal, composedStream);
  }
  return composedStream;
}
// Returns a stream (async generator) yielding fn(value) for each value of
// the source, running up to options.concurrency invocations of `fn` in
// parallel while preserving output order. kEmpty results are dropped (used
// by filter/forEach). Honors options.signal for abort.
function map(fn, options) {
  if (typeof fn !== "function") {
    throw new ERR_INVALID_ARG_TYPE(
      "fn",
      ["Function", "AsyncFunction"],
      fn,
    );
  }
  if (options != null) {
    validateObject(options, "options");
  }
  if (options?.signal != null) {
    validateAbortSignal(options.signal, "options.signal");
  }
  let concurrency = 1;
  if (options?.concurrency != null) {
    concurrency = MathFloor(options.concurrency);
  }
  let highWaterMark = concurrency - 1;
  if (options?.highWaterMark != null) {
    highWaterMark = MathFloor(options.highWaterMark);
  }
  validateInteger(concurrency, "options.concurrency", 1);
  validateInteger(highWaterMark, "options.highWaterMark", 0);
  // The queue holds both in-flight promises and finished-but-unconsumed items.
  highWaterMark += concurrency;
  return async function* map() {
    const signal = AbortSignal.any([options?.signal].filter(Boolean));
    const stream = this;
    const queue = [];
    const signalOpt = { signal };
    let next; // resolver: wakes the consumer when a new item is queued
    let resume; // resolver: wakes the producer when capacity frees up
    let done = false;
    let cnt = 0; // number of fn() calls currently in flight
    function onCatch() {
      done = true;
      afterItemProcessed();
    }
    function afterItemProcessed() {
      cnt -= 1;
      maybeResume();
    }
    function maybeResume() {
      if (
        resume &&
        !done &&
        cnt < concurrency &&
        queue.length < highWaterMark
      ) {
        resume();
        resume = null;
      }
    }
    // Producer: pull from the source, start fn() for each value, push the
    // resulting promise into the queue, and pause when at capacity.
    async function pump() {
      try {
        for await (let val of stream) {
          if (done) {
            return;
          }
          if (signal.aborted) {
            throw new AbortError();
          }
          try {
            val = fn(val, signalOpt);
            if (val === kEmpty) {
              continue;
            }
            val = PromiseResolve(val);
          } catch (err) {
            val = PromiseReject(err);
          }
          cnt += 1;
          PromisePrototypeThen(val, afterItemProcessed, onCatch);
          queue.push(val);
          if (next) {
            next();
            next = null;
          }
          if (!done && (queue.length >= highWaterMark || cnt >= concurrency)) {
            await new Promise((resolve) => {
              resume = resolve;
            });
          }
        }
        // kEof marks normal end-of-stream for the consumer loop below.
        queue.push(kEof);
      } catch (err) {
        const val = PromiseReject(err);
        PromisePrototypeThen(val, afterItemProcessed, onCatch);
        queue.push(val);
      } finally {
        done = true;
        if (next) {
          next();
          next = null;
        }
      }
    }
    pump();
    // Consumer: await queued results in FIFO order and yield them.
    try {
      while (true) {
        while (queue.length > 0) {
          const val = await queue[0];
          if (val === kEof) {
            return;
          }
          if (signal.aborted) {
            throw new AbortError();
          }
          if (val !== kEmpty) {
            yield val;
          }
          queue.shift();
          maybeResume();
        }
        await new Promise((resolve) => {
          next = resolve;
        });
      }
    } finally {
      done = true;
      if (resume) {
        resume();
        resume = null;
      }
    }
  }.call(this);
}
// True as soon as one item passes the predicate; false if none do.
async function some(fn, options = undefined) {
  // Delegate to filter(): receiving any item at all means a match exists.
  // eslint-disable-next-line no-unused-vars
  for await (const _ of filter.call(this, fn, options)) {
    return true;
  }
  return false;
}
// True when every item passes the predicate.
async function every(fn, options = undefined) {
  if (typeof fn !== "function") {
    throw new ERR_INVALID_ARG_TYPE(
      "fn",
      ["Function", "AsyncFunction"],
      fn,
    );
  }
  // De Morgan: every(pred) === !some(!pred).
  // https://en.wikipedia.org/wiki/De_Morgan%27s_laws
  const negated = async (...args) => !(await fn(...args));
  return !(await some.call(this, negated, options));
}
// Resolve with the first item satisfying the predicate, else undefined.
async function find(fn, options) {
  for await (const match of filter.call(this, fn, options)) {
    return match;
  }
  return undefined;
}
// Invoke `fn` for every stream item (for side effects only).
async function forEach(fn, options) {
  if (typeof fn !== "function") {
    throw new ERR_INVALID_ARG_TYPE(
      "fn",
      ["Function", "AsyncFunction"],
      fn,
    );
  }
  // Run every value through fn but produce nothing: kEmpty is dropped by
  // map(), so draining the mapped stream just drives the side effects.
  async function applyFn(value, opts) {
    await fn(value, opts);
    return kEmpty;
  }
  // eslint-disable-next-line no-unused-vars
  for await (const _ of map.call(this, applyFn, options));
}
// Keep only items for which the predicate resolves truthy.
function filter(fn, options) {
  if (typeof fn !== "function") {
    throw new ERR_INVALID_ARG_TYPE(
      "fn",
      ["Function", "AsyncFunction"],
      fn,
    );
  }
  // Wrap the predicate: failing values map to kEmpty, which map() drops
  // from the output stream.
  async function predicate(value, opts) {
    return (await fn(value, opts)) ? value : kEmpty;
  }
  return map.call(this, predicate, options);
}
// Specific to provide better error to reduce since the argument is only
// missing if the stream has no items in it - but the code is still appropriate
class ReduceAwareErrMissingArgs extends ERR_MISSING_ARGS {
  constructor() {
    super("reduce");
    // Override the generic ERR_MISSING_ARGS text with a reduce-specific one.
    this.message = "Reduce of an empty stream requires an initial value";
  }
}
// Fold the stream into a single value via `reducer`. Without an explicit
// initialValue the first stream item seeds the accumulator, and reducing an
// empty stream throws ReduceAwareErrMissingArgs. Honors options.signal; an
// internal AbortController's signal is passed to every reducer call.
async function reduce(reducer, initialValue, options) {
  if (typeof reducer !== "function") {
    throw new ERR_INVALID_ARG_TYPE(
      "reducer",
      ["Function", "AsyncFunction"],
      reducer,
    );
  }
  if (options != null) {
    validateObject(options, "options");
  }
  if (options?.signal != null) {
    validateAbortSignal(options.signal, "options.signal");
  }
  // arguments.length distinguishes reduce(fn) from reduce(fn, undefined).
  let hasInitialValue = arguments.length > 1;
  if (options?.signal?.aborted) {
    const err = new AbortError(undefined, { cause: options.signal.reason });
    this.once("error", () => {}); // The error is already propagated
    await finished(this.destroy(err));
    throw err;
  }
  const ac = new AbortController();
  const signal = ac.signal;
  if (options?.signal) {
    const opts = {
      once: true,
      [kWeakHandler]: this,
      [kResistStopPropagation]: true,
    };
    options.signal.addEventListener("abort", () => ac.abort(), opts);
  }
  let gotAnyItemFromStream = false;
  try {
    for await (const value of this) {
      gotAnyItemFromStream = true;
      if (options?.signal?.aborted) {
        throw new AbortError();
      }
      if (!hasInitialValue) {
        // First item seeds the accumulator when no initial value was given.
        initialValue = value;
        hasInitialValue = true;
      } else {
        initialValue = await reducer(initialValue, value, { signal });
      }
    }
    if (!gotAnyItemFromStream && !hasInitialValue) {
      throw new ReduceAwareErrMissingArgs();
    }
  } finally {
    // Tell any in-flight reducer its work is no longer needed.
    ac.abort();
  }
  return initialValue;
}
// Collect every stream item into an array, aborting via options.signal.
async function toArray(options) {
  if (options != null) {
    validateObject(options, "options");
  }
  if (options?.signal != null) {
    validateAbortSignal(options.signal, "options.signal");
  }
  const collected = [];
  for await (const item of this) {
    if (options?.signal?.aborted) {
      throw new AbortError(undefined, { cause: options.signal.reason });
    }
    ArrayPrototypePush(collected, item);
  }
  return collected;
}
// Map each item through `fn`, then flatten each produced iterable one level.
function flatMap(fn, options) {
  const mapped = map.call(this, fn, options);
  return async function* flatMap() {
    for await (const item of mapped) {
      yield* item;
    }
  }.call(this);
}
// Coerce a count argument to a non-negative number (NaN becomes 0),
// aligning with the spec:
// https://github.com/tc39/proposal-iterator-helpers/issues/169
function toIntegerOrInfinity(number) {
  const coerced = Number(number);
  if (NumberIsNaN(coerced)) {
    return 0;
  }
  if (coerced < 0) {
    throw new ERR_OUT_OF_RANGE("number", ">= 0", coerced);
  }
  return coerced;
}
// Skip the first `number` items, then yield the rest.
function drop(number, options = undefined) {
  if (options != null) {
    validateObject(options, "options");
  }
  if (options?.signal != null) {
    validateAbortSignal(options.signal, "options.signal");
  }
  let remaining = toIntegerOrInfinity(number);
  return async function* drop() {
    if (options?.signal?.aborted) {
      throw new AbortError();
    }
    for await (const item of this) {
      if (options?.signal?.aborted) {
        throw new AbortError();
      }
      // Yield only once the skip quota has been consumed.
      if (remaining-- <= 0) {
        yield item;
      }
    }
  }.call(this);
}
// Yield at most the first `number` items of the stream.
function take(number, options = undefined) {
  if (options != null) {
    validateObject(options, "options");
  }
  if (options?.signal != null) {
    validateAbortSignal(options.signal, "options.signal");
  }
  let remaining = toIntegerOrInfinity(number);
  return async function* take() {
    if (options?.signal?.aborted) {
      throw new AbortError();
    }
    for await (const item of this) {
      if (options?.signal?.aborted) {
        throw new AbortError();
      }
      if (remaining-- > 0) {
        yield item;
      }
      // Don't get another item from iterator in case we reached the end
      if (remaining <= 0) {
        return;
      }
    }
  }.call(this);
}
const streamReturningOperators = {
drop,
filter,
flatMap,
map,
take,
compose,
};
export { streamReturningOperators };
const promiseReturningOperators = {
every,
forEach,
reduce,
toArray,
some,
find,
};
export { promiseReturningOperators };
export default {
streamReturningOperators,
promiseReturningOperators,
};

View file

@ -0,0 +1,52 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
import Transform from "ext:deno_node/internal/streams/transform.js";
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
"use strict";
const {
ObjectSetPrototypeOf,
} = primordials;
ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype);
ObjectSetPrototypeOf(PassThrough, Transform);
// PassThrough constructor; a Transform whose _transform is the identity.
function PassThrough(options) {
  if (this instanceof PassThrough) {
    Transform.call(this, options);
  } else {
    // Support construction without `new`.
    return new PassThrough(options);
  }
}
// Identity transform: pass every written chunk through unchanged.
PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};
export default PassThrough;
export { PassThrough };

View file

@ -1,7 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
import { PassThrough } from "ext:deno_node/_stream.mjs";
export default PassThrough;

View file

@ -0,0 +1,522 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import eos from "ext:deno_node/internal/streams/end-of-stream.js";
import { once } from "ext:deno_node/internal/util.mjs";
import destroyImpl from "ext:deno_node/internal/streams/destroy.js";
import Duplex from "ext:deno_node/internal/streams/duplex.js";
import imported1 from "ext:deno_node/internal/errors.ts";
import {
validateAbortSignal,
validateFunction,
} from "ext:deno_node/internal/validators.mjs";
import {
isIterable,
isNodeStream,
isReadable,
isReadableFinished,
isReadableNodeStream,
isReadableStream,
isTransformStream,
isWebStream,
} from "ext:deno_node/internal/streams/utils.js";
import { AbortController } from "ext:deno_web/03_abort_signal.js";
import _mod3 from "ext:deno_node/internal/streams/readable.js";
import * as _mod4 from "ext:deno_node/internal/events/abort_listener.mjs";
import _mod5 from "ext:deno_node/internal/streams/passthrough.js";
const {
AbortError,
aggregateTwoErrors,
codes: {
ERR_INVALID_ARG_TYPE,
ERR_INVALID_RETURN_VALUE,
ERR_MISSING_ARGS,
ERR_STREAM_DESTROYED,
ERR_STREAM_PREMATURE_CLOSE,
ERR_STREAM_UNABLE_TO_PIPE,
},
} = imported1;
// Ported from https://github.com/mafintosh/pump with
// permission from the author, Mathias Buus (@mafintosh).
"use strict";
const {
ArrayIsArray,
Promise,
SymbolAsyncIterator,
SymbolDispose,
} = primordials;
let PassThrough;
let Readable;
let addAbortListener;
// Create a { destroy, cleanup } pair for one pipeline stage. destroy(err)
// is a no-op once the stream has closed or finished cleanly; otherwise it
// destroys the stream with err (or ERR_STREAM_DESTROYED as a fallback).
function destroyer(stream, reading, writing) {
  let settled = false;
  stream.on("close", () => {
    settled = true;
  });
  const cleanup = eos(
    stream,
    { readable: reading, writable: writing },
    (err) => {
      // A clean finish means no forced destroy is needed later.
      settled = !err;
    },
  );
  const destroy = (err) => {
    if (settled) return;
    settled = true;
    destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED("pipe"));
  };
  return { destroy, cleanup };
}
// Remove and return the trailing callback from a pipeline args array.
// Streams should never be an empty array. It should always contain at least
// a single stream. Therefore optimize for the average case instead of
// checking for length === 0 as well.
function popCallback(streams) {
  const last = streams[streams.length - 1];
  validateFunction(last, "streams[stream.length - 1]");
  streams.pop();
  return last;
}
// Normalize a pipeline stage into an async iterable.
function makeAsyncIterable(val) {
  if (isIterable(val)) {
    return val;
  }
  if (isReadableNodeStream(val)) {
    // Legacy streams are not Iterable; adapt via Readable's async iterator.
    return fromReadable(val);
  }
  throw new ERR_INVALID_ARG_TYPE(
    "val",
    ["Readable", "Iterable", "AsyncIterable"],
    val,
  );
}
// Adapt a legacy readable Node stream into an async iterable by borrowing
// Readable's Symbol.asyncIterator implementation (lazily loaded).
async function* fromReadable(val) {
  Readable ??= _mod3;
  yield* Readable.prototype[SymbolAsyncIterator].call(val);
}
// Pump an async iterable into a Node writable, honoring 'drain'
// backpressure. Calls finish() on success or finish(err) on failure, and
// ends the writable when { end } is true.
async function pumpToNode(iterable, writable, finish, { end }) {
  let error;
  let onresolve = null;
  // Wake a pending wait(); record the first error for it to reject with.
  const resume = (err) => {
    if (err) {
      error = err;
    }
    if (onresolve) {
      const callback = onresolve;
      onresolve = null;
      callback();
    }
  };
  // Promise that settles on the next resume() call (or immediately rejects
  // if an error was already recorded).
  const wait = () =>
    new Promise((resolve, reject) => {
      if (error) {
        reject(error);
      } else {
        onresolve = () => {
          if (error) {
            reject(error);
          } else {
            resolve();
          }
        };
      }
    });
  writable.on("drain", resume);
  const cleanup = eos(writable, { readable: false }, resume);
  try {
    if (writable.writableNeedDrain) {
      await wait();
    }
    for await (const chunk of iterable) {
      if (!writable.write(chunk)) {
        await wait();
      }
    }
    if (end) {
      writable.end();
      await wait();
    }
    finish();
  } catch (err) {
    // Combine a recorded writable error with the iteration error if distinct.
    finish(error !== err ? aggregateTwoErrors(error, err) : err);
  } finally {
    cleanup();
    writable.off("drain", resume);
  }
}
// Pump an async iterable into a WHATWG writable (or the writable side of a
// TransformStream) via its writer, awaiting `ready` for backpressure.
async function pumpToWeb(readable, writable, finish, { end }) {
  if (isTransformStream(writable)) {
    writable = writable.writable;
  }
  // https://streams.spec.whatwg.org/#example-manual-write-with-backpressure
  const writer = writable.getWriter();
  try {
    for await (const chunk of readable) {
      await writer.ready;
      // Errors surface via writer.ready/abort; per-write rejection is ignored.
      writer.write(chunk).catch(() => {});
    }
    await writer.ready;
    if (end) {
      await writer.close();
    }
    finish();
  } catch (err) {
    try {
      await writer.abort(err);
      finish(err);
    } catch (err) {
      finish(err);
    }
  }
}
// Public pipeline(...streams, callback): strip the trailing callback
// (wrapped in once() so it fires a single time) and delegate to
// pipelineImpl with the remaining streams.
function pipeline(...streams) {
  return pipelineImpl(streams, once(popCallback(streams)));
}
// Core pipeline implementation. Wires `streams` (Node streams, web streams,
// iterables, async-generator functions, or plain values at the endpoints)
// into a chain, propagating errors and destruction across every stage, and
// invokes callback(error, value) exactly once when done. Returns the final
// stage. opts.signal aborts the whole pipeline; opts.end === false keeps
// the final destination open.
function pipelineImpl(streams, callback, opts) {
  if (streams.length === 1 && ArrayIsArray(streams[0])) {
    streams = streams[0];
  }
  if (streams.length < 2) {
    throw new ERR_MISSING_ARGS("streams");
  }
  const ac = new AbortController();
  const signal = ac.signal;
  const outerSignal = opts?.signal;
  // Need to cleanup event listeners if last stream is readable
  // https://github.com/nodejs/node/issues/35452
  const lastStreamCleanup = [];
  validateAbortSignal(outerSignal, "options.signal");
  function abort() {
    finishImpl(new AbortError(undefined, { cause: outerSignal?.reason }));
  }
  addAbortListener ??= _mod4.addAbortListener;
  let disposable;
  if (outerSignal) {
    disposable = addAbortListener(outerSignal, abort);
  }
  let error;
  let value;
  const destroys = [];
  let finishCount = 0;
  function finish(err) {
    finishImpl(err, --finishCount === 0);
  }
  function finishOnlyHandleError(err) {
    finishImpl(err, false);
  }
  // Record errors (keeping the first meaningful one; a premature-close may
  // be replaced by a more specific error), destroy all stages, and fire
  // the user callback once the final completion arrives.
  function finishImpl(err, final) {
    if (err && (!error || error.code === "ERR_STREAM_PREMATURE_CLOSE")) {
      error = err;
    }
    if (!error && !final) {
      return;
    }
    while (destroys.length) {
      destroys.shift()(error);
    }
    disposable?.[SymbolDispose]();
    ac.abort();
    if (final) {
      if (!error) {
        lastStreamCleanup.forEach((fn) => fn());
      }
      process.nextTick(callback, error, value);
    }
  }
  // `ret` carries the current tail of the chain as each stage is wired in.
  let ret;
  for (let i = 0; i < streams.length; i++) {
    const stream = streams[i];
    const reading = i < streams.length - 1;
    const writing = i > 0;
    const next = i + 1 < streams.length ? streams[i + 1] : null;
    const end = reading || opts?.end !== false;
    const isLastStream = i === streams.length - 1;
    if (isNodeStream(stream)) {
      if (next !== null && (next?.closed || next?.destroyed)) {
        throw new ERR_STREAM_UNABLE_TO_PIPE();
      }
      if (end) {
        const { destroy, cleanup } = destroyer(stream, reading, writing);
        destroys.push(destroy);
        if (isReadable(stream) && isLastStream) {
          lastStreamCleanup.push(cleanup);
        }
      }
      // Catch stream errors that occur after pipe/pump has completed.
      function onError(err) {
        if (
          err &&
          err.name !== "AbortError" &&
          err.code !== "ERR_STREAM_PREMATURE_CLOSE"
        ) {
          finishOnlyHandleError(err);
        }
      }
      stream.on("error", onError);
      if (isReadable(stream) && isLastStream) {
        lastStreamCleanup.push(() => {
          stream.removeListener("error", onError);
        });
      }
    }
    if (i === 0) {
      // First stage: resolve the pipeline source.
      if (typeof stream === "function") {
        ret = stream({ signal });
        if (!isIterable(ret)) {
          throw new ERR_INVALID_RETURN_VALUE(
            "Iterable, AsyncIterable or Stream",
            "source",
            ret,
          );
        }
      } else if (
        isIterable(stream) || isReadableNodeStream(stream) ||
        isTransformStream(stream)
      ) {
        ret = stream;
      } else {
        ret = Duplex.from(stream);
      }
    } else if (typeof stream === "function") {
      // Function stage: a transform (middle) or the final consumer (last).
      if (isTransformStream(ret)) {
        ret = makeAsyncIterable(ret?.readable);
      } else {
        ret = makeAsyncIterable(ret);
      }
      ret = stream(ret, { signal });
      if (reading) {
        if (!isIterable(ret, true)) {
          throw new ERR_INVALID_RETURN_VALUE(
            "AsyncIterable",
            `transform[${i - 1}]`,
            ret,
          );
        }
      } else {
        PassThrough ??= _mod5;
        // If the last argument to pipeline is not a stream
        // we must create a proxy stream so that pipeline(...)
        // always returns a stream which can be further
        // composed through `.pipe(stream)`.
        const pt = new PassThrough({
          objectMode: true,
        });
        // Handle Promises/A+ spec, `then` could be a getter that throws on
        // second use.
        const then = ret?.then;
        if (typeof then === "function") {
          finishCount++;
          then.call(ret, (val) => {
            value = val;
            if (val != null) {
              pt.write(val);
            }
            if (end) {
              pt.end();
            }
            process.nextTick(finish);
          }, (err) => {
            pt.destroy(err);
            process.nextTick(finish, err);
          });
        } else if (isIterable(ret, true)) {
          finishCount++;
          pumpToNode(ret, pt, finish, { end });
        } else if (isReadableStream(ret) || isTransformStream(ret)) {
          const toRead = ret.readable || ret;
          finishCount++;
          pumpToNode(toRead, pt, finish, { end });
        } else {
          throw new ERR_INVALID_RETURN_VALUE(
            "AsyncIterable or Promise",
            "destination",
            ret,
          );
        }
        ret = pt;
        const { destroy, cleanup } = destroyer(ret, false, true);
        destroys.push(destroy);
        if (isLastStream) {
          lastStreamCleanup.push(cleanup);
        }
      }
    } else if (isNodeStream(stream)) {
      if (isReadableNodeStream(ret)) {
        // pipe() registers eos on both sides, hence two pending finishes.
        finishCount += 2;
        const cleanup = pipe(ret, stream, finish, finishOnlyHandleError, {
          end,
        });
        if (isReadable(stream) && isLastStream) {
          lastStreamCleanup.push(cleanup);
        }
      } else if (isTransformStream(ret) || isReadableStream(ret)) {
        const toRead = ret.readable || ret;
        finishCount++;
        pumpToNode(toRead, stream, finish, { end });
      } else if (isIterable(ret)) {
        finishCount++;
        pumpToNode(ret, stream, finish, { end });
      } else {
        throw new ERR_INVALID_ARG_TYPE(
          "val",
          [
            "Readable",
            "Iterable",
            "AsyncIterable",
            "ReadableStream",
            "TransformStream",
          ],
          ret,
        );
      }
      ret = stream;
    } else if (isWebStream(stream)) {
      if (isReadableNodeStream(ret)) {
        finishCount++;
        pumpToWeb(makeAsyncIterable(ret), stream, finish, { end });
      } else if (isReadableStream(ret) || isIterable(ret)) {
        finishCount++;
        pumpToWeb(ret, stream, finish, { end });
      } else if (isTransformStream(ret)) {
        finishCount++;
        pumpToWeb(ret.readable, stream, finish, { end });
      } else {
        throw new ERR_INVALID_ARG_TYPE(
          "val",
          [
            "Readable",
            "Iterable",
            "AsyncIterable",
            "ReadableStream",
            "TransformStream",
          ],
          ret,
        );
      }
      ret = stream;
    } else {
      ret = Duplex.from(stream);
    }
  }
  if (signal?.aborted || outerSignal?.aborted) {
    process.nextTick(abort);
  }
  return ret;
}
// Connect a readable `src` to a Node writable `dst` for pipelineImpl,
// using legacy src.pipe() for data flow plus eos() on both sides for
// completion/error tracking. Returns the cleanup function of dst's eos.
function pipe(src, dst, finish, finishOnlyHandleError, { end }) {
  let ended = false;
  dst.on("close", () => {
    if (!ended) {
      // Finish if the destination closes before the source has completed.
      finishOnlyHandleError(new ERR_STREAM_PREMATURE_CLOSE());
    }
  });
  src.pipe(dst, { end: false }); // If end is true we already will have a listener to end dst.
  if (end) {
    // Compat. Before node v10.12.0 stdio used to throw an error so
    // pipe() did/does not end() stdio destinations.
    // Now they allow it but "secretly" don't close the underlying fd.
    function endFn() {
      ended = true;
      dst.end();
    }
    if (isReadableFinished(src)) { // End the destination if the source has already ended.
      process.nextTick(endFn);
    } else {
      src.once("end", endFn);
    }
  } else {
    finish();
  }
  eos(src, { readable: true, writable: false }, (err) => {
    const rState = src._readableState;
    if (
      err &&
      err.code === "ERR_STREAM_PREMATURE_CLOSE" &&
      (rState?.ended && !rState.errored && !rState.errorEmitted)
    ) {
      // Some readable streams will emit 'close' before 'end'. However, since
      // this is on the readable side 'end' should still be emitted if the
      // stream has been ended and no error emitted. This should be allowed in
      // favor of backwards compatibility. Since the stream is piped to a
      // destination this should not result in any observable difference.
      // We don't need to check if this is a writable premature close since
      // eos will only fail with premature close on the reading side for
      // duplex streams.
      src
        .once("end", finish)
        .once("error", finish);
    } else {
      finish(err);
    }
  });
  return eos(dst, { readable: false, writable: true }, finish);
}
const _defaultExport2 = { pipelineImpl, pipeline };
export default _defaultExport2;
export { pipeline, pipelineImpl };

File diff suppressed because it is too large Load diff

View file

@ -1,9 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
import { Readable } from "ext:deno_node/_stream.mjs";
const { ReadableState, _fromList, from, fromWeb, toWeb, wrap } = Readable;
export default Readable;
export { _fromList, from, fromWeb, ReadableState, toWeb, wrap };

View file

@ -0,0 +1,66 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import { validateInteger } from "ext:deno_node/internal/validators.mjs";
import _mod1 from "ext:deno_node/internal/errors.ts";
const { ERR_INVALID_ARG_VALUE } = _mod1.codes;
"use strict";
const {
MathFloor,
NumberIsInteger,
} = primordials;
// TODO (fix): For some reason Windows CI fails with bigger hwm.
let defaultHighWaterMarkBytes = process.platform === "win32"
? 16 * 1024
: 64 * 1024;
let defaultHighWaterMarkObjectMode = 16;
// Resolve the effective highWaterMark from `options`: prefer the explicit
// `highWaterMark`, fall back to the duplex-specific key for duplex streams,
// and return null when neither applies.
function highWaterMarkFrom(options, isDuplex, duplexKey) {
  if (options.highWaterMark != null) {
    return options.highWaterMark;
  }
  return isDuplex ? options[duplexKey] : null;
}
// Return the module-wide default highWaterMark for the given mode:
// an object count in objectMode, a byte count otherwise.
function getDefaultHighWaterMark(objectMode) {
  return objectMode
    ? defaultHighWaterMarkObjectMode
    : defaultHighWaterMarkBytes;
}
// Update the module-wide default highWaterMark (a non-negative integer)
// for either object mode or byte mode.
function setDefaultHighWaterMark(objectMode, value) {
  validateInteger(value, "value", 0);
  if (objectMode) {
    defaultHighWaterMarkObjectMode = value;
  } else {
    defaultHighWaterMarkBytes = value;
  }
}
// Resolve the highWaterMark for a stream's state from its options,
// validating that an explicit value is a non-negative integer and falling
// back to the mode-appropriate module default otherwise.
function getHighWaterMark(state, options, duplexKey, isDuplex) {
  const hwm = highWaterMarkFrom(options, isDuplex, duplexKey);
  if (hwm != null) {
    if (!NumberIsInteger(hwm) || hwm < 0) {
      const name = isDuplex ? `options.${duplexKey}` : "options.highWaterMark";
      throw new ERR_INVALID_ARG_VALUE(name, hwm);
    }
    return MathFloor(hwm);
  }
  // Default value
  return getDefaultHighWaterMark(state.objectMode);
}
const _defaultExport2 = {
getHighWaterMark,
getDefaultHighWaterMark,
setDefaultHighWaterMark,
};
export default _defaultExport2;
export { getDefaultHighWaterMark, getHighWaterMark, setDefaultHighWaterMark };

View file

@ -1,25 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
// TODO(bartlomieju): this should be 64?
let defaultHighWaterMarkBytes = 16 * 1024;
let defaultHighWaterMarkObjectMode = 16;
function getDefaultHighWaterMark(objectMode) {
return objectMode
? defaultHighWaterMarkObjectMode
: defaultHighWaterMarkBytes;
}
function setDefaultHighWaterMark(objectMode, value) {
validateInteger(value, "value", 0);
if (objectMode) {
defaultHighWaterMarkObjectMode = value;
} else {
defaultHighWaterMarkBytes = value;
}
}
export default { getDefaultHighWaterMark, setDefaultHighWaterMark };
export { getDefaultHighWaterMark, setDefaultHighWaterMark };

View file

@ -0,0 +1,218 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import process from "node:process";
import { primordials } from "ext:core/mod.js";
import _mod1 from "ext:deno_node/internal/errors.ts";
import Duplex from "ext:deno_node/internal/streams/duplex.js";
import { getHighWaterMark } from "ext:deno_node/internal/streams/state.js";
const {
ERR_METHOD_NOT_IMPLEMENTED,
} = _mod1.codes;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
"use strict";
const {
ObjectSetPrototypeOf,
Symbol,
} = primordials;
ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype);
ObjectSetPrototypeOf(Transform, Duplex);
const kCallback = Symbol("kCallback");
/**
 * Transform stream constructor: a Duplex whose output is derived from its
 * input via `_transform()` (and optionally `_flush()`).
 *
 * @param {Object} [options] Duplex options; `options.transform` and
 *   `options.flush`, when functions, are installed as `_transform`/`_flush`.
 */
function Transform(options) {
  if (!(this instanceof Transform)) {
    // Support calling without `new`.
    return new Transform(options);
  }
  // TODO (ronag): This should preferably always be
  // applied but would be semver-major. Or even better;
  // make Transform a Readable with the Writable interface.
  const readableHighWaterMark = options
    ? getHighWaterMark(this, options, "readableHighWaterMark", true)
    : null;
  if (readableHighWaterMark === 0) {
    // A Duplex will buffer both on the writable and readable side while
    // a Transform just wants to buffer hwm number of elements. To avoid
    // buffering twice we disable buffering on the writable side.
    options = {
      ...options,
      highWaterMark: null,
      readableHighWaterMark,
      writableHighWaterMark: options.writableHighWaterMark || 0,
    };
  }
  Duplex.call(this, options);
  // We have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;
  this[kCallback] = null;
  if (options) {
    if (typeof options.transform === "function") {
      this._transform = options.transform;
    }
    if (typeof options.flush === "function") {
      this._flush = options.flush;
    }
  }
  // When the writable side finishes, then flush out anything remaining.
  // Backwards compat. Some Transform streams incorrectly implement _final
  // instead of or in addition to _flush. By using 'prefinish' instead of
  // implementing _final we continue supporting this unfortunate use case.
  this.on("prefinish", prefinish);
}
// Flush any remaining buffered output once the writable side finishes,
// then signal EOF on the readable side. Works both as the default _final
// implementation (with `cb`) and when invoked without a callback.
function final(cb) {
  if (typeof this._flush !== "function" || this.destroyed) {
    // Nothing to flush; just end the readable side.
    this.push(null);
    cb?.();
    return;
  }
  this._flush((er, data) => {
    if (er) {
      // Route the flush error through the callback when present,
      // otherwise destroy the stream with it.
      if (cb) {
        cb(er);
      } else {
        this.destroy(er);
      }
      return;
    }
    if (data != null) {
      this.push(data);
    }
    this.push(null);
    cb?.();
  });
}
// 'prefinish' handler: when the user replaced _final with their own
// implementation, trigger the default flush logic here instead (see the
// backwards-compat note in the constructor).
function prefinish() {
  const usesDefaultFinal = this._final === final;
  if (!usesDefaultFinal) {
    final.call(this);
  }
}
// Default _final is the flush helper above.
Transform.prototype._final = final;
// Default _transform must be overridden (subclass or options.transform).
Transform.prototype._transform = function (chunk, encoding, callback) {
  throw new ERR_METHOD_NOT_IMPLEMENTED("_transform()");
};
// Writable-side hook: run the chunk through _transform and decide when to
// release the write callback. Holding the callback back is what gives a
// Transform its read-side-driven back-pressure.
Transform.prototype._write = function (chunk, encoding, callback) {
  const readableState = this._readableState;
  const writableState = this._writableState;
  const lengthBefore = readableState.length;
  this._transform(chunk, encoding, (err, val) => {
    if (err) {
      callback(err);
      return;
    }
    if (val != null) {
      this.push(val);
    }
    if (readableState.ended) {
      // If user has called this.push(null) we have to
      // delay the callback to properly propagate the new
      // state.
      process.nextTick(callback);
      return;
    }
    const mayContinue = writableState.ended || // Backwards compat.
      lengthBefore === readableState.length || // Backwards compat.
      readableState.length < readableState.highWaterMark;
    if (mayContinue) {
      callback();
    } else {
      // Park the callback until _read drains the readable buffer.
      this[kCallback] = callback;
    }
  });
};
// Readable-side hook: releasing a parked write callback (if any) lets the
// writable side accept the next chunk, which produces more readable data.
Transform.prototype._read = function () {
  const pending = this[kCallback];
  if (pending) {
    this[kCallback] = null;
    pending();
  }
};
// Both default and named export, mirroring the CJS module shape.
export default Transform;
export { Transform };

View file

@ -1,7 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
import { Transform } from "ext:deno_node/_stream.mjs";
export default Transform;

View file

@ -0,0 +1,419 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
"use strict";
const {
Symbol,
SymbolAsyncIterator,
SymbolFor,
SymbolIterator,
} = primordials;
// We need to use SymbolFor to make these globally available
// for interoperability with readable-stream, i.e. readable-stream
// and node core needs to be able to read/write private state
// from each other for proper interoperability.
const kIsDestroyed = SymbolFor("nodejs.stream.destroyed");
const kIsErrored = SymbolFor("nodejs.stream.errored");
const kIsReadable = SymbolFor("nodejs.stream.readable");
const kIsWritable = SymbolFor("nodejs.stream.writable");
const kIsDisturbed = SymbolFor("nodejs.stream.disturbed");
const kOnConstructed = Symbol("kOnConstructed");
const kIsClosedPromise = SymbolFor("nodejs.webstream.isClosedPromise");
const kControllerErrorFunction = SymbolFor(
"nodejs.webstream.controllerErrorFunction",
);
const kState = Symbol("kState");
const kObjectMode = 1 << 0;
const kErrorEmitted = 1 << 1;
const kAutoDestroy = 1 << 2;
const kEmitClose = 1 << 3;
const kDestroyed = 1 << 4;
const kClosed = 1 << 5;
const kCloseEmitted = 1 << 6;
const kErrored = 1 << 7;
const kConstructed = 1 << 8;
// Duck-type check for a Node.js readable stream (Readable or Duplex).
// With `strict`, additionally require pause()/resume().
function isReadableNodeStream(obj, strict = false) {
  if (!obj) return false;
  if (typeof obj.pipe !== "function" || typeof obj.on !== "function") {
    return false;
  }
  if (
    strict &&
    (typeof obj.pause !== "function" || typeof obj.resume !== "function")
  ) {
    return false;
  }
  // A Duplex must not have its readable side explicitly disabled.
  if (obj._writableState && obj._readableState?.readable === false) {
    return false;
  }
  // Anything carrying a _writableState must also carry a _readableState;
  // a plain Writable has .pipe too.
  return !obj._writableState || !!obj._readableState;
}
// Duck-type check for a Node.js writable stream (Writable or Duplex).
function isWritableNodeStream(obj) {
  if (!obj) return false;
  const hasWritableApi = typeof obj.write === "function" &&
    typeof obj.on === "function";
  if (!hasWritableApi) return false;
  // A Duplex must not have its writable side explicitly disabled.
  return !obj._readableState || obj._writableState?.writable !== false;
}
// Duck-type check for a Duplex: readable machinery plus the writable API.
function isDuplexNodeStream(obj) {
  if (!obj) return false;
  const readableSide = typeof obj.pipe === "function" && !!obj._readableState;
  const writableSide = typeof obj.on === "function" &&
    typeof obj.write === "function";
  return readableSide && writableSide;
}
// Loose duck-type check for any Node.js stream. Note: intentionally
// returns `obj` itself when falsy (not coerced to boolean), matching the
// original expression shape.
function isNodeStream(obj) {
  if (!obj) return obj;
  const hasState = obj._readableState || obj._writableState;
  const hasWritableApi = typeof obj.write === "function" &&
    typeof obj.on === "function";
  const hasReadableApi = typeof obj.pipe === "function" &&
    typeof obj.on === "function";
  return hasState || hasWritableApi || hasReadableApi;
}
// WHATWG ReadableStream shape (and explicitly not a Node stream).
function isReadableStream(obj) {
  if (!obj || isNodeStream(obj)) return false;
  return typeof obj.pipeThrough === "function" &&
    typeof obj.getReader === "function" &&
    typeof obj.cancel === "function";
}
// WHATWG WritableStream shape (and explicitly not a Node stream).
function isWritableStream(obj) {
  if (!obj || isNodeStream(obj)) return false;
  return typeof obj.getWriter === "function" &&
    typeof obj.abort === "function";
}
// WHATWG TransformStream shape: a readable/writable object pair.
function isTransformStream(obj) {
  if (!obj || isNodeStream(obj)) return false;
  return typeof obj.readable === "object" &&
    typeof obj.writable === "object";
}
// Any of the three WHATWG stream shapes above.
function isWebStream(obj) {
  return isReadableStream(obj) || isWritableStream(obj) ||
    isTransformStream(obj);
}
// Whether obj is iterable; `isAsync` narrows the check to exactly one of
// the two protocols, otherwise either one qualifies.
function isIterable(obj, isAsync) {
  if (obj == null) return false;
  if (isAsync === true) {
    return typeof obj[SymbolAsyncIterator] === "function";
  }
  if (isAsync === false) {
    return typeof obj[SymbolIterator] === "function";
  }
  return typeof obj[SymbolAsyncIterator] === "function" ||
    typeof obj[SymbolIterator] === "function";
}
// Tri-state: true/false for Node streams, null for non-streams. Also
// honors the interop symbol set by readable-stream.
function isDestroyed(stream) {
  if (!isNodeStream(stream)) return null;
  const state = stream._writableState || stream._readableState;
  return !!(stream.destroyed || stream[kIsDestroyed] || state?.destroyed);
}
// Have been end():d.
function isWritableEnded(stream) {
  if (!isWritableNodeStream(stream)) return null;
  if (stream.writableEnded === true) return true;
  const state = stream._writableState;
  if (state?.errored) return false;
  return typeof state?.ended === "boolean" ? state.ended : null;
}
// Have emitted 'finish'.
function isWritableFinished(stream, strict) {
  if (!isWritableNodeStream(stream)) return null;
  if (stream.writableFinished === true) return true;
  const state = stream._writableState;
  if (state?.errored) return false;
  if (typeof state?.finished !== "boolean") return null;
  if (state.finished) return true;
  // Non-strict mode: ended with an empty buffer counts as finished.
  return strict === false && state.ended === true && state.length === 0;
}
// Have been push(null):d.
function isReadableEnded(stream) {
  if (!isReadableNodeStream(stream)) return null;
  if (stream.readableEnded === true) return true;
  const state = stream._readableState;
  if (!state || state.errored) return false;
  return typeof state.ended === "boolean" ? state.ended : null;
}
// Have emitted 'end'.
function isReadableFinished(stream, strict) {
  if (!isReadableNodeStream(stream)) return null;
  const state = stream._readableState;
  if (state?.errored) return false;
  if (typeof state?.endEmitted !== "boolean") return null;
  if (state.endEmitted) return true;
  // Non-strict mode: ended with an empty buffer counts as finished.
  return strict === false && state.ended === true && state.length === 0;
}
// Tri-state readability: the interop symbol wins; otherwise derived from
// stream state, or null when undeterminable.
function isReadable(stream) {
  const explicit = stream?.[kIsReadable];
  if (explicit != null) return explicit;
  if (typeof stream?.readable !== "boolean") return null;
  if (isDestroyed(stream)) return false;
  return isReadableNodeStream(stream) &&
    stream.readable &&
    !isReadableFinished(stream);
}
// Tri-state writability: the interop symbol wins; otherwise derived from
// stream state, or null when undeterminable.
function isWritable(stream) {
  const explicit = stream?.[kIsWritable];
  if (explicit != null) return explicit;
  if (typeof stream?.writable !== "boolean") return null;
  if (isDestroyed(stream)) return false;
  return isWritableNodeStream(stream) &&
    stream.writable &&
    !isWritableEnded(stream);
}
// A stream is finished when destroyed, or when neither side is still
// active; each side can be excluded via opts.readable / opts.writable.
function isFinished(stream, opts) {
  if (!isNodeStream(stream)) {
    return null;
  }
  if (isDestroyed(stream)) {
    return true;
  }
  const readablePending = opts?.readable !== false && isReadable(stream);
  if (readablePending) {
    return false;
  }
  const writablePending = opts?.writable !== false && isWritable(stream);
  return !writablePending;
}
// The writable-side error, if any (null for non-streams or no error).
function isWritableErrored(stream) {
  if (!isNodeStream(stream)) {
    return null;
  }
  return stream.writableErrored || (stream._writableState?.errored ?? null);
}
// The readable-side error, if any (null for non-streams or no error).
function isReadableErrored(stream) {
  if (!isNodeStream(stream)) {
    return null;
  }
  return stream.readableErrored || (stream._readableState?.errored ?? null);
}
// Tri-state closed check covering Node streams and (duck-typed)
// http.OutgoingMessage, which tracks closing on `_closed` instead.
function isClosed(stream) {
  if (!isNodeStream(stream)) {
    return null;
  }
  if (typeof stream.closed === "boolean") {
    return stream.closed;
  }
  const wClosed = stream._writableState?.closed;
  const rClosed = stream._readableState?.closed;
  if (typeof wClosed === "boolean" || typeof rClosed === "boolean") {
    return wClosed || rClosed;
  }
  if (typeof stream._closed === "boolean" && isOutgoingMessage(stream)) {
    return stream._closed;
  }
  return null;
}
// Duck-type check for http.OutgoingMessage (no instanceof, to avoid a
// dependency on the http module from here).
function isOutgoingMessage(stream) {
  const flags = [
    "_closed",
    "_defaultKeepAlive",
    "_removedConnection",
    "_removedContLen",
  ];
  return flags.every((name) => typeof stream[name] === "boolean");
}
// http.ServerResponse: an OutgoingMessage that also tracks _sent100.
function isServerResponse(stream) {
  return typeof stream._sent100 === "boolean" && isOutgoingMessage(stream);
}
// Server-side http.IncomingMessage that is not an upgrade/CONNECT request.
function isServerRequest(stream) {
  const looksLikeIncoming = typeof stream._consuming === "boolean" &&
    typeof stream._dumped === "boolean";
  return looksLikeIncoming && stream.req?.upgradeOrConnect === undefined;
}
// Whether destroying this stream will eventually emit 'close'.
function willEmitClose(stream) {
  if (!isNodeStream(stream)) return null;
  const state = stream._writableState || stream._readableState;
  if (!state) {
    // Stateless streams: only a ServerResponse is known to emit 'close'.
    return isServerResponse(stream);
  }
  return !!(state.autoDestroy && state.emitClose && state.closed === false);
}
// Whether the readable side has been read from or aborted (the interop
// symbol takes precedence when set).
function isDisturbed(stream) {
  if (!stream) return false;
  const flagged = stream[kIsDisturbed] ??
    (stream.readableDidRead || stream.readableAborted);
  return !!flagged;
}
// Whether either side has errored; checks the interop symbol first, then
// public accessors, then internal state.
function isErrored(stream) {
  if (!stream) return false;
  return !!(
    stream[kIsErrored] ??
      stream.readableErrored ??
      stream.writableErrored ??
      stream._readableState?.errorEmitted ??
      stream._writableState?.errorEmitted ??
      stream._readableState?.errored ??
      stream._writableState?.errored
  );
}
// Aggregate default export mirroring the original CJS module.exports
// object; named exports below duplicate it for ESM consumers.
const _defaultExport1 = {
  kOnConstructed,
  isDestroyed,
  kIsDestroyed,
  isDisturbed,
  kIsDisturbed,
  isErrored,
  kIsErrored,
  isReadable,
  kIsReadable,
  kIsClosedPromise,
  kControllerErrorFunction,
  kIsWritable,
  isClosed,
  isDuplexNodeStream,
  isFinished,
  isIterable,
  isReadableNodeStream,
  isReadableStream,
  isReadableEnded,
  isReadableFinished,
  isReadableErrored,
  isNodeStream,
  isWebStream,
  isWritable,
  isWritableNodeStream,
  isWritableStream,
  isWritableEnded,
  isWritableFinished,
  isWritableErrored,
  isServerRequest,
  isServerResponse,
  willEmitClose,
  isTransformStream,
  kState,
  // bitfields
  kObjectMode,
  kErrorEmitted,
  kAutoDestroy,
  kEmitClose,
  kDestroyed,
  kClosed,
  kCloseEmitted,
  kErrored,
  kConstructed,
};
export default _defaultExport1;
export {
  isClosed,
  isDestroyed,
  isDisturbed,
  isDuplexNodeStream,
  isErrored,
  isFinished,
  isIterable,
  isNodeStream,
  isReadable,
  isReadableEnded,
  isReadableErrored,
  isReadableFinished,
  isReadableNodeStream,
  isReadableStream,
  isServerRequest,
  isServerResponse,
  isTransformStream,
  isWebStream,
  isWritable,
  isWritableEnded,
  isWritableErrored,
  isWritableFinished,
  isWritableNodeStream,
  isWritableStream,
  kAutoDestroy,
  kClosed,
  kCloseEmitted,
  kConstructed,
  kControllerErrorFunction,
  kDestroyed,
  kEmitClose,
  kErrored,
  kErrorEmitted,
  kIsClosedPromise,
  kIsDestroyed,
  kIsDisturbed,
  kIsErrored,
  kIsReadable,
  kIsWritable,
  kObjectMode,
  kOnConstructed,
  kState,
  willEmitClose,
};

View file

@ -1,242 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
const kIsDisturbed = Symbol("kIsDisturbed");
function isReadableNodeStream(obj) {
return !!(
obj &&
typeof obj.pipe === "function" &&
typeof obj.on === "function" &&
(!obj._writableState || obj._readableState?.readable !== false) && // Duplex
(!obj._writableState || obj._readableState) // Writable has .pipe.
);
}
function isWritableNodeStream(obj) {
return !!(
obj &&
typeof obj.write === "function" &&
typeof obj.on === "function" &&
(!obj._readableState || obj._writableState?.writable !== false) // Duplex
);
}
function isDuplexNodeStream(obj) {
return !!(
obj &&
(typeof obj.pipe === "function" && obj._readableState) &&
typeof obj.on === "function" &&
typeof obj.write === "function"
);
}
function isNodeStream(obj) {
return (
obj &&
(
obj._readableState ||
obj._writableState ||
(typeof obj.write === "function" && typeof obj.on === "function") ||
(typeof obj.pipe === "function" && typeof obj.on === "function")
)
);
}
function isDestroyed(stream) {
if (!isNodeStream(stream)) return null;
const wState = stream._writableState;
const rState = stream._readableState;
const state = wState || rState;
return !!(stream.destroyed || state?.destroyed);
}
// Have been end():d.
function isWritableEnded(stream) {
if (!isWritableNodeStream(stream)) return null;
if (stream.writableEnded === true) return true;
const wState = stream._writableState;
if (wState?.errored) return false;
if (typeof wState?.ended !== "boolean") return null;
return wState.ended;
}
// Have emitted 'finish'.
function isWritableFinished(stream, strict) {
if (!isWritableNodeStream(stream)) return null;
if (stream.writableFinished === true) return true;
const wState = stream._writableState;
if (wState?.errored) return false;
if (typeof wState?.finished !== "boolean") return null;
return !!(
wState.finished ||
(strict === false && wState.ended === true && wState.length === 0)
);
}
// Have been push(null):d.
function isReadableEnded(stream) {
if (!isReadableNodeStream(stream)) return null;
if (stream.readableEnded === true) return true;
const rState = stream._readableState;
if (!rState || rState.errored) return false;
if (typeof rState?.ended !== "boolean") return null;
return rState.ended;
}
// Have emitted 'end'.
function isReadableFinished(stream, strict) {
if (!isReadableNodeStream(stream)) return null;
const rState = stream._readableState;
if (rState?.errored) return false;
if (typeof rState?.endEmitted !== "boolean") return null;
return !!(
rState.endEmitted ||
(strict === false && rState.ended === true && rState.length === 0)
);
}
function isDisturbed(stream) {
return !!(stream && (
stream.readableDidRead ||
stream.readableAborted ||
stream[kIsDisturbed]
));
}
function isReadable(stream) {
const r = isReadableNodeStream(stream);
if (r === null || typeof stream?.readable !== "boolean") return null;
if (isDestroyed(stream)) return false;
return r && stream.readable && !isReadableFinished(stream);
}
function isWritable(stream) {
const r = isWritableNodeStream(stream);
if (r === null || typeof stream?.writable !== "boolean") return null;
if (isDestroyed(stream)) return false;
return r && stream.writable && !isWritableEnded(stream);
}
function isFinished(stream, opts) {
if (!isNodeStream(stream)) {
return null;
}
if (isDestroyed(stream)) {
return true;
}
if (opts?.readable !== false && isReadable(stream)) {
return false;
}
if (opts?.writable !== false && isWritable(stream)) {
return false;
}
return true;
}
function isClosed(stream) {
if (!isNodeStream(stream)) {
return null;
}
const wState = stream._writableState;
const rState = stream._readableState;
if (
typeof wState?.closed === "boolean" ||
typeof rState?.closed === "boolean"
) {
return wState?.closed || rState?.closed;
}
if (typeof stream._closed === "boolean" && isOutgoingMessage(stream)) {
return stream._closed;
}
return null;
}
function isOutgoingMessage(stream) {
return (
typeof stream._closed === "boolean" &&
typeof stream._defaultKeepAlive === "boolean" &&
typeof stream._removedConnection === "boolean" &&
typeof stream._removedContLen === "boolean"
);
}
function isServerResponse(stream) {
return (
typeof stream._sent100 === "boolean" &&
isOutgoingMessage(stream)
);
}
function isServerRequest(stream) {
return (
typeof stream._consuming === "boolean" &&
typeof stream._dumped === "boolean" &&
stream.req?.upgradeOrConnect === undefined
);
}
function willEmitClose(stream) {
if (!isNodeStream(stream)) return null;
const wState = stream._writableState;
const rState = stream._readableState;
const state = wState || rState;
return (!state && isServerResponse(stream)) || !!(
state &&
state.autoDestroy &&
state.emitClose &&
state.closed === false
);
}
export default {
isDisturbed,
kIsDisturbed,
isClosed,
isDestroyed,
isDuplexNodeStream,
isFinished,
isReadable,
isReadableNodeStream,
isReadableEnded,
isReadableFinished,
isNodeStream,
isWritable,
isWritableNodeStream,
isWritableEnded,
isWritableFinished,
isServerRequest,
isServerResponse,
willEmitClose,
};
export {
isClosed,
isDestroyed,
isDisturbed,
isDuplexNodeStream,
isFinished,
isNodeStream,
isReadable,
isReadableEnded,
isReadableFinished,
isReadableNodeStream,
isServerRequest,
isServerResponse,
isWritable,
isWritableEnded,
isWritableFinished,
isWritableNodeStream,
kIsDisturbed,
willEmitClose,
};

File diff suppressed because it is too large Load diff

View file

@ -1,9 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// deno-lint-ignore-file
import { Writable } from "ext:deno_node/_stream.mjs";
const { WritableState, fromWeb, toWeb } = Writable;
export default Writable;
export { fromWeb, toWeb, WritableState };

View file

@ -0,0 +1,693 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { destroy } from "ext:deno_node/internal/streams/destroy.js";
import finished from "ext:deno_node/internal/streams/end-of-stream.js";
import {
  isDestroyed,
  isReadable,
  isReadableEnded,
  isWritable,
  isWritableEnded,
} from "ext:deno_node/internal/streams/utils.js";
import { ReadableStream, WritableStream } from "node:stream/web";
import {
  validateBoolean,
  validateObject,
} from "ext:deno_node/internal/validators.mjs";
import {
  createDeferredPromise,
  kEmptyObject,
  normalizeEncoding,
} from "ext:deno_node/internal/util.mjs";
import {
  AbortError,
  ERR_INVALID_ARG_TYPE,
  ERR_INVALID_ARG_VALUE,
} from "ext:deno_node/internal/errors.ts";
import process from "node:process";
import { Buffer } from "node:buffer";
import { Duplex, Readable, Writable } from "node:stream";
// Strict instanceof checks against the WHATWG classes (unlike the
// duck-typing in internal/streams/utils): the adapters below need the
// real implementations because they call getReader()/getWriter().
function isWritableStream(object) {
  return object instanceof WritableStream;
}
function isReadableStream(object) {
  return object instanceof ReadableStream;
}
/**
 * Wraps a WHATWG ReadableStream in a Node.js stream.Readable.
 * The web stream is locked via getReader(); destroying the Readable
 * cancels the reader.
 *
 * @param {ReadableStream} readableStream
 * @param {Object} [options] highWaterMark, encoding, objectMode, signal
 *   (forwarded to the Readable constructor).
 * @returns {Readable}
 */
export function newStreamReadableFromReadableStream(
  readableStream,
  options = kEmptyObject,
) {
  if (!isReadableStream(readableStream)) {
    // NOTE(review): ERR_INVALID_ARG_TYPE / ERR_INVALID_ARG_VALUE are not in
    // this module's visible import list — confirm they are imported, or
    // these throw paths raise a ReferenceError instead.
    throw new ERR_INVALID_ARG_TYPE(
      "readableStream",
      "ReadableStream",
      readableStream,
    );
  }
  validateObject(options, "options");
  const {
    highWaterMark,
    encoding,
    objectMode = false,
    signal,
  } = options;
  if (encoding !== undefined && !Buffer.isEncoding(encoding)) {
    throw new ERR_INVALID_ARG_VALUE(encoding, "options.encoding");
  }
  validateBoolean(objectMode, "options.objectMode");
  const reader = readableStream.getReader();
  let closed = false;
  const readable = new Readable({
    objectMode,
    highWaterMark,
    encoding,
    signal,
    read() {
      // Pull exactly one chunk per _read call; done => EOF.
      reader.read().then(
        (chunk) => {
          if (chunk.done) {
            readable.push(null);
          } else {
            readable.push(chunk.value);
          }
        },
        (error) => destroy.call(readable, error),
      );
    },
    destroy(error, callback) {
      function done() {
        try {
          callback(error);
        } catch (error) {
          // In a next tick because this is happening within
          // a promise context, and if there are any errors
          // thrown we don't want those to cause an unhandled
          // rejection. Let's just escape the promise and
          // handle it separately.
          process.nextTick(() => {
            throw error;
          });
        }
      }
      if (!closed) {
        reader.cancel(error).then(done, done);
        return;
      }
      done();
    },
  });
  // Track reader closure so destroy() can skip a redundant cancel().
  reader.closed.then(
    () => {
      closed = true;
    },
    (error) => {
      closed = true;
      destroy.call(readable, error);
    },
  );
  return readable;
}
/**
 * Wraps a WHATWG WritableStream in a Node.js stream.Writable.
 * The web stream is locked via getWriter(); finishing the Writable closes
 * the writer, destroying it aborts the writer.
 *
 * @param {WritableStream} writableStream
 * @param {Object} [options] highWaterMark, decodeStrings, objectMode,
 *   signal (forwarded to the Writable constructor).
 * @returns {Writable}
 */
export function newStreamWritableFromWritableStream(
  writableStream,
  options = kEmptyObject,
) {
  if (!isWritableStream(writableStream)) {
    throw new ERR_INVALID_ARG_TYPE(
      "writableStream",
      "WritableStream",
      writableStream,
    );
  }
  validateObject(options, "options");
  const {
    highWaterMark,
    decodeStrings = true,
    objectMode = false,
    signal,
  } = options;
  validateBoolean(objectMode, "options.objectMode");
  validateBoolean(decodeStrings, "options.decodeStrings");
  const writer = writableStream.getWriter();
  let closed = false;
  const writable = new Writable({
    highWaterMark,
    objectMode,
    decodeStrings,
    signal,
    writev(chunks, callback) {
      function done(error) {
        // NOTE(review): on the rejection path `error` is a single Error,
        // not an array, so this filter would throw — verify against the
        // upstream Node.js source before changing.
        error = error.filter((e) => e);
        try {
          callback(error.length === 0 ? undefined : error);
        } catch (error) {
          // In a next tick because this is happening within
          // a promise context, and if there are any errors
          // thrown we don't want those to cause an unhandled
          // rejection. Let's just escape the promise and
          // handle it separately.
          process.nextTick(() => destroy.call(writable, error));
        }
      }
      writer.ready.then(
        () =>
          Promise.all(
            chunks.map((data) => writer.write(data.chunk)),
          ).then(done, done),
        done,
      );
    },
    write(chunk, encoding, callback) {
      if (typeof chunk === "string" && decodeStrings && !objectMode) {
        // Re-encode string chunks as Uint8Array for the web stream.
        chunk = Buffer.from(chunk, encoding);
        chunk = new Uint8Array(
          chunk.buffer,
          chunk.byteOffset,
          chunk.byteLength,
        );
      }
      function done(error) {
        try {
          callback(error);
        } catch (error) {
          // Fixed: previously called `destroy(this, duplex, error)` where
          // `duplex` is not defined in this function (ReferenceError).
          // Defer and destroy the wrapping Writable instead, matching the
          // writev handler above.
          process.nextTick(() => destroy.call(writable, error));
        }
      }
      writer.ready.then(
        () => writer.write(chunk).then(done, done),
        done,
      );
    },
    destroy(error, callback) {
      function done() {
        try {
          callback(error);
        } catch (error) {
          // In a next tick because this is happening within
          // a promise context, and if there are any errors
          // thrown we don't want those to cause an unhandled
          // rejection. Let's just escape the promise and
          // handle it separately.
          process.nextTick(() => {
            throw error;
          });
        }
      }
      if (!closed) {
        if (error != null) {
          writer.abort(error).then(done, done);
        } else {
          writer.close().then(done, done);
        }
        return;
      }
      done();
    },
    final(callback) {
      function done(error) {
        try {
          callback(error);
        } catch (error) {
          // In a next tick because this is happening within
          // a promise context, and if there are any errors
          // thrown we don't want those to cause an unhandled
          // rejection. Let's just escape the promise and
          // handle it separately.
          process.nextTick(() => destroy.call(writable, error));
        }
      }
      if (!closed) {
        writer.close().then(done, done);
      }
    },
  });
  // Track writer closure so destroy() can skip a redundant abort/close.
  writer.closed.then(
    () => {
      closed = true;
    },
    (error) => {
      closed = true;
      destroy.call(writable, error);
    },
  );
  return writable;
}
/**
 * Builds a Node.js Duplex from a { readable, writable } pair of WHATWG
 * streams. Both web streams are locked (getReader()/getWriter()).
 *
 * @param {{ readable: ReadableStream, writable: WritableStream }} pair
 * @param {Object} [options] allowHalfOpen, objectMode, encoding,
 *   decodeStrings, highWaterMark, signal (forwarded to Duplex).
 * @returns {Duplex}
 */
export function newStreamDuplexFromReadableWritablePair(
  pair,
  options = kEmptyObject,
) {
  validateObject(pair, "pair");
  const {
    readable: readableStream,
    writable: writableStream,
  } = pair;
  if (!isReadableStream(readableStream)) {
    throw new ERR_INVALID_ARG_TYPE(
      "pair.readable",
      "ReadableStream",
      readableStream,
    );
  }
  if (!isWritableStream(writableStream)) {
    throw new ERR_INVALID_ARG_TYPE(
      "pair.writable",
      "WritableStream",
      writableStream,
    );
  }
  validateObject(options, "options");
  const {
    allowHalfOpen = false,
    objectMode = false,
    encoding,
    decodeStrings = true,
    highWaterMark,
    signal,
  } = options;
  validateBoolean(objectMode, "options.objectMode");
  if (encoding !== undefined && !Buffer.isEncoding(encoding)) {
    throw new ERR_INVALID_ARG_VALUE(encoding, "options.encoding");
  }
  const writer = writableStream.getWriter();
  const reader = readableStream.getReader();
  let writableClosed = false;
  let readableClosed = false;
  const duplex = new Duplex({
    allowHalfOpen,
    highWaterMark,
    objectMode,
    encoding,
    decodeStrings,
    signal,
    writev(chunks, callback) {
      function done(error) {
        error = error.filter((e) => e);
        try {
          callback(error.length === 0 ? undefined : error);
        } catch (error) {
          // NOTE(review): `destroy` is invoked here as `destroy(duplex,
          // error)`, but elsewhere in this module it is used as
          // `destroy.call(stream, error)`. Against the single `destroy`
          // import, only one calling convention can be right — verify
          // against internal/streams/destroy.js's export signature.
          process.nextTick(() => destroy(duplex, error));
        }
      }
      writer.ready.then(
        () =>
          Promise.all(
            chunks.map((data) => writer.write(data.chunk)),
          ).then(done, done),
        done,
      );
    },
    write(chunk, encoding, callback) {
      if (typeof chunk === "string" && decodeStrings && !objectMode) {
        // Re-encode string chunks as Uint8Array for the web stream.
        chunk = Buffer.from(chunk, encoding);
        chunk = new Uint8Array(
          chunk.buffer,
          chunk.byteOffset,
          chunk.byteLength,
        );
      }
      function done(error) {
        try {
          callback(error);
        } catch (error) {
          destroy(duplex, error);
        }
      }
      writer.ready.then(
        () => writer.write(chunk).then(done, done),
        done,
      );
    },
    final(callback) {
      function done(error) {
        try {
          callback(error);
        } catch (error) {
          // In a next tick because this is happening within
          // a promise context, and if there are any errors
          // thrown we don't want those to cause an unhandled
          // rejection. Let's just escape the promise and
          // handle it separately.
          process.nextTick(() => destroy(duplex, error));
        }
      }
      if (!writableClosed) {
        writer.close().then(done, done);
      }
    },
    read() {
      // Pull exactly one chunk per _read call; done => EOF.
      reader.read().then(
        (chunk) => {
          if (chunk.done) {
            duplex.push(null);
          } else {
            duplex.push(chunk.value);
          }
        },
        (error) => destroy(duplex, error),
      );
    },
    destroy(error, callback) {
      function done() {
        try {
          callback(error);
        } catch (error) {
          // In a next tick because this is happening within
          // a promise context, and if there are any errors
          // thrown we don't want those to cause an unhandled
          // rejection. Let's just escape the promise and
          // handle it separately.
          process.nextTick(() => {
            throw error;
          });
        }
      }
      async function closeWriter() {
        if (!writableClosed) {
          await writer.abort(error);
        }
      }
      async function closeReader() {
        if (!readableClosed) {
          await reader.cancel(error);
        }
      }
      if (!writableClosed || !readableClosed) {
        Promise.all([
          closeWriter(),
          closeReader(),
        ]).then(done, done);
        return;
      }
      done();
    },
  });
  // Track writer/reader closure so destroy() can skip redundant
  // abort/cancel calls; failure on either side tears down the duplex.
  writer.closed.then(
    () => {
      writableClosed = true;
    },
    (error) => {
      writableClosed = true;
      readableClosed = true;
      destroy(duplex, error);
    },
  );
  reader.closed.then(
    () => {
      readableClosed = true;
    },
    (error) => {
      writableClosed = true;
      readableClosed = true;
      destroy(duplex, error);
    },
  );
  return duplex;
}
/**
 * Wraps a Node.js stream.Readable in a WHATWG ReadableStream.
 * Back-pressure is mapped via pause()/resume() against the controller's
 * desiredSize.
 *
 * @param {Readable} streamReadable
 * @param {Object} [options] optional options.strategy queuing strategy.
 * @returns {ReadableStream}
 */
export function newReadableStreamFromStreamReadable(
  streamReadable,
  options = kEmptyObject,
) {
  // Not using the internal/streams/utils isReadableNodeStream utility
  // here because it will return false if streamReadable is a Duplex
  // whose readable option is false. For a Duplex that is not readable,
  // we want it to pass this check but return a closed ReadableStream.
  if (typeof streamReadable?._readableState !== "object") {
    throw new ERR_INVALID_ARG_TYPE(
      "streamReadable",
      "stream.Readable",
      streamReadable,
    );
  }
  if (isDestroyed(streamReadable) || !isReadable(streamReadable)) {
    const readable = new ReadableStream();
    readable.cancel();
    return readable;
  }
  const objectMode = streamReadable.readableObjectMode;
  const highWaterMark = streamReadable.readableHighWaterMark;
  const evaluateStrategyOrFallback = (strategy) => {
    // If there is a strategy available, use it
    if (strategy) {
      return strategy;
    }
    if (objectMode) {
      // When running in objectMode explicitly but no strategy, we just fall
      // back to CountQueuingStrategy
      return new CountQueuingStrategy({ highWaterMark });
    }
    // When not running in objectMode explicitly, we just fall
    // back to a minimal strategy that just specifies the highWaterMark
    // and no size algorithm. Using a ByteLengthQueuingStrategy here
    // is unnecessary.
    return { highWaterMark };
  };
  const strategy = evaluateStrategyOrFallback(options?.strategy);
  let controller;
  function onData(chunk) {
    // Copy the Buffer to detach it from the pool.
    if (Buffer.isBuffer(chunk) && !objectMode) {
      chunk = new Uint8Array(chunk);
    }
    controller.enqueue(chunk);
    if (controller.desiredSize <= 0) {
      streamReadable.pause();
    }
  }
  streamReadable.pause();
  const cleanup = finished(streamReadable, (error) => {
    if (error?.code === "ERR_STREAM_PREMATURE_CLOSE") {
      const err = new AbortError(undefined, { cause: error });
      error = err;
    }
    cleanup();
    // This is a protection against non-standard, legacy streams
    // that happen to emit an error event again after finished is called.
    streamReadable.on("error", () => {});
    if (error) {
      return controller.error(error);
    }
    controller.close();
  });
  streamReadable.on("data", onData);
  return new ReadableStream({
    start(c) {
      controller = c;
    },
    pull() {
      streamReadable.resume();
    },
    cancel(reason) {
      // NOTE(review): called as `destroy(streamReadable, reason)` while
      // other call sites in this module use `destroy.call(stream, error)`
      // — verify which convention matches the `destroy` import.
      destroy(streamReadable, reason);
    },
  }, strategy);
}
/**
 * Wraps a Node.js stream.Writable in a WHATWG WritableStream.
 * Back-pressure is mapped via write()'s return value and 'drain'.
 *
 * @param {Writable} streamWritable
 * @returns {WritableStream}
 */
export function newWritableStreamFromStreamWritable(streamWritable) {
  // Not using the internal/streams/utils isWritableNodeStream utility
  // here because it will return false if streamWritable is a Duplex
  // whose writable option is false. For a Duplex that is not writable,
  // we want it to pass this check but return a closed WritableStream.
  if (typeof streamWritable?._writableState !== "object") {
    throw new ERR_INVALID_ARG_TYPE(
      "streamWritable",
      "stream.Writable",
      streamWritable,
    );
  }
  if (isDestroyed(streamWritable) || !isWritable(streamWritable)) {
    const writable = new WritableStream();
    writable.close();
    return writable;
  }
  const highWaterMark = streamWritable.writableHighWaterMark;
  const strategy = streamWritable.writableObjectMode
    ? new CountQueuingStrategy({ highWaterMark })
    : { highWaterMark };
  let controller;
  let backpressurePromise;
  let closed;
  function onDrain() {
    if (backpressurePromise !== undefined) {
      backpressurePromise.resolve();
    }
  }
  const cleanup = finished(streamWritable, (error) => {
    if (error?.code === "ERR_STREAM_PREMATURE_CLOSE") {
      const err = new AbortError(undefined, { cause: error });
      error = err;
    }
    cleanup();
    // This is a protection against non-standard, legacy streams
    // that happen to emit an error event again after finished is called.
    streamWritable.on("error", () => {});
    if (error != null) {
      if (backpressurePromise !== undefined) {
        backpressurePromise.reject(error);
      }
      // If closed is not undefined, the error is happening
      // after the WritableStream close has already started.
      // We need to reject it here.
      if (closed !== undefined) {
        closed.reject(error);
        closed = undefined;
      }
      controller.error(error);
      controller = undefined;
      return;
    }
    if (closed !== undefined) {
      closed.resolve();
      closed = undefined;
      return;
    }
    controller.error(new AbortError());
    controller = undefined;
  });
  streamWritable.on("drain", onDrain);
  return new WritableStream({
    start(c) {
      controller = c;
    },
    async write(chunk) {
      if (streamWritable.writableNeedDrain || !streamWritable.write(chunk)) {
        // NOTE(review): createDeferredPromise is not among this module's
        // visible imports — confirm it is in scope (internal/util).
        backpressurePromise = createDeferredPromise();
        return backpressurePromise.promise.finally(() => {
          backpressurePromise = undefined;
        });
      }
    },
    abort(reason) {
      destroy(streamWritable, reason);
    },
    close() {
      if (closed === undefined && !isWritableEnded(streamWritable)) {
        closed = createDeferredPromise();
        streamWritable.end();
        return closed.promise;
      }
      controller = undefined;
      return Promise.resolve();
    },
  }, strategy);
}
/**
 * Splits a Node.js Duplex into a { readable, writable } pair of WHATWG
 * streams. Sides that are unavailable (destroyed, or disabled via
 * options) are returned as already-closed web streams.
 *
 * @param {Duplex} duplex
 * @returns {{ readable: ReadableStream, writable: WritableStream }}
 */
export function newReadableWritablePairFromDuplex(duplex) {
  // Not using the internal/streams/utils isWritableNodeStream and
  // isReadableNodestream utilities here because they will return false
  // if the duplex was created with writable or readable options set to
  // false. Instead, we'll check the readable and writable state after
  // and return closed WritableStream or closed ReadableStream as
  // necessary.
  const hasBothStates = typeof duplex?._writableState === "object" &&
    typeof duplex?._readableState === "object";
  if (!hasBothStates) {
    throw new ERR_INVALID_ARG_TYPE("duplex", "stream.Duplex", duplex);
  }
  if (isDestroyed(duplex)) {
    const writable = new WritableStream();
    const readable = new ReadableStream();
    writable.close();
    readable.cancel();
    return { readable, writable };
  }
  let writable;
  if (isWritable(duplex)) {
    writable = newWritableStreamFromStreamWritable(duplex);
  } else {
    writable = new WritableStream();
    writable.close();
  }
  let readable;
  if (isReadable(duplex)) {
    readable = newReadableStreamFromStreamReadable(duplex);
  } else {
    readable = new ReadableStream();
    readable.cancel();
  }
  return { writable, readable };
}

View file

@ -70,7 +70,7 @@ export let argv0 = "";
export let arch = "";
export let platform = "";
export let platform = isWindows ? "win32" : ""; // initialized during bootstrap
export let pid = 0;

View file

@ -1,49 +1,175 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
// compose, destroy and isDisturbed are experimental APIs without
// typings. They can be exposed once they are released as stable in Node
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
"use strict";
import { primordials } from "ext:core/mod.js";
const {
ObjectDefineProperty,
ObjectKeys,
ReflectApply,
} = primordials;
import * as internalUtil from "ext:deno_node/internal/util.mjs";
const {
promisify: { custom: customPromisify },
} = internalUtil;
// @deno-types="./_stream.d.ts"
import {
_isArrayBufferView,
_isUint8Array,
_uint8ArrayToBuffer,
addAbortSignal,
compose,
destroy,
Duplex,
finished,
isDestroyed,
isDisturbed,
isErrored,
isReadable,
isWritable,
PassThrough,
pipeline,
Readable,
Stream,
Transform,
Writable,
} from "ext:deno_node/_stream.mjs";
promiseReturningOperators,
streamReturningOperators,
} from "ext:deno_node/internal/streams/operators.js";
import compose from "ext:deno_node/internal/streams/compose.js";
import {
getDefaultHighWaterMark,
setDefaultHighWaterMark,
} from "ext:deno_node/internal/streams/state.mjs";
} from "ext:deno_node/internal/streams/state.js";
import { pipeline } from "ext:deno_node/internal/streams/pipeline.js";
import { destroyer } from "ext:deno_node/internal/streams/destroy.js";
import { eos } from "ext:deno_node/internal/streams/end-of-stream.js";
import { Buffer } from "ext:deno_node/internal/buffer.mjs";
import * as promises from "node:stream/promises";
import * as utils from "ext:deno_node/internal/streams/utils.js";
import {
isArrayBufferView,
isUint8Array,
} from "ext:deno_node/internal/util/types.ts";
import { Stream } from "ext:deno_node/internal/streams/legacy.js";
import Readable from "ext:deno_node/internal/streams/readable.js";
import Writable from "ext:deno_node/internal/streams/writable.js";
import Duplex from "ext:deno_node/internal/streams/duplex.js";
import Transform from "ext:deno_node/internal/streams/transform.js";
import PassThrough from "ext:deno_node/internal/streams/passthrough.js";
import duplexPair from "ext:deno_node/internal/streams/duplexpair.js";
import { addAbortSignal } from "ext:deno_node/internal/streams/add-abort-signal.js";
// Re-export the stream-state query helpers from internal/streams/utils on the
// `Stream` namespace object, plus the primary `Readable` class.
Stream.isDestroyed = utils.isDestroyed;
Stream.isDisturbed = utils.isDisturbed;
Stream.isErrored = utils.isErrored;
Stream.isReadable = utils.isReadable;
Stream.isWritable = utils.isWritable;
Stream.Readable = Readable;
// Install every stream-returning operator (e.g. map/filter/take) as a
// non-enumerable method on Readable.prototype. The result of each operator is
// re-wrapped via Readable.from so chaining keeps yielding Readables.
const streamKeys = ObjectKeys(streamReturningOperators);
for (let i = 0; i < streamKeys.length; i++) {
  const key = streamKeys[i];
  const op = streamReturningOperators[key];
  // Must be a plain function (not an arrow): `this` is the receiving stream
  // and `new.target` detects erroneous `new stream.map(...)` calls.
  function fn(...args) {
    if (new.target) {
      throw new ERR_ILLEGAL_CONSTRUCTOR();
    }
    return Stream.Readable.from(ReflectApply(op, this, args));
  }
  // Mirror the operator's own name/length so the wrapper is introspectable.
  ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name });
  ObjectDefineProperty(fn, "length", { __proto__: null, value: op.length });
  ObjectDefineProperty(Stream.Readable.prototype, key, {
    __proto__: null,
    value: fn,
    enumerable: false,
    configurable: true,
    writable: true,
  });
}
// Same as above for the promise-returning operators (e.g. toArray/forEach):
// installed non-enumerably on Readable.prototype, but their result is
// returned as-is (a promise) rather than wrapped in Readable.from.
const promiseKeys = ObjectKeys(promiseReturningOperators);
for (let i = 0; i < promiseKeys.length; i++) {
  const key = promiseKeys[i];
  const op = promiseReturningOperators[key];
  function fn(...args) {
    if (new.target) {
      throw new ERR_ILLEGAL_CONSTRUCTOR();
    }
    return ReflectApply(op, this, args);
  }
  ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name });
  ObjectDefineProperty(fn, "length", { __proto__: null, value: op.length });
  ObjectDefineProperty(Stream.Readable.prototype, key, {
    __proto__: null,
    value: fn,
    enumerable: false,
    configurable: true,
    writable: true,
  });
}
// Attach the remaining public classes and helpers to the `Stream` namespace
// object, matching the Node.js `stream` module surface.
Stream.Writable = Writable;
Stream.Duplex = Duplex;
Stream.Transform = Transform;
Stream.PassThrough = PassThrough;
Stream.duplexPair = duplexPair;
Stream.pipeline = pipeline;
Stream.addAbortSignal = addAbortSignal;
// `finished` and `destroy` are aliases for the internal eos/destroyer helpers.
Stream.finished = eos;
Stream.destroy = destroyer;
Stream.compose = compose;
Stream.setDefaultHighWaterMark = setDefaultHighWaterMark;
Stream.getDefaultHighWaterMark = getDefaultHighWaterMark;
// Expose `stream.promises` through a getter over the `node:stream/promises`
// namespace import.
ObjectDefineProperty(Stream, "promises", {
  __proto__: null,
  configurable: true,
  enumerable: true,
  get() {
    return promises;
  },
});
// Hook `util.promisify(stream.pipeline)` / `util.promisify(stream.finished)`
// so they resolve to the promise-based implementations.
ObjectDefineProperty(pipeline, customPromisify, {
  __proto__: null,
  enumerable: true,
  get() {
    return promises.pipeline;
  },
});
ObjectDefineProperty(eos, customPromisify, {
  __proto__: null,
  enumerable: true,
  get() {
    return promises.finished;
  },
});
// Backwards-compat with node 0.4.x
Stream.Stream = Stream;
// Underscore-prefixed helpers kept for legacy consumers of the stream module.
Stream._isArrayBufferView = isArrayBufferView;
Stream._isUint8Array = isUint8Array;
Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) {
  // Note: Diverging from Node.js here. Deno doesn't implement
  // FastBuffer so we use regular Buffer.
  return Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
};
export {
_isArrayBufferView,
_isUint8Array,
_uint8ArrayToBuffer,
addAbortSignal,
compose,
destroy,
destroyer,
Duplex,
finished,
duplexPair,
getDefaultHighWaterMark,
isDestroyed,
isDisturbed,
isErrored,
isReadable,
isWritable,
PassThrough,
pipeline,
Readable,
@ -52,5 +178,14 @@ export {
Transform,
Writable,
};
export const _isArrayBufferView = isArrayBufferView;
export const _isUint8Array = Stream._isUint8Array;
export const _uint8ArrayToBuffer = Stream._uint8ArrayToBuffer;
export const isDisturbed = Stream.isDisturbed;
export const isErrored = Stream.isErrored;
export const finished = eos;
export const isReadable = Stream.isReadable;
export const isWritable = Stream.isWritable;
export const isDestroyed = Stream.isDestroyed;
export default Stream;

View file

@ -1,16 +1,20 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
// TODO(petamoriken): enable prefer-primordials for node polyfills
// deno-lint-ignore-file prefer-primordials
import { primordials } from "ext:core/mod.js";
import { TextDecoder } from "ext:deno_web/08_text_encoding.js";
import { Blob } from "ext:deno_web/09_file.js";
import { Buffer } from "node:buffer";
"use strict";
const {
JSONParse,
} = primordials;
/**
* @typedef {import('../_global.d.ts').ReadableStream
* @typedef {import('../internal/webstreams/readablestream').ReadableStream
* } ReadableStream
* @typedef {import('../_stream.d.ts')} Readable
* @typedef {import('../internal/streams/readable')} Readable
*/
/**
@ -68,14 +72,16 @@ async function text(stream) {
*/
async function json(stream) {
const str = await text(stream);
return JSON.parse(str);
return JSONParse(str);
}
export default {
const _defaultExport1 = {
arrayBuffer,
blob,
buffer,
json,
text,
json,
};
export default _defaultExport1;
export { arrayBuffer, blob, buffer, json, text };

View file

@ -0,0 +1,52 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import { primordials } from "ext:core/mod.js";
import {
isIterable,
isNodeStream,
isWebStream,
} from "ext:deno_node/internal/streams/utils.js";
import { pipelineImpl as pl } from "ext:deno_node/internal/streams/pipeline.js";
import { finished } from "ext:deno_node/internal/streams/end-of-stream.js";
import * as _mod2 from "node:stream";
"use strict";
const {
  ArrayPrototypePop,
  Promise,
} = primordials;
// NOTE(review): codemod artifact — referencing the hoisted "node:stream"
// namespace import (_mod2) where the Node.js source had a lazy require,
// presumably to preserve module evaluation order; confirm against
// update_node_stream.ts. The bare expression itself has no runtime effect.
_mod2;
/**
 * Promise-based version of `stream.pipeline`.
 *
 * Accepts a series of streams/iterables; an optional trailing plain object
 * (anything that is not a Node stream, iterable, or web stream) is treated as
 * options and may carry `signal` and `end`, which are forwarded to the
 * underlying pipeline implementation. Resolves with the value passed by the
 * pipeline callback, rejects with its error.
 */
function pipeline(...streams) {
  return new Promise((resolve, reject) => {
    let signal;
    let end;
    const lastArg = streams[streams.length - 1];
    if (
      lastArg && typeof lastArg === "object" &&
      !isNodeStream(lastArg) && !isIterable(lastArg) && !isWebStream(lastArg)
    ) {
      // Pop the options object so it is not treated as a pipeline stage.
      const options = ArrayPrototypePop(streams);
      signal = options.signal;
      end = options.end;
    }
    pl(streams, (err, value) => {
      if (err) {
        reject(err);
      } else {
        resolve(value);
      }
    }, { signal, end });
  });
}
const _defaultExport1 = {
  finished,
  pipeline,
};
export default _defaultExport1;
export { finished, pipeline };

View file

@ -1,12 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
// Copyright Joyent and Node contributors. All rights reserved. MIT license.
import { Stream } from "ext:deno_node/_stream.mjs";
const { finished, pipeline } = Stream.promises;
export default {
finished,
pipeline,
};
export { finished, pipeline };

View file

@ -1,19 +1,29 @@
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
import {
ByteLengthQueuingStrategy,
CountQueuingStrategy,
TransformStream,
TransformStreamDefaultController,
} from "ext:deno_web/06_streams.js";
import {
WritableStream,
WritableStreamDefaultController,
WritableStreamDefaultWriter,
} from "ext:deno_web/06_streams.js";
import {
ReadableByteStreamController,
ReadableStream,
ReadableStreamBYOBReader,
ReadableStreamBYOBRequest,
ReadableStreamDefaultController,
ReadableStreamDefaultReader,
TransformStream,
TransformStreamDefaultController,
WritableStream,
WritableStreamDefaultController,
WritableStreamDefaultWriter,
} from "ext:deno_web/06_streams.js";
import {
ByteLengthQueuingStrategy,
CountQueuingStrategy,
} from "ext:deno_web/06_streams.js";
import {
TextDecoderStream,
@ -23,7 +33,29 @@ import {
CompressionStream,
DecompressionStream,
} from "ext:deno_web/14_compression.js";
"use strict";
const _defaultExport1 = {
ReadableStream,
ReadableStreamDefaultReader,
ReadableStreamBYOBReader,
ReadableStreamBYOBRequest,
ReadableByteStreamController,
ReadableStreamDefaultController,
TransformStream,
TransformStreamDefaultController,
WritableStream,
WritableStreamDefaultWriter,
WritableStreamDefaultController,
ByteLengthQueuingStrategy,
CountQueuingStrategy,
TextEncoderStream,
TextDecoderStream,
CompressionStream,
DecompressionStream,
};
export default _defaultExport1;
export {
ByteLengthQueuingStrategy,
CompressionStream,
@ -43,23 +75,3 @@ export {
WritableStreamDefaultController,
WritableStreamDefaultWriter,
};
export default {
ByteLengthQueuingStrategy,
CompressionStream,
CountQueuingStrategy,
DecompressionStream,
ReadableByteStreamController,
ReadableStream,
ReadableStreamBYOBReader,
ReadableStreamBYOBRequest,
ReadableStreamDefaultController,
ReadableStreamDefaultReader,
TextDecoderStream,
TextEncoderStream,
TransformStream,
TransformStreamDefaultController,
WritableStream,
WritableStreamDefaultController,
WritableStreamDefaultWriter,
};

530
ext/node/update_node_stream.ts Executable file
View file

@ -0,0 +1,530 @@
#!/usr/bin/env -S deno run --allow-read --allow-write --allow-env --allow-run
// deno-lint-ignore-file
// Copyright 2018-2025 the Deno authors. MIT license.
// This file is used to transform Node.js internal streams code to
// Deno polyfills.
//
// Run this script with `--upgrade` to upgrade the streams code. This will update
// the code to the Node.js version specified in `tests/node_compat/runner/suite/node_version.ts`.
//
// This script applies the following transformations:
// a. Rewrite CJS-style internal Node.js modules to ESM for Deno.
// b. Remap internal Node.js modules to Deno equivalents.
// @ts-types="npm:@types/jscodeshift"
import jscodeshift from "npm:jscodeshift@0.15.2";
import type {
AssignmentExpression,
ASTPath,
ExportSpecifier,
FileInfo,
Identifier,
ImportDeclaration,
JSCodeshift,
ObjectExpression,
Property,
} from "npm:jscodeshift@0.15.2";
import $ from "jsr:@david/dax@0.42.0";
import path from "node:path";
import { version } from "../../tests/node_compat/runner/suite/node_version.ts";
import { expandGlobSync } from "jsr:@std/fs@1.0.14/expand-glob";
const globs = [
"internal/streams/*.js",
"stream/*.js",
];
// These have special handling for lazy loading
const ignore = ["duplexify.js"];
// Explicit specifier table: Node.js internal module id → Deno/ESM specifier.
// Consulted before the pattern-based fallback in `mapping`.
const moduleMap: Record<string, string> = {
  "events": "node:events",
  "buffer": "node:buffer",
  "stream": "node:stream",
  "string_decoder": "node:string_decoder",
  "internal/abort_controller": "ext:deno_web/03_abort_signal.js",
  "internal/events/abort_listener":
    "ext:deno_node/internal/events/abort_listener.mjs",
  "internal/assert": "ext:deno_node/internal/assert.mjs",
  "internal/webstreams/adapters":
    "ext:deno_node/internal/webstreams/adapters.js",
  "internal/webstreams/compression": "ext:deno_web/14_compression.js",
  "internal/webstreams/encoding": "ext:deno_web/08_text_encoding.js",
  "internal/errors": "ext:deno_node/internal/errors.ts",
  "internal/event_target": "ext:deno_node/internal/event_target.mjs",
  "internal/util": "ext:deno_node/internal/util.mjs",
  "internal/util/debuglog": "ext:deno_node/internal/util/debuglog.ts",
  "internal/validators": "ext:deno_node/internal/validators.mjs",
  "internal/encoding": "ext:deno_web/08_text_encoding.js",
  "internal/blob": "ext:deno_web/09_file.js",
};
// Use default export for these conditional require()
const defaultLazy = [
  "internal/streams/passthrough",
  "internal/streams/readable",
  "internal/streams/duplexify",
];
// Workaround a bug in our formatter: "export default from;" does not work
// correctly, so we rename it to something else and export.
//
// https://github.com/dprint/dprint-plugin-typescript/issues/705
const renameForDefaultExport = ["from"];
// Pattern-based specifier rewrite for ids not in `moduleMap`:
// webstreams internals collapse onto the single 06_streams.js bundle, other
// internals become `ext:deno_node/...js`, and everything else passes through.
function mapping(source: string): string {
  if (source.startsWith("internal/webstreams")) {
    return "ext:deno_web/06_streams.js";
  }
  if (source.startsWith("internal/")) {
    return `ext:deno_node/${source}.js`;
  }
  return source;
}
// Resolve a Node.js module id to its Deno import specifier: explicit table
// first, pattern fallback second.
function getSource(source: string): string {
  return moduleMap[source] || mapping(source);
}
/**
 * Build the replacement for `module.exports = { ... }`: a temporary const
 * holding the object, an `export default` of that const, and a named export
 * list mirroring the object's plain identifier properties.
 *
 * For `module.exports = { exportedName: localBinding, shorthand }` this emits
 * `export { localBinding as exportedName, shorthand }`. Computed, spread, or
 * literal-keyed properties are skipped for the named list (they remain
 * reachable through the default export object).
 */
function createDefaultAndNamedExport(
  j: JSCodeshift,
  expr: ObjectExpression,
  getUniqueImportId: (id?: string) => Identifier,
) {
  const props = expr.properties;
  const specifiers: ExportSpecifier[] = props
    .filter((prop) =>
      j.Property.check(prop) &&
      // Require both key and value to be identifiers; a literal key (e.g.
      // { "a-b": x }) has no `.name` and cannot form an export specifier.
      j.Identifier.check(prop.key) &&
      j.Identifier.check(prop.value)
    )
    .map((p) => {
      const prop = p as Property;
      return j.exportSpecifier.from({
        // Fix: the exported name comes from the property KEY and the local
        // binding from the property VALUE (`export { local as exported }`).
        // These were previously swapped, which only happened to work for
        // shorthand properties where the two names coincide.
        exported: j.identifier((prop.key as Identifier).name),
        local: j.identifier((prop.value as Identifier).name),
      });
    });
  const tmpId = getUniqueImportId("_defaultExport");
  const tmpDecl = j.variableDeclaration("const", [
    j.variableDeclarator(tmpId, expr),
  ]);
  const defaultExport = j.exportDefaultDeclaration(tmpId);
  const namedExport = j.exportNamedDeclaration(null, specifiers);
  return { tmpDecl, defaultExport, namedExport };
}
// True when `path` is a direct child of the Program node, i.e. a top-level
// statement; used to restrict module.exports rewrites to module scope.
const topLevel = (path: ASTPath) => path.parent.node.type === "Program";
/**
 * Core codemod: rewrites one CJS-style Node.js internal module to ESM.
 *
 * Passes, in order:
 *   1. add a `node:process` import if the bare `process` identifier appears;
 *   2. convert top-level `require(...)` declarations (plain, destructured,
 *      nested-destructured, and `require(...).prop` forms) to imports;
 *   3. convert `module.exports = ...` and `module.exports.X = ...` to
 *      `export default` / named exports;
 *   4. strip `module.exports.` prefixes from internal references;
 *   5. hoist remaining inline `require('foo')` call sites to imports;
 *   6. splice the collected imports (and destructuring replacements) at the
 *      top of the program and synthesize a default export object when only
 *      named `module.exports.X` assignments were found.
 *
 * Returns the transformed source prefixed with the Deno lint/copyright header.
 */
const transform = (file: FileInfo, j: JSCodeshift) => {
  const root = j(file.source);
  let uidCounter = 1;
  // Generate an identifier (`imported1`, `_mod2`, ...) that does not collide
  // with any variable already declared in the module.
  function getUniqueImportId(base = "imported") {
    const used = new Set(Object.keys(root.getVariableDeclarators(() => true)));
    let name;
    do {
      name = `${base}${uidCounter++}`;
    } while (used.has(name));
    return j.identifier(name);
  }
  // Imports to prepend, destructuring declarations to re-insert after them,
  // and original declarations to delete once rewriting is done.
  const requireDecls: ImportDeclaration[] = [];
  // NOTE(review): `index` is recorded here but never used when re-inserting —
  // all replacements are spliced at `requireDecls.length` (see below).
  const destructurings: { index: number; node: any }[] = [];
  const toRemove: ASTPath[] = [];
  let insertedPrimordialsImport = false;
  let insertedProcessImport = false;
  let hasDefaultExport = false;
  // If "process" is used, add import
  root.find(j.Identifier)
    .filter((path) => path.node.name === "process")
    .forEach(() => {
      if (!insertedProcessImport) {
        const processImport = j.importDeclaration(
          [j.importDefaultSpecifier(j.identifier("process"))],
          j.literal("node:process"),
        );
        requireDecls.push(processImport);
        insertedProcessImport = true;
      }
    });
  root.find(j.VariableDeclaration)
    .forEach((path) => {
      path.node.declarations.forEach((decl) => {
        if (decl.type !== "VariableDeclarator") return;
        // `const { ... } = primordials;` → keep the destructuring, but make
        // sure `primordials` itself is imported from ext:core/mod.js.
        if (
          j.ObjectPattern.check(decl.id) &&
          j.Identifier.check(
            decl.init,
            (s) => "name" in s && s.name == "primordials",
          )
        ) {
          // Insert import if it hasn't been added yet
          if (!insertedPrimordialsImport) {
            const primordialsImport = j.importDeclaration(
              [j.importSpecifier(j.identifier("primordials"))],
              j.literal("ext:core/mod.js"),
            );
            requireDecls.push(primordialsImport);
            insertedPrimordialsImport = true;
          }
        }
        if (
          j.CallExpression.check(decl.init) &&
          (decl.init.callee as Identifier)?.name === "require" &&
          decl.init.arguments.length === 1 &&
          j.Literal.check(decl.init.arguments[0])
        ) {
          const callee = decl.init.callee as Identifier;
          // Make sure that name is "require"
          if (callee.name !== "require") {
            throw new Error(
              'Expected "require" as the callee name. Found: ' +
                callee.name,
            );
          }
          const source = decl.init.arguments[0].value as string;
          const id = decl.id;
          if (j.Identifier.check(id)) {
            // const foo = require('bar')
            const importDecl = j.importDeclaration(
              [j.importDefaultSpecifier(j.identifier(id.name))],
              j.literal(getSource(source)),
            );
            requireDecls.push(importDecl);
            toRemove.push(path);
          } else if (j.ObjectPattern.check(id)) {
            // "Flat" means every property binds a plain identifier — only
            // then can the pattern map 1:1 to named import specifiers.
            const isFlat = id.properties.every(
              (p) => j.Property.check(p) && j.Identifier.check(p.value),
            );
            if (isFlat) {
              // const { x, y } = require('bar')
              const importDecl = j.importDeclaration(
                id.properties.map((p) => {
                  const prop = p as Property;
                  return j.importSpecifier(
                    j.identifier((prop.key as Identifier).name),
                    j.identifier((prop.value as Identifier).name),
                  );
                }),
                j.literal(getSource(source)),
              );
              requireDecls.push(importDecl);
              toRemove.push(path);
            } else {
              // const { o: { a } } = require('baz') → import tmp from 'baz'; const { o: { a } } = tmp;
              const importId = getUniqueImportId();
              const importDecl = j.importDeclaration(
                [j.importDefaultSpecifier(importId)],
                j.literal(getSource(source)),
              );
              requireDecls.push(importDecl);
              const replacementDecl = j.variableDeclaration(path.node.kind, [
                j.variableDeclarator(id, importId),
              ]);
              destructurings.push({ index: path.name, node: replacementDecl });
              toRemove.push(path);
            }
          }
        } else if (
          j.MemberExpression.check(decl.init) &&
          j.CallExpression.check(decl.init.object) &&
          (decl.init.object.callee as Identifier)?.name === "require" &&
          decl.init.object.arguments.length === 1 &&
          j.Literal.check(decl.init.object.arguments[0])
        ) {
          // Example: require('internal/errors').codes
          const source = decl.init.object.arguments[0].value as string;
          const accessedProp = (decl.init.property as Identifier).name;
          const importId = getUniqueImportId("_mod"); // e.g., _mod1
          const importDecl = j.importDeclaration(
            [j.importDefaultSpecifier(importId)],
            j.literal(getSource(source)),
          );
          requireDecls.push(importDecl);
          // Reassign: const { ... } = _mod.codes
          const newInit = j.memberExpression(
            importId,
            j.identifier(accessedProp),
          );
          const replacementDecl = j.variableDeclaration(path.node.kind, [
            j.variableDeclarator(decl.id, newInit),
          ]);
          destructurings.push({ index: path.name, node: replacementDecl });
          toRemove.push(path);
        }
      });
    });
  const inlineRequires = new Map(); // module name → imported identifier
  // Replace module.exports = { x, y } with export { x, y }
  const namedExportAssignments: string[] = [];
  const pushEnd = (n: any) => root.get().node.program.body.push(n);
  root.find(j.ExpressionStatement)
    .filter(topLevel)
    .filter((path) => {
      const expr = path.node.expression;
      return (
        j.AssignmentExpression.check(expr) &&
        j.MemberExpression.check(expr.left) &&
        (expr.left.object as Identifier).name === "module" &&
        (expr.left.property as Identifier).name === "exports"
      );
    })
    .forEach((path) => {
      const expr = path.node.expression as AssignmentExpression;
      if (j.ObjectExpression.check(expr.right)) {
        // module.exports = { ... } → temp const + default export + named list.
        const { tmpDecl, defaultExport, namedExport } =
          createDefaultAndNamedExport(j, expr.right, getUniqueImportId);
        j(path).insertBefore(tmpDecl);
        j(path).insertAfter(namedExport);
        j(path).insertAfter(defaultExport);
        hasDefaultExport = true;
        j(path).remove();
      } else if (j.Identifier.check(expr.right)) {
        // module.exports = Foo;
        const id = expr.right;
        if (!hasDefaultExport) {
          let name = id.name;
          // See renameForDefaultExport: dodge a formatter bug with
          // `export default from;` by exporting through an alias const.
          if (renameForDefaultExport.includes(name)) {
            name = "_defaultExport";
            // Assign to a new variable
            const decl = j.variableDeclaration("const", [
              j.variableDeclarator(j.identifier(name), id),
            ]);
            pushEnd(decl);
          }
          const exportDefault = j.exportDefaultDeclaration(
            j.identifier(name),
          );
          pushEnd(exportDefault);
          hasDefaultExport = true;
        }
        const exportNamed = j.exportNamedDeclaration(
          null,
          [j.exportSpecifier.from({
            exported: j.identifier(id.name),
            local: j.identifier(id.name),
          })],
        );
        pushEnd(exportNamed);
        j(path).remove();
      } else {
        // module.exports = () => ... or {}
        const exportDefault = j.exportDefaultDeclaration(expr.right);
        j(path).replaceWith(exportDefault);
      }
    });
  // Handle module.exports.X = ...
  root.find(j.ExpressionStatement)
    .filter(topLevel)
    .filter((path) => {
      const expr = path.node.expression;
      return (
        j.AssignmentExpression.check(expr) &&
        j.MemberExpression.check(expr.left) &&
        j.MemberExpression.check(expr.left.object) &&
        (expr.left.object.object as Identifier).name == "module" &&
        (expr.left.object.property as Identifier).name == "exports"
      );
    })
    .forEach((path) => {
      const expr = path.node.expression as AssignmentExpression;
      const exportName = "property" in expr.left && "name" in expr.left.property
        ? expr.left.property.name
        : null;
      if (typeof exportName !== "string") {
        return;
      }
      const right = expr.right;
      namedExportAssignments.push(exportName);
      if (
        j.Identifier.check(right) &&
        right.name === exportName
      ) {
        // Just export the existing binding
        const exportStmt = j.exportNamedDeclaration(null, [
          j.exportSpecifier.from({
            local: j.identifier(exportName),
            exported: j.identifier(exportName),
          }),
        ]);
        j(path).replaceWith(exportStmt);
      } else {
        // Define new const and export it
        const decl = j.variableDeclaration("const", [
          j.variableDeclarator(j.identifier(exportName), right),
        ]);
        const exportStmt = j.exportNamedDeclaration(null, [
          j.exportSpecifier.from({
            local: j.identifier(exportName),
            exported: j.identifier(exportName),
          }),
        ]);
        j(path).insertBefore(decl);
        j(path).insertAfter(exportStmt);
        j(path).remove();
      }
    });
  // Remove original require declarations
  toRemove.forEach((path) => j(path).remove());
  // Remove `module.exports` from `module.exports.call()`
  root.find(j.MemberExpression)
    .filter((path) => {
      const { node } = path;
      return (
        j.Identifier.check(node.object) &&
        node.object.name === "module" &&
        j.Identifier.check(node.property) &&
        node.property.name === "exports"
      );
    })
    .forEach((path) => {
      const nextProp = path.parentPath.node.property;
      if (j.Identifier.check(nextProp)) {
        j(path.parentPath).replaceWith(nextProp);
      }
    });
  root.find(j.CallExpression)
    .forEach((path) => {
      const { node } = path;
      // Remaining dynamic require('foo')
      if (
        j.Identifier.check(node.callee) &&
        node.callee.name === "require" &&
        node.arguments.length === 1 &&
        j.Literal.check(node.arguments[0])
      ) {
        const source = node.arguments[0].value as string;
        let importId;
        if (inlineRequires.has(source)) {
          importId = inlineRequires.get(source);
        } else {
          importId = getUniqueImportId("_mod");
          inlineRequires.set(source, importId);
          const importDecl = j.importDeclaration(
            [
              // Modules in `defaultLazy` are consumed via their default
              // export; everything else gets a namespace import.
              defaultLazy.includes(source)
                ? j.importDefaultSpecifier(importId)
                : j.importNamespaceSpecifier(importId),
            ],
            j.literal(getSource(source)),
          );
          requireDecls.push(importDecl);
        }
        j(path).replaceWith(importId);
      }
    });
  // Insert import declarations at the top
  if (requireDecls.length > 0) {
    const program = root.get().node.program;
    program.body = [...requireDecls, ...program.body];
  }
  // Insert destructuring replacements below imports
  // NOTE(review): each splice inserts at the same index, so multiple
  // replacements end up in reverse of their original order — presumably
  // harmless since they only reference the imports above; confirm.
  if (destructurings.length > 0) {
    destructurings.forEach(({ node }) => {
      root.get().node.program.body.splice(requireDecls.length, 0, node);
    });
  }
  // Only `module.exports.X = ...` assignments were seen: synthesize a default
  // export object aggregating those names for CJS-interop consumers.
  if (!hasDefaultExport && namedExportAssignments.length > 0) {
    const defaultExportObject = j.objectExpression(
      namedExportAssignments.map((name) =>
        j.objectProperty.from({
          key: j.identifier(name),
          value: j.identifier(name),
          shorthand: true,
        })
      ),
    );
    const exportDefault = j.exportDefaultDeclaration(defaultExportObject);
    root.get().node.program.body.push(exportDefault);
    hasDefaultExport = true;
  }
  const prelude =
    "// deno-lint-ignore-file\n// Copyright 2018-2025 the Deno authors. MIT license.\n\n";
  return prelude + root.toSource({ quote: "single" });
};
const upgrade = Deno.args.includes("--upgrade");
// Don't run if git status is dirty
const status = (await $`git status --porcelain`.text()).trim();
if (status) {
  console.error("Git status is dirty. Please commit or stash your changes.");
  Deno.exit(1);
}
const tag = "v" + version;
// With --upgrade, sparse-clone the pinned Node.js tag (lib/ only) into ./node;
// otherwise reuse whatever checkout is already present.
if (upgrade) {
  await $`rm -rf node`;
  await $`git clone --depth 1 --sparse --branch ${tag} --single-branch https://github.com/nodejs/node.git`;
  await $`git sparse-checkout add lib`.cwd("node");
}
const fromLib = new URL("./node/lib", import.meta.url).pathname;
const toLib = new URL("./polyfills", import.meta.url).pathname;
const root = new URL("../../", import.meta.url).pathname;
// Transform every matching Node.js lib file and mirror it (same relative
// path) into the polyfills directory.
for (const glob of globs) {
  const sourcePath = path.join(fromLib, glob);
  const expand = expandGlobSync(sourcePath);
  for (const entry of expand) {
    if (ignore.includes(entry.name)) {
      console.log(`Ignoring ${entry.name}`);
      continue;
    }
    // NOTE(review): shadows the outer `sourcePath` (the glob pattern above);
    // intentional but worth renaming for clarity.
    const sourcePath = entry.path;
    const code = await Deno.readTextFile(sourcePath);
    const output = transform({ path: sourcePath, source: code }, jscodeshift);
    const relativePath = path.relative(fromLib, sourcePath);
    const targetPath = path.join(toLib, relativePath);
    const targetDir = path.dirname(targetPath);
    await Deno.mkdir(targetDir, { recursive: true });
    await Deno.writeTextFile(targetPath, output);
    console.log(`${sourcePath} -> ${targetPath}`);
  }
}
// Clean up the checkout and run the repo formatter over the generated output.
await $`rm -rf node`;
await $`./tools/format.js`.cwd(root);

View file

@ -335,6 +335,10 @@ function cloneAsUint8Array(O) {
}
}
// Using SymbolFor to make globally available. This is used by `node:stream`
// to interop with the web streams API.
const _isClosedPromise = SymbolFor("nodejs.webstream.isClosedPromise");
const _abortAlgorithm = Symbol("[[abortAlgorithm]]");
const _abortSteps = Symbol("[[AbortSteps]]");
const _autoAllocateChunkSize = Symbol("[[autoAllocateChunkSize]]");
@ -388,8 +392,6 @@ const _writer = Symbol("[[writer]]");
const _writeRequests = Symbol("[[writeRequests]]");
const _brand = webidl.brand;
const _isClosedPromise = Symbol("[[isClosedPromise]]");
function noop() {}
async function noopAsync() {}
const _defaultStartAlgorithm = noop;
@ -6904,6 +6906,45 @@ webidl.converters["async iterable<any>"] = webidl.createAsyncIterableConverter(
internals.resourceForReadableStream = resourceForReadableStream;
export default {
// Non-Public
_state,
// Exposed in global runtime scope
ByteLengthQueuingStrategy,
CountQueuingStrategy,
createProxy,
Deferred,
errorReadableStream,
getReadableStreamResourceBacking,
getWritableStreamResourceBacking,
isDetachedBuffer,
isReadableStreamDisturbed,
ReadableByteStreamController,
ReadableStream,
ReadableStreamBYOBReader,
ReadableStreamBYOBRequest,
readableStreamClose,
readableStreamCollectIntoUint8Array,
ReadableStreamDefaultController,
ReadableStreamDefaultReader,
readableStreamDisturb,
readableStreamForRid,
readableStreamForRidUnrefable,
readableStreamForRidUnrefableRef,
readableStreamForRidUnrefableUnref,
ReadableStreamPrototype,
readableStreamTee,
readableStreamThrowIfErrored,
resourceForReadableStream,
TransformStream,
TransformStreamDefaultController,
WritableStream,
writableStreamClose,
WritableStreamDefaultController,
WritableStreamDefaultWriter,
writableStreamForRid,
};
export {
_isClosedPromise,
// Non-Public

View file

@ -718,6 +718,10 @@ function revokeObjectURL(url) {
URL.createObjectURL = createObjectURL;
URL.revokeObjectURL = revokeObjectURL;
// True when BlobPrototype is on `obj`'s prototype chain — a prototype check
// via primordials rather than `instanceof`, so it is not affected by
// user-level tampering with `Symbol.hasInstance`.
function isBlob(obj) {
  return ObjectPrototypeIsPrototypeOf(BlobPrototype, obj);
}
export {
Blob,
blobFromObjectUrl,
@ -725,4 +729,5 @@ export {
File,
FilePrototype,
getParts,
isBlob,
};