Merge branch 'main' into refactor/event

Kenta Moriuchi 2025-09-05 02:17:22 +09:00
commit 5803e21831
No known key found for this signature in database
GPG key ID: AC843C584A91BE0C
405 changed files with 40825 additions and 17808 deletions


@@ -78,7 +78,7 @@
     "tests/specs/run/error_syntax_empty_trailing_line/error_syntax_empty_trailing_line.mjs"
   ],
   "plugins": [
-    "https://plugins.dprint.dev/typescript-0.95.9.wasm",
+    "https://plugins.dprint.dev/typescript-0.95.11.wasm",
     "https://plugins.dprint.dev/json-0.20.0.wasm",
     "https://plugins.dprint.dev/markdown-0.19.0.wasm",
     "https://plugins.dprint.dev/toml-0.7.0.wasm",

Cargo.lock (generated): 552 changed lines; file diff suppressed because it is too large.


@@ -9,6 +9,7 @@ members = [
   "cli/rt",
   "cli/snapshot",
   "ext/broadcast_channel",
+  "ext/bundle",
   "ext/cache",
   "ext/canvas",
   "ext/console",
@@ -60,25 +61,25 @@ license = "MIT"
 repository = "https://github.com/denoland/deno"
 [workspace.dependencies]
-deno_ast = { version = "=0.49", features = ["transpiling"] }
-deno_core = { git = "https://github.com/petamoriken/deno_core", branch = "feat/export-dispatch-exception" }
+deno_ast = { version = "=0.50.0", features = ["transpiling"] }
+deno_core = { version = "0.356.0" }
 deno_cache_dir = "=0.25.0"
-deno_doc = "=0.182.0"
+deno_doc = "=0.183.0"
 deno_error = "=0.7.0"
-deno_graph = { version = "=0.99.0", default-features = false }
-deno_lint = "=0.77.0"
+deno_graph = { version = "=0.100.0", default-features = false }
+deno_lint = "=0.80.0"
 deno_lockfile = "=0.31.2"
 deno_media_type = { version = "=0.2.9", features = ["module_specifier"] }
 deno_native_certs = "0.3.0"
-deno_npm = "=0.37.0"
+deno_npm = "=0.38.0"
 deno_path_util = "=0.6.1"
 deno_semver = "=0.9.0"
 deno_task_shell = "=0.26.0"
 deno_terminal = "=0.2.2"
 deno_unsync = { version = "0.4.4", default-features = false }
 deno_whoami = "0.1.0"
-eszip = "=0.96.0"
+eszip = "=0.99.0"
 denokv_proto = "0.12.0"
 denokv_remote = "0.12.0"
@@ -87,6 +88,7 @@ denokv_sqlite = { default-features = false, version = "0.12.0" }
 # exts
 deno_broadcast_channel = { version = "0.208.0", path = "./ext/broadcast_channel" }
+deno_bundle_runtime = { version = "0.1.0", path = "./ext/bundle" }
 deno_cache = { version = "0.146.0", path = "./ext/cache" }
 deno_canvas = { version = "0.83.0", path = "./ext/canvas" }
 deno_console = { version = "0.214.0", path = "./ext/console" }
@@ -269,7 +271,7 @@ tower-lsp = { package = "deno_tower_lsp", version = "=0.4.3", features = ["propo
 tower-service = "0.3.2"
 tracing = "0.1"
 tracing-opentelemetry = "0.28.0"
-tracing-subscriber = "0.3.19"
+tracing-subscriber = "0.3.20"
 twox-hash = { version = "=2.1.0", features = ["std", "xxhash64"], default-features = false }
 typed-arena = "=2.0.2"
 url = { version = "2.5", features = ["serde", "expose_internals"] }
@@ -309,7 +311,7 @@ dprint-core = "=0.67.4"
 dprint-plugin-json = "=0.20.0"
 dprint-plugin-jupyter = "=0.2.0"
 dprint-plugin-markdown = "=0.19.0"
-dprint-plugin-typescript = "=0.95.10"
+dprint-plugin-typescript = "=0.95.11"
 env_logger = "=0.11.6"
 fancy-regex = "=0.14.0"
 libsui = "0.10.0"


@@ -67,6 +67,7 @@ winres.workspace = true
 [dependencies]
 deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit", "utils"] }
+deno_bundle_runtime.workspace = true
 deno_cache_dir = { workspace = true, features = ["sync"] }
 deno_config = { workspace = true, features = ["sync", "workspace"] }
 deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] }


@ -23,6 +23,10 @@ use clap::builder::styling::AnsiColor;
use clap::error::ErrorKind; use clap::error::ErrorKind;
use clap::value_parser; use clap::value_parser;
use color_print::cstr; use color_print::cstr;
use deno_bundle_runtime::BundleFormat;
use deno_bundle_runtime::BundlePlatform;
use deno_bundle_runtime::PackageHandling;
use deno_bundle_runtime::SourceMapType;
use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::NodeModulesDirMode;
use deno_config::glob::FilePatterns; use deno_config::glob::FilePatterns;
use deno_config::glob::PathOrPatternSet; use deno_config::glob::PathOrPatternSet;
@ -339,6 +343,7 @@ pub struct RunFlags {
pub watch: Option<WatchFlagsWithPaths>, pub watch: Option<WatchFlagsWithPaths>,
pub bare: bool, pub bare: bool,
pub coverage_dir: Option<String>, pub coverage_dir: Option<String>,
pub print_task_list: bool,
} }
impl RunFlags { impl RunFlags {
@ -349,6 +354,7 @@ impl RunFlags {
watch: None, watch: None,
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
} }
} }
@ -488,61 +494,6 @@ pub struct BundleFlags {
pub watch: bool, pub watch: bool,
} }
#[derive(Clone, Debug, Eq, PartialEq, Copy)]
pub enum BundlePlatform {
Browser,
Deno,
}
#[derive(Clone, Debug, Eq, PartialEq, Copy)]
pub enum BundleFormat {
Esm,
Cjs,
Iife,
}
#[derive(Clone, Debug, Eq, PartialEq, Copy)]
pub enum SourceMapType {
Linked,
Inline,
External,
}
impl std::fmt::Display for BundleFormat {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
BundleFormat::Esm => write!(f, "esm"),
BundleFormat::Cjs => write!(f, "cjs"),
BundleFormat::Iife => write!(f, "iife"),
}
}
}
impl std::fmt::Display for SourceMapType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
SourceMapType::Linked => write!(f, "linked"),
SourceMapType::Inline => write!(f, "inline"),
SourceMapType::External => write!(f, "external"),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Copy)]
pub enum PackageHandling {
Bundle,
External,
}
impl std::fmt::Display for PackageHandling {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
PackageHandling::Bundle => write!(f, "bundle"),
PackageHandling::External => write!(f, "external"),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
pub enum DenoSubcommand { pub enum DenoSubcommand {
Add(AddFlags), Add(AddFlags),
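
The bundle CLI enums (BundlePlatform, BundleFormat, SourceMapType, PackageHandling) are deleted from cli/args/flags.rs here and imported instead from the new deno_bundle_runtime crate in ext/bundle, as the `use deno_bundle_runtime::…` lines added near the top of this file show. A minimal, std-only sketch of the shape one of these types had before the move, copied from the deleted lines above; the definitions in deno_bundle_runtime are assumed to be equivalent:

    use std::fmt;

    // Shape of the moved type, as it appeared in the deleted CLI code; the
    // deno_bundle_runtime definition is assumed to be equivalent.
    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    pub enum BundleFormat {
      Esm,
      Cjs,
      Iife,
    }

    impl fmt::Display for BundleFormat {
      fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
          BundleFormat::Esm => write!(f, "esm"),
          BundleFormat::Cjs => write!(f, "cjs"),
          BundleFormat::Iife => write!(f, "iife"),
        }
      }
    }

    fn main() {
      // The Display impl is what the CLI's format help/error output relies on.
      for f in [BundleFormat::Esm, BundleFormat::Cjs, BundleFormat::Iife] {
        println!("{f}");
      }
      assert_eq!(BundleFormat::Esm.to_string(), "esm");
    }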
@ -752,6 +703,8 @@ pub struct InternalFlags {
/// Used when the language server is configured with an /// Used when the language server is configured with an
/// explicit cache option. /// explicit cache option.
pub cache_path: Option<PathBuf>, pub cache_path: Option<PathBuf>,
/// Override the path to use for the node_modules directory.
pub root_node_modules_dir_override: Option<PathBuf>,
/// Only reads to the lockfile instead of writing to it. /// Only reads to the lockfile instead of writing to it.
pub lockfile_skip_write: bool, pub lockfile_skip_write: bool,
} }
@ -791,13 +744,14 @@ pub struct Flags {
pub no_npm: bool, pub no_npm: bool,
pub reload: bool, pub reload: bool,
pub seed: Option<u64>, pub seed: Option<u64>,
pub strace_ops: Option<Vec<String>>, pub trace_ops: Option<Vec<String>>,
pub unstable_config: UnstableConfig, pub unstable_config: UnstableConfig,
pub unsafely_ignore_certificate_errors: Option<Vec<String>>, pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
pub v8_flags: Vec<String>, pub v8_flags: Vec<String>,
pub code_cache_enabled: bool, pub code_cache_enabled: bool,
pub permissions: PermissionFlags, pub permissions: PermissionFlags,
pub allow_scripts: PackagesAllowedScripts, pub allow_scripts: PackagesAllowedScripts,
pub permission_set: Option<String>,
pub eszip: bool, pub eszip: bool,
pub node_conditions: Vec<String>, pub node_conditions: Vec<String>,
pub preload: Vec<String>, pub preload: Vec<String>,
@ -1164,6 +1118,30 @@ impl Flags {
} }
} }
fn resolve_single_folder_path(
arg: &str,
current_dir: &Path,
maybe_resolve_directory: impl FnOnce(PathBuf) -> Option<PathBuf>,
) -> Option<PathBuf> {
if let Ok(module_specifier) = resolve_url_or_path(arg, current_dir) {
if module_specifier.scheme() == "file"
|| module_specifier.scheme() == "npm"
{
if let Ok(p) = url_to_file_path(&module_specifier) {
maybe_resolve_directory(p)
} else {
Some(current_dir.to_path_buf())
}
} else {
// When the entrypoint doesn't have file: scheme (it's the remote
// script), then we don't auto discover the config file.
None
}
} else {
Some(current_dir.to_path_buf())
}
}
use DenoSubcommand::*; use DenoSubcommand::*;
match &self.subcommand { match &self.subcommand {
Fmt(FmtFlags { files, .. }) => { Fmt(FmtFlags { files, .. }) => {
@ -1176,25 +1154,10 @@ impl Flags {
| Compile(CompileFlags { | Compile(CompileFlags {
source_file: script, source_file: script,
.. ..
}) => { }) => resolve_single_folder_path(script, current_dir, |mut p| {
if let Ok(module_specifier) = resolve_url_or_path(script, current_dir) { if p.pop() { Some(p) } else { None }
if module_specifier.scheme() == "file" })
|| module_specifier.scheme() == "npm" .map(|p| vec![p]),
{
if let Ok(p) = url_to_file_path(&module_specifier) {
p.parent().map(|parent| vec![parent.to_path_buf()])
} else {
Some(vec![current_dir.to_path_buf()])
}
} else {
// When the entrypoint doesn't have file: scheme (it's the remote
// script), then we don't auto discover config file.
None
}
} else {
Some(vec![current_dir.to_path_buf()])
}
}
Task(TaskFlags { Task(TaskFlags {
cwd: Some(path), .. cwd: Some(path), ..
}) => { }) => {
@ -1206,6 +1169,23 @@ impl Flags {
Err(_) => Some(vec![current_dir.to_path_buf()]), Err(_) => Some(vec![current_dir.to_path_buf()]),
} }
} }
Cache(CacheFlags { files, .. })
| Install(InstallFlags::Local(InstallFlagsLocal::Entrypoints(files))) => {
Some(vec![
files
.iter()
.filter_map(|file| {
resolve_single_folder_path(file, current_dir, |mut p| {
if p.is_dir() {
return Some(p);
}
if p.pop() { Some(p) } else { None }
})
})
.next()
.unwrap_or_else(|| current_dir.to_path_buf()),
])
}
_ => Some(vec![current_dir.to_path_buf()]), _ => Some(vec![current_dir.to_path_buf()]),
} }
} }
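
The per-subcommand path logic above is now factored into a resolve_single_folder_path helper so that Run/Compile (which pop to the entrypoint's parent directory) and Cache/Install entrypoints (which keep an argument that is already a directory) share one code path. A simplified, std-only sketch of that behavior; it skips the file:/npm: URL resolution the real helper does via deno_path_util, and the /work paths are made up for the example:

    use std::path::{Path, PathBuf};

    // Simplified sketch: the real helper first resolves file:/npm: URLs and
    // falls back to the current directory for anything that is not a path.
    fn resolve_single_folder_path(
      arg: &str,
      current_dir: &Path,
      maybe_resolve_directory: impl FnOnce(PathBuf) -> Option<PathBuf>,
    ) -> Option<PathBuf> {
      maybe_resolve_directory(current_dir.join(arg))
    }

    fn main() {
      let cwd = Path::new("/work");

      // Run/Compile callers pop to the entrypoint's parent directory.
      let run_dir = resolve_single_folder_path("sub/test.js", cwd, |mut p| {
        if p.pop() { Some(p) } else { None }
      });
      assert_eq!(run_dir, Some(PathBuf::from("/work/sub")));

      // Cache/Install callers keep a path that is already a directory
      // (the real closure checks `p.is_dir()` before popping).
      let cache_dir = resolve_single_folder_path("sub", cwd, Some);
      assert_eq!(cache_dir, Some(PathBuf::from("/work/sub")));
    }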
@ -3877,8 +3857,9 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
.after_help(cstr!(r#"<y>Permission options:</> .after_help(cstr!(r#"<y>Permission options:</>
<y>Docs</>: <c>https://docs.deno.com/go/permissions</> <y>Docs</>: <c>https://docs.deno.com/go/permissions</>
<g>-A, --allow-all</> Allow all permissions. <g>-A, --allow-all</> Allow all permissions.
<g>--no-prompt</> Always throw if required permission wasn't passed. <g>-P, --permission-set[=<<NAME>]</> Loads the permission set from the config file.
<g>--no-prompt</> Always throw if required permission wasn't passed.
<p(245)>Can also be set via the DENO_NO_PROMPT environment variable.</> <p(245)>Can also be set via the DENO_NO_PROMPT environment variable.</>
<g>-R, --allow-read[=<<PATH>...]</> Allow file system read access. Optionally specify allowed paths. <g>-R, --allow-read[=<<PATH>...]</> Allow file system read access. Optionally specify allowed paths.
<p(245)>--allow-read | --allow-read="/etc,/var/log.txt"</> <p(245)>--allow-read | --allow-read="/etc,/var/log.txt"</>
@ -3913,7 +3894,7 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
<p(245)>--deny-ffi | --deny-ffi="./libfoo.so"</> <p(245)>--deny-ffi | --deny-ffi="./libfoo.so"</>
<g>--deny-import[=<<IP_OR_HOSTNAME>...]</> Deny importing from remote hosts. Optionally specify denied IP addresses and host names, with ports as necessary. <g>--deny-import[=<<IP_OR_HOSTNAME>...]</> Deny importing from remote hosts. Optionally specify denied IP addresses and host names, with ports as necessary.
<p(245)>--deny-import | --deny-import="example.com:443,github.com:443"</> <p(245)>--deny-import | --deny-import="example.com:443,github.com:443"</>
<g>DENO_TRACE_PERMISSIONS</> Environmental variable to enable stack traces in permission prompts. <g>DENO_TRACE_PERMISSIONS</> Environmental variable to enable stack traces in permission prompts.
<p(245)>DENO_TRACE_PERMISSIONS=1 deno run main.ts</> <p(245)>DENO_TRACE_PERMISSIONS=1 deno run main.ts</>
"#)) "#))
.arg( .arg(
@ -3925,6 +3906,21 @@ fn permission_args(app: Command, requires: Option<&'static str>) -> Command {
arg arg
} }
) )
.arg(
{
let mut arg = Arg::new("permission-set")
.long("permission-set")
.action(ArgAction::Set)
.num_args(0..=1)
.require_equals(true)
.default_missing_value("")
.short('P');
if let Some(requires) = requires {
arg = arg.requires(requires);
}
arg
}
)
.arg( .arg(
{ {
let mut arg = Arg::new("allow-read") let mut arg = Arg::new("allow-read")
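
The new -P/--permission-set flag takes an optional value: num_args(0..=1) with require_equals(true) and an empty default_missing_value, so a bare -P selects the default (or bench/compile/test) set from the config file while -P=name selects a named set. A small sketch of how that parses, assuming the clap 4 builder API this file already uses; the `toy` command name is invented for the example:

    use clap::{Arg, ArgAction, Command};

    fn permission_set_arg() -> Arg {
      // Mirrors the flag added above: an optional value that must use `=`.
      Arg::new("permission-set")
        .long("permission-set")
        .short('P')
        .action(ArgAction::Set)
        .num_args(0..=1)
        .require_equals(true)
        .default_missing_value("")
    }

    fn main() {
      let cmd = Command::new("toy").arg(permission_set_arg());

      // Bare `-P`: the empty default_missing_value means "use the default set
      // (or the bench/compile/test set for those subcommands)".
      let mut m = cmd.clone().get_matches_from(["toy", "-P"]);
      assert_eq!(m.remove_one::<String>("permission-set").as_deref(), Some(""));

      // `-P=ci`: select the named set from the config file.
      let mut m = cmd.get_matches_from(["toy", "-P=ci"]);
      assert_eq!(m.remove_one::<String>("permission-set").as_deref(), Some("ci"));
    }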
@ -4264,6 +4260,7 @@ fn allow_all_arg() -> Arg {
.conflicts_with("allow-sys") .conflicts_with("allow-sys")
.conflicts_with("allow-ffi") .conflicts_with("allow-ffi")
.conflicts_with("allow-import") .conflicts_with("allow-import")
.conflicts_with("permission-set")
.action(ArgAction::SetTrue) .action(ArgAction::SetTrue)
.help("Allow all permissions") .help("Allow all permissions")
} }
@ -4300,7 +4297,7 @@ fn runtime_misc_args(app: Command) -> Command {
.arg(v8_flags_arg()) .arg(v8_flags_arg())
.arg(seed_arg()) .arg(seed_arg())
.arg(enable_testing_features_arg()) .arg(enable_testing_features_arg())
.arg(strace_ops_arg()) .arg(trace_ops_arg())
.arg(eszip_arg()) .arg(eszip_arg())
} }
@ -4434,7 +4431,6 @@ fn preload_arg() -> Arg {
.long("preload") .long("preload")
.alias("import") .alias("import")
.value_name("FILE") .value_name("FILE")
.use_value_delimiter(true)
.action(ArgAction::Append) .action(ArgAction::Append)
.help("A list of files that will be executed before the main module") .help("A list of files that will be executed before the main module")
.value_hint(ValueHint::FilePath) .value_hint(ValueHint::FilePath)
@ -4498,9 +4494,9 @@ fn enable_testing_features_arg() -> Arg {
.hide(true) .hide(true)
} }
fn strace_ops_arg() -> Arg { fn trace_ops_arg() -> Arg {
Arg::new("strace-ops") Arg::new("trace-ops")
.long("strace-ops") .long("trace-ops")
.num_args(0..) .num_args(0..)
.use_value_delimiter(true) .use_value_delimiter(true)
.require_equals(true) .require_equals(true)
@ -5712,7 +5708,7 @@ fn repl_parse(
seed_arg_parse(flags, matches); seed_arg_parse(flags, matches);
enable_testing_features_arg_parse(flags, matches); enable_testing_features_arg_parse(flags, matches);
env_file_arg_parse(flags, matches); env_file_arg_parse(flags, matches);
strace_ops_parse(flags, matches); trace_ops_parse(flags, matches);
let eval_files = matches let eval_files = matches
.remove_many::<String>("eval-file") .remove_many::<String>("eval-file")
@ -5744,7 +5740,7 @@ fn repl_parse(
fn run_parse( fn run_parse(
flags: &mut Flags, flags: &mut Flags,
matches: &mut ArgMatches, matches: &mut ArgMatches,
mut app: Command, app: Command,
bare: bool, bare: bool,
) -> clap::error::Result<()> { ) -> clap::error::Result<()> {
runtime_args_parse(flags, matches, true, true, true)?; runtime_args_parse(flags, matches, true, true, true)?;
@ -5763,6 +5759,7 @@ fn run_parse(
watch: watch_arg_parse_with_paths(matches)?, watch: watch_arg_parse_with_paths(matches)?,
bare, bare,
coverage_dir, coverage_dir,
print_task_list: false,
}); });
} }
_ => { _ => {
@ -5772,10 +5769,14 @@ fn run_parse(
"[SCRIPT_ARG] may only be omitted with --v8-flags=--help, else to use the repl with arguments, please use the `deno repl` subcommand", "[SCRIPT_ARG] may only be omitted with --v8-flags=--help, else to use the repl with arguments, please use the `deno repl` subcommand",
)); ));
} else { } else {
return Err(app.find_subcommand_mut("run").unwrap().error( // When no script argument is provided, show available tasks like `deno task`
clap::error::ErrorKind::MissingRequiredArgument, flags.subcommand = DenoSubcommand::Run(RunFlags {
"[SCRIPT_ARG] may only be omitted with --v8-flags=--help", script: "".to_string(),
)); watch: None,
bare: false,
coverage_dir: None,
print_task_list: true,
});
} }
} }
} }
@ -6142,6 +6143,9 @@ fn permission_args_parse(
flags: &mut Flags, flags: &mut Flags,
matches: &mut ArgMatches, matches: &mut ArgMatches,
) -> clap::error::Result<()> { ) -> clap::error::Result<()> {
if let Some(set) = matches.remove_one::<String>("permission-set") {
flags.permission_set = Some(set);
}
if let Some(read_wl) = matches.remove_many::<String>("allow-read") { if let Some(read_wl) = matches.remove_many::<String>("allow-read") {
let read_wl = read_wl let read_wl = read_wl
.flat_map(flat_escape_split_commas) .flat_map(flat_escape_split_commas)
@ -6301,7 +6305,7 @@ fn runtime_args_parse(
seed_arg_parse(flags, matches); seed_arg_parse(flags, matches);
enable_testing_features_arg_parse(flags, matches); enable_testing_features_arg_parse(flags, matches);
env_file_arg_parse(flags, matches); env_file_arg_parse(flags, matches);
strace_ops_parse(flags, matches); trace_ops_parse(flags, matches);
eszip_arg_parse(flags, matches); eszip_arg_parse(flags, matches);
Ok(()) Ok(())
} }
@ -6368,9 +6372,9 @@ fn enable_testing_features_arg_parse(
} }
} }
fn strace_ops_parse(flags: &mut Flags, matches: &mut ArgMatches) { fn trace_ops_parse(flags: &mut Flags, matches: &mut ArgMatches) {
if let Some(patterns) = matches.remove_many::<String>("strace-ops") { if let Some(patterns) = matches.remove_many::<String>("trace-ops") {
flags.strace_ops = Some(patterns.collect()); flags.trace_ops = Some(patterns.collect());
} }
} }
@ -6742,6 +6746,7 @@ mod tests {
}), }),
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6768,6 +6773,7 @@ mod tests {
}), }),
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6795,6 +6801,7 @@ mod tests {
}), }),
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6822,6 +6829,7 @@ mod tests {
}), }),
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6849,6 +6857,7 @@ mod tests {
}), }),
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6877,6 +6886,7 @@ mod tests {
}), }),
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6908,6 +6918,7 @@ mod tests {
}), }),
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6938,6 +6949,7 @@ mod tests {
}), }),
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6965,6 +6977,7 @@ mod tests {
}), }),
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -6993,6 +7006,7 @@ mod tests {
}), }),
bare: false, bare: false,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -7020,6 +7034,7 @@ mod tests {
}), }),
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -7059,6 +7074,7 @@ mod tests {
watch: None, watch: None,
bare: false, bare: false,
coverage_dir: Some("foo".to_string()), coverage_dir: Some("foo".to_string()),
print_task_list: false,
}), }),
code_cache_enabled: true, code_cache_enabled: true,
..Flags::default() ..Flags::default()
@ -7098,7 +7114,7 @@ mod tests {
); );
let r = flags_from_vec(svec!["deno", "run", "--v8-flags=--expose-gc"]); let r = flags_from_vec(svec!["deno", "run", "--v8-flags=--expose-gc"]);
assert!(r.is_err()); assert!(r.is_ok());
} }
#[test] #[test]
@ -7375,6 +7391,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
permissions: PermissionFlags { permissions: PermissionFlags {
deny_read: Some(vec![]), deny_read: Some(vec![]),
@ -8427,14 +8444,13 @@ mod tests {
} }
#[test] #[test]
fn repl_strace_ops() { fn repl_trace_ops() {
// Lightly test this undocumented flag // Lightly test this undocumented flag
let r = flags_from_vec(svec!["deno", "repl", "--strace-ops"]); let r = flags_from_vec(svec!["deno", "repl", "--trace-ops"]);
assert_eq!(r.unwrap().strace_ops, Some(vec![])); assert_eq!(r.unwrap().trace_ops, Some(vec![]));
let r = let r = flags_from_vec(svec!["deno", "repl", "--trace-ops=http,websocket"]);
flags_from_vec(svec!["deno", "repl", "--strace-ops=http,websocket"]);
assert_eq!( assert_eq!(
r.unwrap().strace_ops, r.unwrap().trace_ops,
Some(vec!["http".to_string(), "websocket".to_string()]) Some(vec!["http".to_string(), "websocket".to_string()])
); );
} }
@ -8666,6 +8682,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
permissions: PermissionFlags { permissions: PermissionFlags {
deny_net: Some(svec!["127.0.0.1"]), deny_net: Some(svec!["127.0.0.1"]),
@ -8854,6 +8871,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
permissions: PermissionFlags { permissions: PermissionFlags {
deny_sys: Some(svec!["hostname"]), deny_sys: Some(svec!["hostname"]),
@ -9154,6 +9172,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
..Flags::default() ..Flags::default()
} }
@ -9465,6 +9484,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
log_level: Some(Level::Error), log_level: Some(Level::Error),
code_cache_enabled: true, code_cache_enabled: true,
@ -9586,6 +9606,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
type_check_mode: TypeCheckMode::None, type_check_mode: TypeCheckMode::None,
code_cache_enabled: true, code_cache_enabled: true,
@ -9758,6 +9779,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
node_modules_dir: Some(NodeModulesDirMode::Auto), node_modules_dir: Some(NodeModulesDirMode::Auto),
code_cache_enabled: true, code_cache_enabled: true,
@ -10982,6 +11004,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
inspect_wait: Some("127.0.0.1:9229".parse().unwrap()), inspect_wait: Some("127.0.0.1:9229".parse().unwrap()),
code_cache_enabled: true, code_cache_enabled: true,
@ -11188,6 +11211,16 @@ mod tests {
let flags = flags_from_vec(svec!["deno", "lint"]).unwrap(); let flags = flags_from_vec(svec!["deno", "lint"]).unwrap();
assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()])); assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));
let flags = flags_from_vec(svec!["deno", "cache", "sub/test.js"]).unwrap();
assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.join("sub")]));
let flags = flags_from_vec(svec!["deno", "cache", "."]).unwrap();
assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.clone()]));
let flags =
flags_from_vec(svec!["deno", "install", "-e", "sub/test.js"]).unwrap();
assert_eq!(flags.config_path_args(&cwd), Some(vec![cwd.join("sub")]));
let flags = flags_from_vec(svec![ let flags = flags_from_vec(svec![
"deno", "deno",
"fmt", "fmt",
@ -11676,6 +11709,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
type_check_mode: TypeCheckMode::None, type_check_mode: TypeCheckMode::None,
code_cache_enabled: true, code_cache_enabled: true,
@ -12230,6 +12264,7 @@ mod tests {
watch: None, watch: None,
bare: true, bare: true,
coverage_dir: None, coverage_dir: None,
print_task_list: false,
}), }),
config_flag: ConfigFlag::Disabled, config_flag: ConfigFlag::Disabled,
code_cache_enabled: true, code_cache_enabled: true,
@ -12846,14 +12881,9 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
} }
); );
let flags = flags_from_vec(svec![ let flags =
"deno", flags_from_vec(svec!["deno", "run", "--preload", "data:,()", "main.ts"])
"run", .unwrap();
"--preload",
"p1.js,./p2.js",
"main.ts"
])
.unwrap();
assert_eq!( assert_eq!(
flags, flags,
Flags { Flags {
@ -12861,7 +12891,7 @@ Usage: deno repl [OPTIONS] [-- [ARGS]...]\n"
script: "main.ts".into(), script: "main.ts".into(),
..Default::default() ..Default::default()
}), }),
preload: svec!["p1.js", "./p2.js"], preload: svec!["data:,()"],
code_cache_enabled: true, code_cache_enabled: true,
..Default::default() ..Default::default()
} }


@ -22,6 +22,8 @@ use deno_config::deno_json::FmtConfig;
pub use deno_config::deno_json::FmtOptionsConfig; pub use deno_config::deno_json::FmtOptionsConfig;
pub use deno_config::deno_json::LintRulesConfig; pub use deno_config::deno_json::LintRulesConfig;
use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::NodeModulesDirMode;
use deno_config::deno_json::PermissionConfigValue;
use deno_config::deno_json::PermissionsObjectWithBase;
pub use deno_config::deno_json::ProseWrap; pub use deno_config::deno_json::ProseWrap;
use deno_config::deno_json::TestConfig; use deno_config::deno_json::TestConfig;
pub use deno_config::glob::FilePatterns; pub use deno_config::glob::FilePatterns;
@ -29,6 +31,7 @@ pub use deno_config::workspace::TsTypeLib;
use deno_config::workspace::Workspace; use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceDirLintConfig; use deno_config::workspace::WorkspaceDirLintConfig;
use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDirectoryRc;
use deno_config::workspace::WorkspaceLintConfig; use deno_config::workspace::WorkspaceLintConfig;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -45,13 +48,14 @@ use deno_npm_installer::LifecycleScriptsConfig;
use deno_npm_installer::graph::NpmCachingStrategy; use deno_npm_installer::graph::NpmCachingStrategy;
use deno_path_util::resolve_url_or_path; use deno_path_util::resolve_url_or_path;
use deno_resolver::factory::resolve_jsr_url; use deno_resolver::factory::resolve_jsr_url;
use deno_runtime::deno_permissions::AllowRunDescriptor;
use deno_runtime::deno_permissions::PathDescriptor;
use deno_runtime::deno_permissions::PermissionsOptions; use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::inspector_server::InspectorServer; use deno_runtime::inspector_server::InspectorServer;
use deno_semver::StackString; use deno_semver::StackString;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use deno_telemetry::OtelConfig; use deno_telemetry::OtelConfig;
use deno_terminal::colors; use deno_terminal::colors;
use dotenvy::from_filename;
pub use flags::*; pub use flags::*;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use thiserror::Error; use thiserror::Error;
@ -823,7 +827,7 @@ impl CliOptions {
pub fn resolve_fmt_options_for_members( pub fn resolve_fmt_options_for_members(
&self, &self,
fmt_flags: &FmtFlags, fmt_flags: &FmtFlags,
) -> Result<Vec<(WorkspaceDirectory, FmtOptions)>, AnyError> { ) -> Result<Vec<(WorkspaceDirectoryRc, FmtOptions)>, AnyError> {
let cli_arg_patterns = let cli_arg_patterns =
fmt_flags.files.as_file_patterns(self.initial_cwd())?; fmt_flags.files.as_file_patterns(self.initial_cwd())?;
let member_configs = self let member_configs = self
@ -857,7 +861,7 @@ impl CliOptions {
pub fn resolve_lint_options_for_members( pub fn resolve_lint_options_for_members(
&self, &self,
lint_flags: &LintFlags, lint_flags: &LintFlags,
) -> Result<Vec<(WorkspaceDirectory, LintOptions)>, AnyError> { ) -> Result<Vec<(WorkspaceDirectoryRc, LintOptions)>, AnyError> {
let cli_arg_patterns = let cli_arg_patterns =
lint_flags.files.as_file_patterns(self.initial_cwd())?; lint_flags.files.as_file_patterns(self.initial_cwd())?;
let member_configs = self let member_configs = self
@ -881,7 +885,7 @@ impl CliOptions {
pub fn resolve_test_options_for_members( pub fn resolve_test_options_for_members(
&self, &self,
test_flags: &TestFlags, test_flags: &TestFlags,
) -> Result<Vec<(WorkspaceDirectory, TestOptions)>, AnyError> { ) -> Result<Vec<(WorkspaceDirectoryRc, TestOptions)>, AnyError> {
let cli_arg_patterns = let cli_arg_patterns =
test_flags.files.as_file_patterns(self.initial_cwd())?; test_flags.files.as_file_patterns(self.initial_cwd())?;
let workspace_dir_configs = self let workspace_dir_configs = self
@ -905,7 +909,7 @@ impl CliOptions {
pub fn resolve_bench_options_for_members( pub fn resolve_bench_options_for_members(
&self, &self,
bench_flags: &BenchFlags, bench_flags: &BenchFlags,
) -> Result<Vec<(WorkspaceDirectory, BenchOptions)>, AnyError> { ) -> Result<Vec<(WorkspaceDirectoryRc, BenchOptions)>, AnyError> {
let cli_arg_patterns = let cli_arg_patterns =
bench_flags.files.as_file_patterns(self.initial_cwd())?; bench_flags.files.as_file_patterns(self.initial_cwd())?;
let workspace_dir_configs = self let workspace_dir_configs = self
@ -1018,54 +1022,84 @@ impl CliOptions {
self.flags.no_remote self.flags.no_remote
} }
pub fn permissions_options(&self) -> PermissionsOptions { pub fn permissions_options(&self) -> Result<PermissionsOptions, AnyError> {
// bury this in here to ensure people use cli_options.permissions_options() self.permissions_options_for_dir(&self.start_dir)
fn flags_to_options(flags: &PermissionFlags) -> PermissionsOptions { }
fn handle_allow<T: Default>(
allow_all: bool, pub fn permissions_options_for_dir(
value: Option<T>, &self,
) -> Option<T> { dir: &WorkspaceDirectory,
if allow_all { ) -> Result<PermissionsOptions, AnyError> {
assert!(value.is_none()); let config_permissions = self.resolve_config_permissions_for_dir(dir)?;
Some(T::default()) let mut permissions_options = flags_to_permissions_options(
} else { &self.flags.permissions,
value config_permissions,
)?;
self.augment_import_permissions(&mut permissions_options);
Ok(permissions_options)
}
fn resolve_config_permissions_for_dir<'a>(
&self,
dir: &'a WorkspaceDirectory,
) -> Result<Option<&'a PermissionsObjectWithBase>, AnyError> {
let config_permissions = if let Some(name) = &self.flags.permission_set {
if name.is_empty() {
let maybe_subcommand_permissions = match &self.flags.subcommand {
DenoSubcommand::Bench(_) => dir.to_bench_permissions_config()?,
DenoSubcommand::Compile(_) => dir.to_compile_permissions_config()?,
DenoSubcommand::Test(_) => dir.to_test_permissions_config()?,
_ => None,
};
match maybe_subcommand_permissions {
Some(permissions) => Some(permissions),
// do not error when the default set doesn't exist in order
// to allow providing `-P` unconditionally
None => dir.to_permissions_config()?.sets.get("default"),
}
} else {
Some(dir.to_permissions_config()?.get(name)?)
}
} else {
if !self.flags.has_permission() {
let set_config_permission_name = match &self.flags.subcommand {
DenoSubcommand::Bench(_) => dir
.to_bench_permissions_config()?
.is_some()
.then_some("Bench"),
DenoSubcommand::Compile(_) => dir
.to_compile_permissions_config()?
.is_some()
.then_some("Compile"),
DenoSubcommand::Test(_) => dir
.to_test_permissions_config()?
.is_some()
.then_some("Test"),
_ => None,
};
if let Some(name) = set_config_permission_name {
// prevent people from wasting time wondering why benches/tests are failing
bail!(
"{} permissions were found in the config file. Did you mean to run with `-P` or a permission flag?{}",
name,
dir
.maybe_deno_json()
.map(|d| format!("\n at {}", d.specifier))
.unwrap_or_default()
);
} }
} }
PermissionsOptions { None
allow_all: flags.allow_all, };
allow_env: handle_allow(flags.allow_all, flags.allow_env.clone()), Ok(config_permissions)
deny_env: flags.deny_env.clone(),
allow_net: handle_allow(flags.allow_all, flags.allow_net.clone()),
deny_net: flags.deny_net.clone(),
allow_ffi: handle_allow(flags.allow_all, flags.allow_ffi.clone()),
deny_ffi: flags.deny_ffi.clone(),
allow_read: handle_allow(flags.allow_all, flags.allow_read.clone()),
deny_read: flags.deny_read.clone(),
allow_run: handle_allow(flags.allow_all, flags.allow_run.clone()),
deny_run: flags.deny_run.clone(),
allow_sys: handle_allow(flags.allow_all, flags.allow_sys.clone()),
deny_sys: flags.deny_sys.clone(),
allow_write: handle_allow(flags.allow_all, flags.allow_write.clone()),
deny_write: flags.deny_write.clone(),
allow_import: handle_allow(flags.allow_all, flags.allow_import.clone()),
deny_import: flags.deny_import.clone(),
prompt: !resolve_no_prompt(flags),
}
}
let mut permissions_options = flags_to_options(&self.flags.permissions);
self.augment_import_permissions(&mut permissions_options);
permissions_options
} }
fn augment_import_permissions(&self, options: &mut PermissionsOptions) { fn augment_import_permissions(&self, options: &mut PermissionsOptions) {
// do not add if the user specified --allow-all or --allow-import // do not add if the user specified --allow-all or --allow-import
if !options.allow_all && options.allow_import.is_none() { if options.allow_import.is_none() {
options.allow_import = Some(self.implicit_allow_import()); options.allow_import = Some(self.implicit_allow_import());
} }
options.deny_import = options.deny_import.clone();
} }
fn implicit_allow_import(&self) -> Vec<String> { fn implicit_allow_import(&self) -> Vec<String> {
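
resolve_config_permissions_for_dir encodes the selection rules for config-file permission sets: an explicit -P name loads that named set, a bare -P prefers the subcommand-specific set for bench/compile/test and otherwise falls back to the set named "default", and when neither -P nor any permission flag is passed but the config does define bench/compile/test permissions, the command errors with a hint to use -P. A simplified model of that decision, with stand-in types instead of deno_config's WorkspaceDirectory and PermissionsObjectWithBase:

    use std::collections::HashMap;

    // Stand-in result type; the real code returns an optional
    // PermissionsObjectWithBase or bails with an error message.
    #[derive(Debug, PartialEq)]
    enum Selection<'a> {
      Set(&'a str),     // use this named set from deno.json
      None,             // fall back to flags / prompts only
      ErrHint(&'a str), // error: config defines permissions, suggest `-P`
    }

    fn select_set<'a>(
      permission_set_flag: Option<&'a str>, // None = no -P, Some("") = bare -P
      has_permission_flags: bool,           // e.g. --allow-read was passed
      subcommand_set: Option<&'a str>,      // bench/compile/test set, if defined
      sets: &'a HashMap<String, String>,    // named sets in the config
    ) -> Selection<'a> {
      match permission_set_flag {
        Some("") => subcommand_set
          .map(Selection::Set)
          .or_else(|| sets.get("default").map(|_| Selection::Set("default")))
          .unwrap_or(Selection::None),
        Some(name) => Selection::Set(name), // real code errors if the set is missing
        None if !has_permission_flags && subcommand_set.is_some() => {
          Selection::ErrHint("permissions found in config; did you mean `-P`?")
        }
        None => Selection::None,
      }
    }

    fn main() {
      let sets: HashMap<String, String> =
        [("default".to_string(), "…".to_string())].into();
      // Bare -P with no subcommand set picks the "default" set.
      assert_eq!(select_set(Some(""), false, None, &sets), Selection::Set("default"));
      // Explicit permission flags suppress the "did you mean -P" hint.
      assert_eq!(select_set(None, true, Some("test"), &sets), Selection::None);
    }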
@ -1151,8 +1185,8 @@ impl CliOptions {
&self.flags.subcommand &self.flags.subcommand
} }
pub fn strace_ops(&self) -> &Option<Vec<String>> { pub fn trace_ops(&self) -> &Option<Vec<String>> {
&self.flags.strace_ops &self.flags.trace_ops
} }
pub fn take_binary_npm_command_name(&self) -> Option<String> { pub fn take_binary_npm_command_name(&self) -> Option<String> {
@ -1279,6 +1313,15 @@ impl CliOptions {
full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path))); full_paths.extend(paths.iter().map(|path| self.initial_cwd.join(path)));
} }
if let Some(env_file_names) = &self.flags.env_file {
// Only watch the exact environment files specified
full_paths.extend(
env_file_names
.iter()
.map(|name| self.initial_cwd.join(name)),
);
}
if let Ok(Some(import_map_path)) = self if let Ok(Some(import_map_path)) = self
.resolve_specified_import_map_specifier() .resolve_specified_import_map_specifier()
.map(|ms| ms.and_then(|ref s| s.to_file_path().ok())) .map(|ms| ms.and_then(|ref s| s.to_file_path().ok()))
@ -1297,6 +1340,7 @@ impl CliOptions {
full_paths.push(pkg_json.path.clone()); full_paths.push(pkg_json.path.clone());
} }
} }
full_paths full_paths
} }
@ -1421,53 +1465,6 @@ pub fn config_to_deno_graph_workspace_member(
}) })
} }
pub fn load_env_variables_from_env_file(
filename: Option<&Vec<String>>,
flags_log_level: Option<log::Level>,
) {
let Some(env_file_names) = filename else {
return;
};
for env_file_name in env_file_names.iter().rev() {
match from_filename(env_file_name) {
Ok(_) => (),
Err(error) => {
#[allow(clippy::print_stderr)]
if flags_log_level
.map(|l| l >= log::Level::Info)
.unwrap_or(true)
{
match error {
dotenvy::Error::LineParse(line, index) => eprintln!(
"{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",
colors::yellow("Warning"),
env_file_name,
index,
line
),
dotenvy::Error::Io(_) => eprintln!(
"{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",
colors::yellow("Warning"),
env_file_name
),
dotenvy::Error::EnvVar(_) => eprintln!(
"{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",
colors::yellow("Warning"),
env_file_name
),
_ => eprintln!(
"{} Unknown failure occurred with the specified environment file: {}",
colors::yellow("Warning"),
env_file_name
),
}
}
}
}
}
}
pub fn get_default_v8_flags() -> Vec<String> { pub fn get_default_v8_flags() -> Vec<String> {
vec![ vec![
"--stack-size=1024".to_string(), "--stack-size=1024".to_string(),
@ -1507,8 +1504,213 @@ fn allow_import_host_from_url(url: &Url) -> Option<String> {
} }
} }
// DO NOT make this public. People should use `cli_options.permissions_options/permissions_options_for_dir`
fn flags_to_permissions_options(
flags: &PermissionFlags,
mut config: Option<&PermissionsObjectWithBase>,
) -> Result<PermissionsOptions, AnyError> {
fn handle_allow(
allow_all_flag: bool,
allow_all_config: Option<bool>,
value: Option<&Vec<String>>,
config: Option<&PermissionConfigValue>,
parse_config_value: &impl Fn(&str) -> String,
) -> Option<Vec<String>> {
if allow_all_flag {
Some(vec![])
} else if let Some(value) = value {
Some(value.clone())
} else if let Some(config) = config {
match config {
PermissionConfigValue::All => Some(vec![]),
PermissionConfigValue::Some(items) => {
if items.is_empty() {
None
} else {
Some(
items
.iter()
.map(|value| parse_config_value(value))
.collect(),
)
}
}
PermissionConfigValue::None => None,
}
} else if allow_all_config == Some(true) {
Some(vec![])
} else {
None
}
}
fn handle_deny(
value: Option<&Vec<String>>,
config: Option<&PermissionConfigValue>,
parse_config_value: &impl Fn(&str) -> String,
) -> Option<Vec<String>> {
if let Some(value) = value {
Some(value.clone())
} else if let Some(config) = config {
match config {
PermissionConfigValue::All => Some(vec![]),
PermissionConfigValue::Some(items) => Some(
items
.iter()
.map(|value| parse_config_value(value))
.collect(),
),
PermissionConfigValue::None => None,
}
} else {
None
}
}
if flags.allow_all {
config = None;
}
let config_dir = match &config {
Some(config) => {
let mut path = deno_path_util::url_to_file_path(&config.base)?;
path.pop();
Some(path)
}
None => None,
};
let make_fs_config_value_absolute = |value: &str| match &config_dir {
Some(dir_path) => {
PathDescriptor::new_known_cwd(Cow::Borrowed(Path::new(value)), dir_path)
.into_path_buf()
.into_os_string()
.into_string()
.unwrap()
}
None => value.to_string(),
};
let make_run_config_value_absolute = |value: &str| match &config_dir {
Some(dir_path) => {
if AllowRunDescriptor::is_path(value) {
PathDescriptor::new_known_cwd(Cow::Borrowed(Path::new(value)), dir_path)
.into_path_buf()
.into_os_string()
.into_string()
.unwrap()
} else {
value.to_string()
}
}
None => value.to_string(),
};
let no_op = |value: &str| value.to_string();
Ok(PermissionsOptions {
allow_env: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_env.as_ref(),
config.and_then(|c| c.permissions.env.allow.as_ref()),
&no_op,
),
deny_env: handle_deny(
flags.deny_env.as_ref(),
config.and_then(|c| c.permissions.env.deny.as_ref()),
&no_op,
),
allow_net: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_net.as_ref(),
config.and_then(|c| c.permissions.net.allow.as_ref()),
&no_op,
),
deny_net: handle_deny(
flags.deny_net.as_ref(),
config.and_then(|c| c.permissions.net.deny.as_ref()),
&no_op,
),
allow_ffi: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_ffi.as_ref(),
config.and_then(|c| c.permissions.ffi.allow.as_ref()),
&make_fs_config_value_absolute,
),
deny_ffi: handle_deny(
flags.deny_ffi.as_ref(),
config.and_then(|c| c.permissions.ffi.deny.as_ref()),
&make_fs_config_value_absolute,
),
allow_read: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_read.as_ref(),
config.and_then(|c| c.permissions.read.allow.as_ref()),
&make_fs_config_value_absolute,
),
deny_read: handle_deny(
flags.deny_read.as_ref(),
config.and_then(|c| c.permissions.read.deny.as_ref()),
&make_fs_config_value_absolute,
),
allow_run: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_run.as_ref(),
config.and_then(|c| c.permissions.run.allow.as_ref()),
&make_run_config_value_absolute,
),
deny_run: handle_deny(
flags.deny_run.as_ref(),
config.and_then(|c| c.permissions.run.deny.as_ref()),
&make_run_config_value_absolute,
),
allow_sys: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_sys.as_ref(),
config.and_then(|c| c.permissions.sys.allow.as_ref()),
&no_op,
),
deny_sys: handle_deny(
flags.deny_sys.as_ref(),
config.and_then(|c| c.permissions.sys.deny.as_ref()),
&no_op,
),
allow_write: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_write.as_ref(),
config.and_then(|c| c.permissions.write.allow.as_ref()),
&make_fs_config_value_absolute,
),
deny_write: handle_deny(
flags.deny_write.as_ref(),
config.and_then(|c| c.permissions.write.deny.as_ref()),
&make_fs_config_value_absolute,
),
allow_import: handle_allow(
flags.allow_all,
config.and_then(|c| c.permissions.all),
flags.allow_import.as_ref(),
config.and_then(|c| c.permissions.import.allow.as_ref()),
&no_op,
),
deny_import: handle_deny(
flags.deny_import.as_ref(),
config.and_then(|c| c.permissions.import.deny.as_ref()),
&no_op,
),
prompt: !resolve_no_prompt(flags),
})
}
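
flags_to_permissions_options merges CLI flags with the selected config set using a fixed precedence: --allow-all wins outright (and discards the config set), then an explicit allow flag, then the config entry for that permission (with relative paths resolved against the directory containing the deno.json it came from), and finally the set-wide "all": true. A compact sketch of that ordering over plain vectors; it ignores the All/Some/None distinction and the path normalization:

    // Simplified precedence sketch; the real handle_allow also distinguishes
    // the config's All/Some/None states and resolves relative paths against
    // the directory that contains the deno.json the set came from.
    fn merge_allow(
      allow_all_flag: bool,
      flag_value: Option<Vec<String>>,
      config_value: Option<Vec<String>>,
      allow_all_config: bool,
    ) -> Option<Vec<String>> {
      if allow_all_flag {
        Some(vec![]) // empty list = unrestricted
      } else if flag_value.is_some() {
        flag_value
      } else if config_value.is_some() {
        config_value
      } else if allow_all_config {
        Some(vec![])
      } else {
        None
      }
    }

    fn main() {
      // A CLI flag overrides the config set for that permission...
      assert_eq!(
        merge_allow(
          false,
          Some(vec!["./folder".into()]),
          Some(vec!["./read-allow".into()]),
          true,
        ),
        Some(vec!["./folder".to_string()])
      );
      // ...and `"all": true` only applies when nothing more specific is given.
      assert_eq!(merge_allow(false, None, None, true), Some(vec![]));
    }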
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use deno_config::deno_json::AllowDenyPermissionConfig;
use deno_config::deno_json::PermissionsObject;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use super::*; use super::*;
@ -1583,4 +1785,221 @@ mod test {
); );
assert_eq!(parse("file:///example.com"), None); assert_eq!(parse("file:///example.com"), None);
} }
#[test]
fn test_flags_to_permission_options() {
let base_dir = std::env::current_dir().unwrap().join("sub");
{
let flags = PermissionFlags::default();
let config = PermissionsObjectWithBase {
base: deno_path_util::url_from_file_path(&base_dir.join("deno.json"))
.unwrap(),
permissions: PermissionsObject {
all: None,
read: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
".".to_string(),
"./read-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"./read-deny".to_string(),
])),
},
write: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"./write-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"./write-deny".to_string(),
])),
},
import: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"jsr.io".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"example.com".to_string(),
])),
},
env: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"env-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"env-deny".to_string(),
])),
},
net: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"net-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"net-deny".to_string(),
])),
},
run: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"run-allow".to_string(),
"./relative-run-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"run-deny".to_string(),
"./relative-run-deny".to_string(),
])),
},
ffi: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"./ffi-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"./ffi-deny".to_string(),
])),
},
sys: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"sys-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"sys-deny".to_string(),
])),
},
},
};
let permissions_options =
flags_to_permissions_options(&flags, Some(&config)).unwrap();
assert_eq!(
permissions_options,
PermissionsOptions {
allow_env: Some(vec!["env-allow".to_string()]),
deny_env: Some(vec!["env-deny".to_string()]),
allow_net: Some(vec!["net-allow".to_string()]),
deny_net: Some(vec!["net-deny".to_string()]),
allow_ffi: Some(vec![
base_dir
.join("ffi-allow")
.into_os_string()
.into_string()
.unwrap()
]),
deny_ffi: Some(vec![
base_dir
.join("ffi-deny")
.into_os_string()
.into_string()
.unwrap()
]),
allow_read: Some(vec![
base_dir.clone().into_os_string().into_string().unwrap(),
base_dir
.join("read-allow")
.into_os_string()
.into_string()
.unwrap()
]),
deny_read: Some(vec![
base_dir
.join("read-deny")
.into_os_string()
.into_string()
.unwrap()
]),
allow_run: Some(vec![
"run-allow".to_string(),
base_dir
.join("relative-run-allow")
.into_os_string()
.into_string()
.unwrap()
]),
deny_run: Some(vec![
"run-deny".to_string(),
base_dir
.join("relative-run-deny")
.into_os_string()
.into_string()
.unwrap()
]),
allow_sys: Some(vec!["sys-allow".to_string()]),
deny_sys: Some(vec!["sys-deny".to_string()]),
allow_write: Some(vec![
base_dir
.join("write-allow")
.into_os_string()
.into_string()
.unwrap()
]),
deny_write: Some(vec![
base_dir
.join("write-deny")
.into_os_string()
.into_string()
.unwrap()
]),
allow_import: Some(vec!["jsr.io".to_string()]),
deny_import: Some(vec!["example.com".to_string()]),
prompt: true
}
);
}
{
let flags = PermissionFlags {
allow_read: Some(vec!["./folder".to_string()]),
..Default::default()
};
let config = PermissionsObjectWithBase {
base: deno_path_util::url_from_file_path(&base_dir.join("deno.json"))
.unwrap(),
permissions: PermissionsObject {
// will use all permissions except for the explicitly specified permissions
// and the explicit flag will replace
all: Some(true),
write: AllowDenyPermissionConfig {
allow: Some(PermissionConfigValue::Some(vec![
"./write-allow".to_string(),
])),
deny: Some(PermissionConfigValue::Some(vec![
"./write-deny".to_string(),
])),
},
..Default::default()
},
};
let permissions_options =
flags_to_permissions_options(&flags, Some(&config)).unwrap();
assert_eq!(
permissions_options,
PermissionsOptions {
allow_env: Some(vec![]),
deny_env: None,
allow_net: Some(vec![]),
deny_net: None,
allow_ffi: Some(vec![]),
deny_ffi: None,
allow_read: Some(vec!["./folder".to_string()]),
deny_read: None,
allow_run: Some(vec![]),
deny_run: None,
allow_sys: Some(vec![]),
deny_sys: None,
allow_write: Some(vec![
base_dir
.join("write-allow")
.into_os_string()
.into_string()
.unwrap()
]),
deny_write: Some(vec![
base_dir
.join("write-deny")
.into_os_string()
.into_string()
.unwrap()
]),
allow_import: Some(vec![]),
deny_import: None,
prompt: true
}
);
}
}
} }


@ -6,6 +6,7 @@ use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_bundle_runtime::BundlePlatform;
use deno_cache_dir::GlobalOrLocalHttpCache; use deno_cache_dir::GlobalOrLocalHttpCache;
use deno_cache_dir::npm::NpmCacheDir; use deno_cache_dir::npm::NpmCacheDir;
use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDirectory;
@ -38,7 +39,6 @@ use deno_resolver::factory::ConfigDiscoveryOption;
use deno_resolver::factory::NpmProcessStateOptions; use deno_resolver::factory::NpmProcessStateOptions;
use deno_resolver::factory::ResolverFactoryOptions; use deno_resolver::factory::ResolverFactoryOptions;
use deno_resolver::factory::SpecifiedImportMapProvider; use deno_resolver::factory::SpecifiedImportMapProvider;
use deno_resolver::factory::WorkspaceDirectoryProvider;
use deno_resolver::import_map::WorkspaceExternalImportMapLoader; use deno_resolver::import_map::WorkspaceExternalImportMapLoader;
use deno_resolver::loader::MemoryFiles; use deno_resolver::loader::MemoryFiles;
use deno_resolver::npm::DenoInNpmPackageChecker; use deno_resolver::npm::DenoInNpmPackageChecker;
@ -60,7 +60,6 @@ use once_cell::sync::OnceCell;
use sys_traits::EnvCurrentDir; use sys_traits::EnvCurrentDir;
use crate::args::BundleFlags; use crate::args::BundleFlags;
use crate::args::BundlePlatform;
use crate::args::CliLockfile; use crate::args::CliLockfile;
use crate::args::CliOptions; use crate::args::CliOptions;
use crate::args::ConfigFlag; use crate::args::ConfigFlag;
@ -563,10 +562,12 @@ impl CliFactory {
self.text_only_progress_bar().clone(), self.text_only_progress_bar().clone(),
)), )),
match resolver_factory.npm_resolver()?.as_managed() { match resolver_factory.npm_resolver()?.as_managed() {
Some(managed_npm_resolver) => Arc::new( Some(managed_npm_resolver) => {
DenoTaskLifeCycleScriptsExecutor::new(managed_npm_resolver.clone()), Arc::new(DenoTaskLifeCycleScriptsExecutor::new(
) managed_npm_resolver.clone(),
as Arc<dyn LifecycleScriptsExecutor>, self.text_only_progress_bar().clone(),
)) as Arc<dyn LifecycleScriptsExecutor>
}
None => Arc::new(NullLifecycleScriptsExecutor), None => Arc::new(NullLifecycleScriptsExecutor),
}, },
self.text_only_progress_bar().clone(), self.text_only_progress_bar().clone(),
@ -596,7 +597,9 @@ impl CliFactory {
.services .services
.install_reporter .install_reporter
.get_or_try_init(|| match self.cli_options()?.sub_command() { .get_or_try_init(|| match self.cli_options()?.sub_command() {
DenoSubcommand::Install(InstallFlags::Local(_)) => Ok(Some(Arc::new( DenoSubcommand::Install(InstallFlags::Local(_))
| DenoSubcommand::Add(_)
| DenoSubcommand::Cache(_) => Ok(Some(Arc::new(
crate::tools::installer::InstallReporter::new(), crate::tools::installer::InstallReporter::new(),
))), ))),
_ => Ok(None), _ => Ok(None),
@ -738,7 +741,6 @@ impl CliFactory {
self.node_resolver().await?.clone(), self.node_resolver().await?.clone(),
self.npm_resolver().await?.clone(), self.npm_resolver().await?.clone(),
self.sys(), self.sys(),
self.workspace_directory_provider()?.clone(),
self.compiler_options_resolver()?.clone(), self.compiler_options_resolver()?.clone(),
if cli_options.code_cache_enabled() { if cli_options.code_cache_enabled() {
Some(self.code_cache()?.clone()) Some(self.code_cache()?.clone())
@ -919,19 +921,13 @@ impl CliFactory {
let desc_parser = self.permission_desc_parser()?.clone(); let desc_parser = self.permission_desc_parser()?.clone();
let permissions = Permissions::from_options( let permissions = Permissions::from_options(
desc_parser.as_ref(), desc_parser.as_ref(),
&self.cli_options()?.permissions_options(), &self.cli_options()?.permissions_options()?,
)?; )?;
Ok(PermissionsContainer::new(desc_parser, permissions)) Ok(PermissionsContainer::new(desc_parser, permissions))
}) })
} }
fn workspace_directory_provider(
&self,
) -> Result<&Arc<WorkspaceDirectoryProvider>, AnyError> {
Ok(self.workspace_factory()?.workspace_directory_provider()?)
}
fn workspace_external_import_map_loader( fn workspace_external_import_map_loader(
&self, &self,
) -> Result<&Arc<WorkspaceExternalImportMapLoader<CliSys>>, AnyError> { ) -> Result<&Arc<WorkspaceExternalImportMapLoader<CliSys>>, AnyError> {
@ -1040,6 +1036,9 @@ impl CliFactory {
self.sys(), self.sys(),
self.create_lib_main_worker_options()?, self.create_lib_main_worker_options()?,
roots, roots,
Some(Arc::new(crate::tools::bundle::CliBundleProvider::new(
self.flags.clone(),
))),
); );
Ok(CliMainWorkerFactory::new( Ok(CliMainWorkerFactory::new(
@ -1048,6 +1047,7 @@ impl CliFactory {
self.maybe_lockfile().await?.cloned(), self.maybe_lockfile().await?.cloned(),
self.npm_installer_if_managed().await?.cloned(), self.npm_installer_if_managed().await?.cloned(),
npm_resolver.clone(), npm_resolver.clone(),
self.text_only_progress_bar().clone(),
self.sys(), self.sys(),
self.create_cli_main_worker_options()?, self.create_cli_main_worker_options()?,
self.root_permissions_container()?.clone(), self.root_permissions_container()?.clone(),
@ -1073,7 +1073,7 @@ impl CliFactory {
.is_some(), .is_some(),
inspect_brk: cli_options.inspect_brk().is_some(), inspect_brk: cli_options.inspect_brk().is_some(),
inspect_wait: cli_options.inspect_wait().is_some(), inspect_wait: cli_options.inspect_wait().is_some(),
strace_ops: cli_options.strace_ops().clone(), trace_ops: cli_options.trace_ops().clone(),
is_standalone: false, is_standalone: false,
auto_serve: std::env::var("DENO_AUTO_SERVE").is_ok(), auto_serve: std::env::var("DENO_AUTO_SERVE").is_ok(),
is_inspecting: cli_options.is_inspecting(), is_inspecting: cli_options.is_inspecting(),
@ -1290,6 +1290,10 @@ fn new_workspace_factory_options(
is_byonm: matches!(s.kind, NpmProcessStateKind::Byonm), is_byonm: matches!(s.kind, NpmProcessStateKind::Byonm),
}, },
), ),
root_node_modules_dir_override: flags
.internal
.root_node_modules_dir_override
.clone(),
vendor: flags.vendor, vendor: flags.vendor,
} }
} }


@ -7,6 +7,7 @@ import { escapeName, withPermissions } from "ext:cli/40_test_common.js";
const { const {
op_register_test_step, op_register_test_step,
op_register_test, op_register_test,
op_register_test_hook,
op_test_event_step_result_failed, op_test_event_step_result_failed,
op_test_event_step_result_ignored, op_test_event_step_result_ignored,
op_test_event_step_result_ok, op_test_event_step_result_ok,
@ -344,6 +345,35 @@ test.only = function (
return testInner(nameOrFnOrOptions, optionsOrFn, maybeFn, { only: true }); return testInner(nameOrFnOrOptions, optionsOrFn, maybeFn, { only: true });
}; };
function registerHook(hookType, fn) {
// No-op if we're not running in `deno test` subcommand.
if (typeof op_register_test_hook !== "function") {
return;
}
if (typeof fn !== "function") {
throw new TypeError(`Expected a function for ${hookType} hook`);
}
op_register_test_hook(hookType, fn);
}
test.beforeAll = function (fn) {
registerHook("beforeAll", fn);
};
test.beforeEach = function (fn) {
registerHook("beforeEach", fn);
};
test.afterEach = function (fn) {
registerHook("afterEach", fn);
};
test.afterAll = function (fn) {
registerHook("afterAll", fn);
};
function getFullName(desc) { function getFullName(desc) {
if ("parent" in desc) { if ("parent" in desc) {
return `${getFullName(desc.parent)} ... ${desc.name}`; return `${getFullName(desc.parent)} ... ${desc.name}`;
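
The new test.beforeAll/beforeEach/afterEach/afterAll hooks are registered through op_register_test_hook. A simplified model, in Rust, of the ordering these hook names imply (once-per-file hooks wrapped around per-test hooks); this is an illustration of the intended semantics, not the actual runner implementation:

    // Stand-in for the hook registry that op_register_test_hook feeds.
    struct Hooks {
      before_all: Vec<fn()>,
      before_each: Vec<fn()>,
      after_each: Vec<fn()>,
      after_all: Vec<fn()>,
    }

    fn run_tests(hooks: &Hooks, tests: &[fn()]) {
      for h in &hooks.before_all { h(); }
      for t in tests {
        for h in &hooks.before_each { h(); }
        t();
        for h in &hooks.after_each { h(); }
      }
      for h in &hooks.after_all { h(); }
    }

    fn main() {
      let hooks = Hooks {
        before_all: vec![|| println!("before all")],
        before_each: vec![|| println!("before each")],
        after_each: vec![|| println!("after each")],
        after_all: vec![|| println!("after all")],
      };
      run_tests(&hooks, &[|| println!("test 1"), || println!("test 2")]);
    }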


@ -5,7 +5,9 @@ use std::sync::Arc;
use dashmap::DashMap; use dashmap::DashMap;
use deno_core::serde_json; use deno_core::serde_json;
use deno_graph::packages::JsrPackageInfo; use deno_graph::packages::JsrPackageInfo;
use deno_graph::packages::JsrPackageInfoVersion;
use deno_graph::packages::JsrPackageVersionInfo; use deno_graph::packages::JsrPackageVersionInfo;
use deno_semver::Version;
use deno_semver::package::PackageNv; use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq; use deno_semver::package::PackageReq;
@ -24,6 +26,21 @@ pub struct JsrFetchResolver {
file_fetcher: Arc<CliFileFetcher>, file_fetcher: Arc<CliFileFetcher>,
} }
fn select_version<'a, I>(versions: I, req: &PackageReq) -> Option<Version>
where
I: IntoIterator<Item = (&'a Version, &'a JsrPackageInfoVersion)>,
{
let mut versions = versions.into_iter().collect::<Vec<_>>();
versions.sort_by_key(|(v, _)| *v);
versions
.into_iter()
.rev()
.find(|(v, i)| {
!i.yanked && req.version_req.tag().is_none() && req.version_req.matches(v)
})
.map(|(v, _)| v.clone())
}
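
select_version extracts the version-picking loop so it can run against both the cached package info and a force-refreshed copy: sort the versions, walk them newest first, skip yanked releases, and take the first one the request matches (requests that use a dist-tag never match). A compact sketch of the same selection with tuple versions and a plain closure standing in for deno_semver's Version and VersionReq:

    // Simplified sketch of the extracted selection: newest first, skip yanked,
    // take the first match. Versions are modeled as (major, minor, patch)
    // tuples instead of deno_semver::Version.
    fn select_version<F>(
      versions: &[((u32, u32, u32), bool)], // (version, yanked)
      matches: F,
    ) -> Option<(u32, u32, u32)>
    where
      F: Fn(&(u32, u32, u32)) -> bool,
    {
      let mut versions = versions.to_vec();
      versions.sort_by_key(|(v, _)| *v);
      versions
        .into_iter()
        .rev()
        .find(|(v, yanked)| !yanked && matches(v))
        .map(|(v, _)| v)
    }

    fn main() {
      let available = [((1, 2, 0), false), ((1, 3, 0), true), ((1, 1, 0), false)];
      // Request "^1": 1.3.0 is yanked, so the newest acceptable version is 1.2.0.
      let picked = select_version(&available, |v| v.0 == 1);
      assert_eq!(picked, Some((1, 2, 0)));
    }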
impl JsrFetchResolver { impl JsrFetchResolver {
pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self { pub fn new(file_fetcher: Arc<CliFileFetcher>) -> Self {
Self { Self {
@ -40,32 +57,74 @@ impl JsrFetchResolver {
} }
let maybe_get_nv = || async { let maybe_get_nv = || async {
let name = req.name.clone(); let name = req.name.clone();
let package_info = self.package_info(&name).await?; let package_info = self.package_info(&name).await;
if package_info.is_none() {
log::debug!("no package info found for jsr:{name}");
return None;
}
let package_info = package_info?;
// Find the first matching version of the package. // Find the first matching version of the package.
let mut versions = package_info.versions.iter().collect::<Vec<_>>(); let version = select_version(&package_info.versions, req);
versions.sort_by_key(|(v, _)| *v); let version = if let Some(version) = version {
let version = versions version
.into_iter() } else {
.rev() let info = self.force_refresh_package_info(&name).await;
.find(|(v, i)| { let Some(info) = info else {
!i.yanked log::debug!("no package info found for jsr:{name}");
&& req.version_req.tag().is_none() return None;
&& req.version_req.matches(v) };
}) let version = select_version(&info.versions, req);
.map(|(v, _)| v.clone())?; let Some(version) = version else {
log::debug!("no matching version found for jsr:{req}");
return None;
};
version
};
Some(PackageNv { name, version }) Some(PackageNv { name, version })
}; };
let nv = maybe_get_nv().await; let nv = maybe_get_nv().await;
self.nv_by_req.insert(req.clone(), nv.clone()); self.nv_by_req.insert(req.clone(), nv.clone());
nv nv
} }
pub async fn force_refresh_package_info(
&self,
name: &str,
) -> Option<Arc<JsrPackageInfo>> {
let meta_url = self.meta_url(name)?;
let file_fetcher = self.file_fetcher.clone();
let file = file_fetcher
.fetch_with_options(
&meta_url,
deno_resolver::file_fetcher::FetchPermissionsOptionRef::AllowAll,
deno_resolver::file_fetcher::FetchOptions {
maybe_cache_setting: Some(
&deno_cache_dir::file_fetcher::CacheSetting::ReloadAll,
),
..Default::default()
},
)
.await
.ok()?;
let info = serde_json::from_slice::<JsrPackageInfo>(&file.source).ok()?;
let info = Arc::new(info);
self
.info_by_name
.insert(name.to_string(), Some(info.clone()));
Some(info)
}
fn meta_url(&self, name: &str) -> Option<deno_core::url::Url> {
jsr_url().join(&format!("{}/meta.json", name)).ok()
}
pub async fn package_info(&self, name: &str) -> Option<Arc<JsrPackageInfo>> { pub async fn package_info(&self, name: &str) -> Option<Arc<JsrPackageInfo>> {
if let Some(info) = self.info_by_name.get(name) { if let Some(info) = self.info_by_name.get(name) {
return info.value().clone(); return info.value().clone();
} }
let fetch_package_info = || async { let fetch_package_info = || async {
let meta_url = jsr_url().join(&format!("{}/meta.json", name)).ok()?; let meta_url = self.meta_url(name)?;
let file = self let file = self
.file_fetcher .file_fetcher
.fetch_bypass_permissions(&meta_url) .fetch_bypass_permissions(&meta_url)

View file

@ -18,6 +18,7 @@ arc-swap.workspace = true
aws-lc-rs.workspace = true aws-lc-rs.workspace = true
base64.workspace = true base64.workspace = true
capacity_builder.workspace = true capacity_builder.workspace = true
deno_bundle_runtime.workspace = true
deno_error.workspace = true deno_error.workspace = true
deno_fs = { workspace = true, features = ["sync_fs"] } deno_fs = { workspace = true, features = ["sync_fs"] }
deno_media_type.workspace = true deno_media_type.workspace = true

View file

@ -26,14 +26,16 @@ impl<T> InfallibleResultExt<T> for Result<T, Infallible> {
pub fn js_error_downcast_ref( pub fn js_error_downcast_ref(
err: &AnyError, err: &AnyError,
) -> Option<&deno_runtime::deno_core::error::JsError> { ) -> Option<&deno_runtime::deno_core::error::JsError> {
any_and_jserrorbox_downcast_ref(err).or_else(|| { any_and_jserrorbox_downcast_ref(err)
err .or_else(|| {
.downcast_ref::<CoreError>() err
.and_then(|e| match e.as_kind() { .downcast_ref::<CoreError>()
CoreErrorKind::Js(e) => Some(e), .and_then(|e| match e.as_kind() {
_ => None, CoreErrorKind::Js(e) => Some(e),
}) _ => None,
}) })
})
.map(|v| &**v)
} }
pub fn any_and_jserrorbox_downcast_ref< pub fn any_and_jserrorbox_downcast_ref<

View file

@ -14,7 +14,7 @@ pub fn otel_runtime_config() -> OtelRuntimeConfig {
} }
const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH"); const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH");
const TYPESCRIPT: &str = "5.8.3"; const TYPESCRIPT: &str = "5.9.2";
pub const DENO_VERSION: &str = env!("DENO_VERSION"); pub const DENO_VERSION: &str = env!("DENO_VERSION");
// TODO(bartlomieju): ideally we could remove this const. // TODO(bartlomieju): ideally we could remove this const.
const IS_CANARY: bool = option_env!("DENO_CANARY").is_some(); const IS_CANARY: bool = option_env!("DENO_CANARY").is_some();

View file

@ -5,6 +5,7 @@ use std::path::PathBuf;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use deno_bundle_runtime::BundleProvider;
use deno_core::error::JsError; use deno_core::error::JsError;
use deno_node::NodeRequireLoaderRc; use deno_node::NodeRequireLoaderRc;
use deno_path_util::url_from_file_path; use deno_path_util::url_from_file_path;
@ -330,7 +331,7 @@ pub struct LibMainWorkerOptions {
pub has_node_modules_dir: bool, pub has_node_modules_dir: bool,
pub inspect_brk: bool, pub inspect_brk: bool,
pub inspect_wait: bool, pub inspect_wait: bool,
pub strace_ops: Option<Vec<String>>, pub trace_ops: Option<Vec<String>>,
pub is_inspecting: bool, pub is_inspecting: bool,
/// If this is a `deno compile`-ed executable. /// If this is a `deno compile`-ed executable.
pub is_standalone: bool, pub is_standalone: bool,
@ -376,6 +377,7 @@ struct LibWorkerFactorySharedState<TSys: DenoLibSys> {
storage_key_resolver: StorageKeyResolver, storage_key_resolver: StorageKeyResolver,
sys: TSys, sys: TSys,
options: LibMainWorkerOptions, options: LibMainWorkerOptions,
bundle_provider: Option<Arc<dyn BundleProvider>>,
} }
impl<TSys: DenoLibSys> LibWorkerFactorySharedState<TSys> { impl<TSys: DenoLibSys> LibWorkerFactorySharedState<TSys> {
@ -504,7 +506,7 @@ impl<TSys: DenoLibSys> LibWorkerFactorySharedState<TSys> {
worker_type: args.worker_type, worker_type: args.worker_type,
stdio: stdio.clone(), stdio: stdio.clone(),
cache_storage_dir, cache_storage_dir,
strace_ops: shared.options.strace_ops.clone(), trace_ops: shared.options.trace_ops.clone(),
close_on_idle: args.close_on_idle, close_on_idle: args.close_on_idle,
maybe_worker_metadata: args.maybe_worker_metadata, maybe_worker_metadata: args.maybe_worker_metadata,
enable_raw_imports: shared.options.enable_raw_imports, enable_raw_imports: shared.options.enable_raw_imports,
@ -540,6 +542,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
sys: TSys, sys: TSys,
options: LibMainWorkerOptions, options: LibMainWorkerOptions,
roots: LibWorkerFactoryRoots, roots: LibWorkerFactoryRoots,
bundle_provider: Option<Arc<dyn BundleProvider>>,
) -> Self { ) -> Self {
Self { Self {
shared: Arc::new(LibWorkerFactorySharedState { shared: Arc::new(LibWorkerFactorySharedState {
@ -560,6 +563,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
storage_key_resolver, storage_key_resolver,
sys, sys,
options, options,
bundle_provider,
}), }),
} }
} }
@ -646,6 +650,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
feature_checker, feature_checker,
permissions, permissions,
v8_code_cache: shared.code_cache.clone(), v8_code_cache: shared.code_cache.clone(),
bundle_provider: shared.bundle_provider.clone(),
}; };
let options = WorkerOptions { let options = WorkerOptions {
@ -690,7 +695,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
maybe_inspector_server: shared.maybe_inspector_server.clone(), maybe_inspector_server: shared.maybe_inspector_server.clone(),
should_break_on_first_statement: shared.options.inspect_brk, should_break_on_first_statement: shared.options.inspect_brk,
should_wait_for_inspector_session: shared.options.inspect_wait, should_wait_for_inspector_session: shared.options.inspect_wait,
strace_ops: shared.options.strace_ops.clone(), trace_ops: shared.options.trace_ops.clone(),
cache_storage_dir, cache_storage_dir,
origin_storage_dir, origin_storage_dir,
stdio, stdio,
@ -816,32 +821,29 @@ impl LibMainWorker {
} }
#[inline] #[inline]
#[allow(clippy::result_large_err)] pub fn dispatch_load_event(&mut self) -> Result<(), Box<JsError>> {
pub fn dispatch_load_event(&mut self) -> Result<(), JsError> {
self.worker.dispatch_load_event() self.worker.dispatch_load_event()
} }
#[inline] #[inline]
#[allow(clippy::result_large_err)] pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, Box<JsError>> {
pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, JsError> {
self.worker.dispatch_beforeunload_event() self.worker.dispatch_beforeunload_event()
} }
#[inline] #[inline]
#[allow(clippy::result_large_err)] pub fn dispatch_process_beforeexit_event(
pub fn dispatch_process_beforeexit_event(&mut self) -> Result<bool, JsError> { &mut self,
) -> Result<bool, Box<JsError>> {
self.worker.dispatch_process_beforeexit_event() self.worker.dispatch_process_beforeexit_event()
} }
#[inline] #[inline]
#[allow(clippy::result_large_err)] pub fn dispatch_unload_event(&mut self) -> Result<(), Box<JsError>> {
pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> {
self.worker.dispatch_unload_event() self.worker.dispatch_unload_event()
} }
#[inline] #[inline]
#[allow(clippy::result_large_err)] pub fn dispatch_process_exit_event(&mut self) -> Result<(), Box<JsError>> {
pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> {
self.worker.dispatch_process_exit_event() self.worker.dispatch_process_exit_event()
} }

View file

@ -703,11 +703,11 @@ pub fn fix_ts_import_changes(
let target_module = if is_new_file { let target_module = if is_new_file {
None None
} else { } else {
let Some(target_module) = language_server let Some(target_module) =
.document_modules language_server.document_modules.module_for_specifier(
.inspect_module_for_specifier(
&target_specifier, &target_specifier,
module.scope.as_deref(), module.scope.as_deref(),
Some(&module.compiler_options_key),
) )
else { else {
continue; continue;

View file

@ -29,7 +29,7 @@ pub struct LspCompilerOptionsData {
#[derive(Debug)] #[derive(Debug)]
pub struct LspCompilerOptionsResolver { pub struct LspCompilerOptionsResolver {
inner: CompilerOptionsResolver, pub inner: Arc<CompilerOptionsResolver>,
data: BTreeMap<CompilerOptionsKey, LspCompilerOptionsData>, data: BTreeMap<CompilerOptionsKey, LspCompilerOptionsData>,
} }
@ -97,7 +97,10 @@ impl LspCompilerOptionsResolver {
) )
}) })
.collect(); .collect();
Self { inner, data } Self {
inner: Arc::new(inner),
data,
}
} }
pub fn for_specifier(&self, specifier: &Url) -> &LspCompilerOptionsData { pub fn for_specifier(&self, specifier: &Url) -> &LspCompilerOptionsData {

View file

@ -1291,7 +1291,6 @@ impl ConfigData {
maybe_vendor_override: None, maybe_vendor_override: None,
}, },
) )
.map(Arc::new)
.map_err(AnyError::from) .map_err(AnyError::from)
} }
Err(()) => Err(anyhow!("Scope '{}' was not a directory path.", scope)), Err(()) => Err(anyhow!("Scope '{}' was not a directory path.", scope)),
@ -1310,10 +1309,10 @@ impl ConfigData {
Err(err) => { Err(err) => {
lsp_warn!(" Couldn't open workspace \"{}\": {}", scope.as_str(), err); lsp_warn!(" Couldn't open workspace \"{}\": {}", scope.as_str(), err);
let member_dir = let member_dir =
Arc::new(WorkspaceDirectory::empty(WorkspaceDirectoryEmptyOptions { WorkspaceDirectory::empty(WorkspaceDirectoryEmptyOptions {
root_dir: scope.clone(), root_dir: scope.clone(),
use_vendor_dir: VendorEnablement::Disable, use_vendor_dir: VendorEnablement::Disable,
})); });
let mut data = Self::load_inner( let mut data = Self::load_inner(
member_dir, member_dir,
scope.clone(), scope.clone(),
@ -1442,6 +1441,7 @@ impl ConfigData {
no_lock: false, no_lock: false,
no_npm: false, no_npm: false,
npm_process_state: None, npm_process_state: None,
root_node_modules_dir_override: None,
vendor: None, vendor: None,
}, },
); );
@ -1798,7 +1798,7 @@ impl ConfigTree {
.values() .values()
.filter_map(|data| { .filter_map(|data| {
let workspace_root_scope_uri = let workspace_root_scope_uri =
Some(data.member_dir.workspace.root_dir()) Some(data.member_dir.workspace.root_dir_url())
.filter(|s| *s != data.member_dir.dir_url()) .filter(|s| *s != data.member_dir.dir_url())
.and_then(|s| url_to_uri(s).ok()); .and_then(|s| url_to_uri(s).ok());
Some(lsp_custom::DenoConfigurationData { Some(lsp_custom::DenoConfigurationData {
@ -1969,18 +1969,14 @@ impl ConfigTree {
.fs_create_dir_all(config_path.parent().unwrap()) .fs_create_dir_all(config_path.parent().unwrap())
.unwrap(); .unwrap();
memory_sys.fs_write(&config_path, json_text).unwrap(); memory_sys.fs_write(&config_path, json_text).unwrap();
let workspace_dir = Arc::new( let workspace_dir = WorkspaceDirectory::discover(
WorkspaceDirectory::discover( &memory_sys,
&memory_sys, deno_config::workspace::WorkspaceDiscoverStart::ConfigFile(&config_path),
deno_config::workspace::WorkspaceDiscoverStart::ConfigFile( &deno_config::workspace::WorkspaceDiscoverOptions {
&config_path, ..Default::default()
), },
&deno_config::workspace::WorkspaceDiscoverOptions { )
..Default::default() .unwrap();
},
)
.unwrap(),
);
let data = Arc::new( let data = Arc::new(
ConfigData::load_inner( ConfigData::load_inner(
workspace_dir, workspace_dir,

View file

@ -1440,6 +1440,7 @@ mod tests {
Arc::new(LspResolver::from_config(&config, &cache, None).await); Arc::new(LspResolver::from_config(&config, &cache, None).await);
let compiler_options_resolver = let compiler_options_resolver =
Arc::new(LspCompilerOptionsResolver::new(&config, &resolver)); Arc::new(LspCompilerOptionsResolver::new(&config, &resolver));
resolver.set_compiler_options_resolver(&compiler_options_resolver.inner);
let linter_resolver = Arc::new(LspLinterResolver::new( let linter_resolver = Arc::new(LspLinterResolver::new(
&config, &config,
&compiler_options_resolver, &compiler_options_resolver,

View file

@ -991,10 +991,6 @@ impl WeakDocumentModuleMap {
} }
} }
fn get_for_specifier(&self, specifier: &Url) -> Option<Arc<DocumentModule>> {
self.by_specifier.read().get(specifier)
}
fn contains_specifier(&self, specifier: &Url) -> bool { fn contains_specifier(&self, specifier: &Url) -> bool {
self.by_specifier.read().contains_key(specifier) self.by_specifier.read().contains_key(specifier)
} }
@ -1366,24 +1362,6 @@ impl DocumentModules {
result result
} }
/// This will not create any module entries, only retrieve existing entries.
pub fn inspect_module_for_specifier(
&self,
specifier: &Url,
scope: Option<&Url>,
) -> Option<Arc<DocumentModule>> {
let scoped_resolver = self.resolver.get_scoped_resolver(scope);
let specifier = match JsrPackageReqReference::from_specifier(specifier) {
Ok(jsr_req_ref) => {
Cow::Owned(scoped_resolver.jsr_to_resource_url(&jsr_req_ref)?)
}
_ => Cow::Borrowed(specifier),
};
let specifier = scoped_resolver.resolve_redirects(&specifier)?;
let modules = self.modules_for_scope(scope)?;
modules.get_for_specifier(&specifier)
}
/// This will not create any module entries, only retrieve existing entries. /// This will not create any module entries, only retrieve existing entries.
pub fn inspect_primary_module( pub fn inspect_primary_module(
&self, &self,
@ -2113,6 +2091,7 @@ mod tests {
Arc::new(LspResolver::from_config(&config, &cache, None).await); Arc::new(LspResolver::from_config(&config, &cache, None).await);
let compiler_options_resolver = let compiler_options_resolver =
Arc::new(LspCompilerOptionsResolver::new(&config, &resolver)); Arc::new(LspCompilerOptionsResolver::new(&config, &resolver));
resolver.set_compiler_options_resolver(&compiler_options_resolver.inner);
let mut document_modules = DocumentModules::default(); let mut document_modules = DocumentModules::default();
document_modules.update_config( document_modules.update_config(
&config, &config,
@ -2261,6 +2240,7 @@ console.log(b, "hello deno");
Arc::new(LspResolver::from_config(&config, &cache, None).await); Arc::new(LspResolver::from_config(&config, &cache, None).await);
let compiler_options_resolver = let compiler_options_resolver =
Arc::new(LspCompilerOptionsResolver::new(&config, &resolver)); Arc::new(LspCompilerOptionsResolver::new(&config, &resolver));
resolver.set_compiler_options_resolver(&compiler_options_resolver.inner);
document_modules.update_config( document_modules.update_config(
&config, &config,
&compiler_options_resolver, &compiler_options_resolver,
@ -2306,6 +2286,7 @@ console.log(b, "hello deno");
Arc::new(LspResolver::from_config(&config, &cache, None).await); Arc::new(LspResolver::from_config(&config, &cache, None).await);
let compiler_options_resolver = let compiler_options_resolver =
Arc::new(LspCompilerOptionsResolver::new(&config, &resolver)); Arc::new(LspCompilerOptionsResolver::new(&config, &resolver));
resolver.set_compiler_options_resolver(&compiler_options_resolver.inner);
document_modules.update_config( document_modules.update_config(
&config, &config,
&compiler_options_resolver, &compiler_options_resolver,

View file

@ -1207,6 +1207,13 @@ impl Inner {
&self.config, &self.config,
&self.resolver, &self.resolver,
)); ));
// TODO(nayeemrmn): This represents a circular dependency between
// `LspCompilerOptionsResolver` and `LspResolver` because the former uses
// the node resolver to resolve `extends` in tsconfig. Break out the node
// resolver from `LspResolver`.
self
.resolver
.set_compiler_options_resolver(&self.compiler_options_resolver.inner);
} }
#[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
@ -1915,9 +1922,11 @@ impl Inner {
.dependency_at_position(&params.text_document_position_params.position) .dependency_at_position(&params.text_document_position_params.position)
{ {
let dep_module = dep.get_code().and_then(|s| { let dep_module = dep.get_code().and_then(|s| {
self self.document_modules.module_for_specifier(
.document_modules s,
.inspect_module_for_specifier(s, module.scope.as_deref()) module.scope.as_deref(),
Some(&module.compiler_options_key),
)
}); });
let dep_types_dependency = dep_module.as_ref().map(|m| { let dep_types_dependency = dep_module.as_ref().map(|m| {
m.types_dependency m.types_dependency
@ -3958,17 +3967,23 @@ impl Inner {
); );
}) })
.unwrap_or_default(); .unwrap_or_default();
items_with_scopes.extend(items.into_iter().map(|i| (i, scope))); items_with_scopes.extend(
items
.into_iter()
.map(|i| (i, (scope, compiler_options_key))),
);
} }
let symbol_information = items_with_scopes let symbol_information = items_with_scopes
.into_iter() .into_iter()
.flat_map(|(item, scope)| { .flat_map(|(item, (scope, compiler_options_key))| {
if token.is_cancelled() { if token.is_cancelled() {
return Some(Err(LspError::request_cancelled())); return Some(Err(LspError::request_cancelled()));
} }
Some(Ok( Some(Ok(item.to_symbol_information(
item.to_symbol_information(scope.map(|s| s.as_ref()), self)?, scope.map(|s| s.as_ref()),
)) compiler_options_key,
self,
)?))
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let symbol_information = if symbol_information.is_empty() { let symbol_information = if symbol_information.is_empty() {

View file

@ -143,8 +143,7 @@ impl LspLinterResolver {
let inner = CliLinter::new(CliLinterOptions { let inner = CliLinter::new(CliLinterOptions {
configured_rules: lint_rule_provider.resolve_lint_rules( configured_rules: lint_rule_provider.resolve_lint_rules(
lint_options.rules, lint_options.rules,
config_data config_data.map(|d| d.member_dir.as_ref()),
.and_then(|d| d.member_dir.maybe_deno_json().map(|c| c.as_ref())),
), ),
fix: false, fix: false,
deno_lint_config, deno_lint_config,

View file

@ -28,6 +28,7 @@ use deno_path_util::url_to_file_path;
use deno_resolver::DenoResolverOptions; use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmResolvers; use deno_resolver::NodeAndNpmResolvers;
use deno_resolver::cjs::IsCjsResolutionMode; use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::deno_json::CompilerOptionsResolver;
use deno_resolver::deno_json::JsxImportSourceConfig; use deno_resolver::deno_json::JsxImportSourceConfig;
use deno_resolver::graph::FoundPackageJsonDepFlag; use deno_resolver::graph::FoundPackageJsonDepFlag;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions; use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
@ -495,6 +496,19 @@ impl LspResolver {
Self { unscoped, by_scope } Self { unscoped, by_scope }
} }
pub fn set_compiler_options_resolver(
&self,
value: &Arc<CompilerOptionsResolver>,
) {
for resolver in
std::iter::once(&self.unscoped).chain(self.by_scope.values())
{
resolver
.workspace_resolver
.set_compiler_options_resolver(value.clone());
}
}
pub fn snapshot(&self) -> Arc<Self> { pub fn snapshot(&self) -> Arc<Self> {
Arc::new(Self { Arc::new(Self {
unscoped: self.unscoped.snapshot(), unscoped: self.unscoped.snapshot(),
@ -1019,7 +1033,7 @@ impl<'a> ResolverFactory<'a> {
sloppy_imports_options: if unstable_sloppy_imports { sloppy_imports_options: if unstable_sloppy_imports {
SloppyImportsOptions::Enabled SloppyImportsOptions::Enabled
} else { } else {
SloppyImportsOptions::Disabled SloppyImportsOptions::Unspecified
}, },
fs_cache_options: FsCacheOptions::Disabled, fs_cache_options: FsCacheOptions::Disabled,
}, },
@ -1040,8 +1054,6 @@ impl<'a> ResolverFactory<'a> {
pkg_json_dep_resolution, pkg_json_dep_resolution,
Default::default(), Default::default(),
Default::default(), Default::default(),
Default::default(),
Default::default(),
CliSys::default(), CliSys::default(),
) )
}) })
@ -1056,8 +1068,6 @@ impl<'a> ResolverFactory<'a> {
PackageJsonDepResolution::Disabled, PackageJsonDepResolution::Disabled,
Default::default(), Default::default(),
Default::default(), Default::default(),
Default::default(),
Default::default(),
self.sys.clone(), self.sys.clone(),
) )
}); });

View file

@ -17,6 +17,8 @@ use deno_core::futures::stream;
use deno_core::parking_lot::RwLock; use deno_core::parking_lot::RwLock;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking; use deno_core::unsync::spawn_blocking;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::tokio_util::create_and_run_current_thread; use deno_runtime::tokio_util::create_and_run_current_thread;
use indexmap::IndexMap; use indexmap::IndexMap;
use tokio_util::sync::CancellationToken; use tokio_util::sync::CancellationToken;
@ -232,7 +234,7 @@ impl TestRun {
)?); )?);
let factory = CliFactory::from_flags(flags); let factory = CliFactory::from_flags(flags);
let cli_options = factory.cli_options()?; let cli_options = factory.cli_options()?;
let permissions_container = factory.root_permissions_container()?; let permission_desc_parser = factory.permission_desc_parser()?;
let main_graph_container = factory.main_module_graph_container().await?; let main_graph_container = factory.main_module_graph_container().await?;
main_graph_container main_graph_container
.check_specifiers( .check_specifiers(
@ -272,11 +274,11 @@ impl TestRun {
let join_handles = queue.into_iter().map(move |specifier| { let join_handles = queue.into_iter().map(move |specifier| {
let specifier = specifier.clone(); let specifier = specifier.clone();
let specifier_dir =
cli_options.workspace().resolve_member_dir(&specifier);
let worker_factory = worker_factory.clone(); let worker_factory = worker_factory.clone();
// Various test files should not share the same permissions in terms of let cli_options = cli_options.clone();
// `PermissionsContainer` - otherwise granting/revoking permissions in one let permission_desc_parser = permission_desc_parser.clone();
// file would have impact on other files, which is undesirable.
let permissions_container = permissions_container.deep_clone();
let worker_sender = test_event_sender_factory.worker(); let worker_sender = test_event_sender_factory.worker();
let fail_fast_tracker = fail_fast_tracker.clone(); let fail_fast_tracker = fail_fast_tracker.clone();
let lsp_filter = self.filters.get(&specifier); let lsp_filter = self.filters.get(&specifier);
@ -295,6 +297,18 @@ impl TestRun {
let token = self.token.clone(); let token = self.token.clone();
spawn_blocking(move || { spawn_blocking(move || {
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
cli_options.permissions_options_for_dir(&specifier_dir)?;
let permissions_container = PermissionsContainer::new(
permission_desc_parser.clone(),
Permissions::from_options(
permission_desc_parser.as_ref(),
&permissions,
)?,
);
if fail_fast_tracker.should_stop() { if fail_fast_tracker.should_stop() {
return Ok(()); return Ok(());
} }

View file

@ -2107,12 +2107,11 @@ impl DocumentSpan {
language_server: &language_server::Inner, language_server: &language_server::Inner,
) -> Option<lsp::LocationLink> { ) -> Option<lsp::LocationLink> {
let target_specifier = resolve_url(&self.file_name).ok()?; let target_specifier = resolve_url(&self.file_name).ok()?;
let target_module = language_server let target_module = language_server.document_modules.module_for_specifier(
.document_modules &target_specifier,
.inspect_module_for_specifier( module.scope.as_deref(),
&target_specifier, Some(&module.compiler_options_key),
module.scope.as_deref(), )?;
)?;
let (target_range, target_selection_range) = let (target_range, target_selection_range) =
if let Some(context_span) = &self.context_span { if let Some(context_span) = &self.context_span {
( (
@ -2151,12 +2150,11 @@ impl DocumentSpan {
language_server: &language_server::Inner, language_server: &language_server::Inner,
) -> Option<ModuleSpecifier> { ) -> Option<ModuleSpecifier> {
let target_specifier = resolve_url(&self.file_name).ok()?; let target_specifier = resolve_url(&self.file_name).ok()?;
let target_module = language_server let target_module = language_server.document_modules.module_for_specifier(
.document_modules &target_specifier,
.inspect_module_for_specifier( module.scope.as_deref(),
&target_specifier, Some(&module.compiler_options_key),
module.scope.as_deref(), )?;
)?;
let range = self.text_span.to_range(target_module.line_index.clone()); let range = self.text_span.to_range(target_module.line_index.clone());
let mut target = uri_to_url(&target_module.uri); let mut target = uri_to_url(&target_module.uri);
target.set_fragment(Some(&format!( target.set_fragment(Some(&format!(
@ -2209,12 +2207,15 @@ impl NavigateToItem {
pub fn to_symbol_information( pub fn to_symbol_information(
&self, &self,
scope: Option<&Url>, scope: Option<&Url>,
compiler_options_key: &CompilerOptionsKey,
language_server: &language_server::Inner, language_server: &language_server::Inner,
) -> Option<lsp::SymbolInformation> { ) -> Option<lsp::SymbolInformation> {
let target_specifier = resolve_url(&self.file_name).ok()?; let target_specifier = resolve_url(&self.file_name).ok()?;
let target_module = language_server let target_module = language_server.document_modules.module_for_specifier(
.document_modules &target_specifier,
.inspect_module_for_specifier(&target_specifier, scope)?; scope,
Some(compiler_options_key),
)?;
let range = self.text_span.to_range(target_module.line_index.clone()); let range = self.text_span.to_range(target_module.line_index.clone());
let location = lsp::Location { let location = lsp::Location {
uri: target_module.uri.as_ref().clone(), uri: target_module.uri.as_ref().clone(),
@ -2258,11 +2259,11 @@ impl InlayHintDisplayPart {
) -> lsp::InlayHintLabelPart { ) -> lsp::InlayHintLabelPart {
let location = self.file.as_ref().and_then(|f| { let location = self.file.as_ref().and_then(|f| {
let target_specifier = resolve_url(f).ok()?; let target_specifier = resolve_url(f).ok()?;
let target_module = language_server let target_module =
.document_modules language_server.document_modules.module_for_specifier(
.inspect_module_for_specifier(
&target_specifier, &target_specifier,
module.scope.as_deref(), module.scope.as_deref(),
Some(&module.compiler_options_key),
)?; )?;
let range = self let range = self
.span .span
@ -2584,11 +2585,11 @@ impl RenameLocation {
includes_non_files = true; includes_non_files = true;
continue; continue;
} }
let Some(target_module) = language_server let Some(target_module) =
.document_modules language_server.document_modules.module_for_specifier(
.inspect_module_for_specifier(
&target_specifier, &target_specifier,
module.scope.as_deref(), module.scope.as_deref(),
Some(&module.compiler_options_key),
) )
else { else {
continue; continue;
@ -2805,14 +2806,11 @@ impl FileTextChanges {
let target_module = if is_new_file { let target_module = if is_new_file {
None None
} else { } else {
Some( Some(language_server.document_modules.module_for_specifier(
language_server &target_specifier,
.document_modules module.scope.as_deref(),
.inspect_module_for_specifier( Some(&module.compiler_options_key),
&target_specifier, )?)
module.scope.as_deref(),
)?,
)
}; };
let target_uri = target_module let target_uri = target_module
.as_ref() .as_ref()
@ -2850,14 +2848,11 @@ impl FileTextChanges {
let target_module = if is_new_file { let target_module = if is_new_file {
None None
} else { } else {
Some( Some(language_server.document_modules.module_for_specifier(
language_server &target_specifier,
.document_modules module.scope.as_deref(),
.inspect_module_for_specifier( Some(&module.compiler_options_key),
&target_specifier, )?)
module.scope.as_deref(),
)?,
)
}; };
let target_uri = target_module let target_uri = target_module
.as_ref() .as_ref()
@ -3309,12 +3304,11 @@ impl ReferenceEntry {
let target_module = if target_specifier == *module.specifier { let target_module = if target_specifier == *module.specifier {
module.clone() module.clone()
} else { } else {
language_server language_server.document_modules.module_for_specifier(
.document_modules &target_specifier,
.inspect_module_for_specifier( module.scope.as_deref(),
&target_specifier, Some(&module.compiler_options_key),
module.scope.as_deref(), )?
)?
}; };
Some(lsp::Location { Some(lsp::Location {
uri: target_module.uri.as_ref().clone(), uri: target_module.uri.as_ref().clone(),
@ -3367,12 +3361,11 @@ impl CallHierarchyItem {
maybe_root_path: Option<&Path>, maybe_root_path: Option<&Path>,
) -> Option<(lsp::CallHierarchyItem, Arc<DocumentModule>)> { ) -> Option<(lsp::CallHierarchyItem, Arc<DocumentModule>)> {
let target_specifier = resolve_url(&self.file).ok()?; let target_specifier = resolve_url(&self.file).ok()?;
let target_module = language_server let target_module = language_server.document_modules.module_for_specifier(
.document_modules &target_specifier,
.inspect_module_for_specifier( module.scope.as_deref(),
&target_specifier, Some(&module.compiler_options_key),
module.scope.as_deref(), )?;
)?;
let use_file_name = self.is_source_file_item(); let use_file_name = self.is_source_file_item();
let maybe_file_path = url_to_file_path(&target_module.specifier).ok(); let maybe_file_path = url_to_file_path(&target_module.specifier).ok();
@ -6070,6 +6063,7 @@ mod tests {
Arc::new(LspResolver::from_config(&config, &cache, None).await); Arc::new(LspResolver::from_config(&config, &cache, None).await);
let compiler_options_resolver = let compiler_options_resolver =
Arc::new(LspCompilerOptionsResolver::new(&config, &resolver)); Arc::new(LspCompilerOptionsResolver::new(&config, &resolver));
resolver.set_compiler_options_resolver(&compiler_options_resolver.inner);
let linter_resolver = Arc::new(LspLinterResolver::new( let linter_resolver = Arc::new(LspLinterResolver::new(
&config, &config,
&compiler_options_resolver, &compiler_options_resolver,

View file

@ -33,6 +33,7 @@ use std::collections::HashMap;
use std::env; use std::env;
use std::future::Future; use std::future::Future;
use std::io::IsTerminal; use std::io::IsTerminal;
use std::io::Write as _;
use std::ops::Deref; use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -58,7 +59,6 @@ use factory::CliFactory;
const MODULE_NOT_FOUND: &str = "Module not found"; const MODULE_NOT_FOUND: &str = "Module not found";
const UNSUPPORTED_SCHEME: &str = "Unsupported scheme"; const UNSUPPORTED_SCHEME: &str = "Unsupported scheme";
use self::args::load_env_variables_from_env_file;
use self::util::draw_thread::DrawThread; use self::util::draw_thread::DrawThread;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::Flags; use crate::args::Flags;
@ -67,6 +67,8 @@ use crate::args::get_default_v8_flags;
use crate::util::display; use crate::util::display;
use crate::util::v8::get_v8_flags_from_env; use crate::util::v8::get_v8_flags_from_env;
use crate::util::v8::init_v8_flags; use crate::util::v8::init_v8_flags;
use crate::util::watch_env_tracker::WatchEnvTracker;
use crate::util::watch_env_tracker::load_env_variables_from_env_files;
#[cfg(feature = "dhat-heap")] #[cfg(feature = "dhat-heap")]
#[global_allocator] #[global_allocator]
@ -131,7 +133,7 @@ async fn run_subcommand(
}), }),
DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async { DenoSubcommand::Bundle(bundle_flags) => spawn_subcommand(async {
log::warn!( log::warn!(
"⚠️ {} is experimental and subject to changes", "⚠️ {} is experimental and subject to changes",
colors::cyan("deno bundle") colors::cyan("deno bundle")
); );
tools::bundle::bundle(flags, bundle_flags).await tools::bundle::bundle(flags, bundle_flags).await
@ -241,7 +243,28 @@ async fn run_subcommand(
spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await }) spawn_subcommand(async move { tools::repl::run(flags, repl_flags).await })
} }
DenoSubcommand::Run(run_flags) => spawn_subcommand(async move { DenoSubcommand::Run(run_flags) => spawn_subcommand(async move {
if run_flags.is_stdin() { if run_flags.print_task_list {
let task_flags = TaskFlags {
cwd: None,
task: None,
is_run: true,
recursive: false,
filter: None,
eval: false,
};
let mut flags = flags.deref().clone();
flags.subcommand = DenoSubcommand::Task(task_flags.clone());
writeln!(
&mut std::io::stdout(),
"Please specify a {} or a {}.\n",
colors::bold("[SCRIPT_ARG]"),
colors::bold("task name")
)?;
std::io::stdout().flush()?;
tools::task::execute_script(Arc::new(flags), task_flags)
.await
.map(|_| 1)
} else if run_flags.is_stdin() {
// these futures are boxed to prevent stack overflows on Windows // these futures are boxed to prevent stack overflows on Windows
tools::run::run_from_stdin(flags.clone(), unconfigured_runtime, roots) tools::run::run_from_stdin(flags.clone(), unconfigured_runtime, roots)
.boxed_local() .boxed_local()
@ -295,9 +318,7 @@ async fn run_subcommand(
.await; .await;
} }
let script_err_msg = script_err.to_string(); let script_err_msg = script_err.to_string();
if script_err_msg.starts_with(MODULE_NOT_FOUND) if should_fallback_on_run_error(script_err_msg.as_str()) {
|| script_err_msg.starts_with(UNSUPPORTED_SCHEME)
{
if run_flags.bare { if run_flags.bare {
let mut cmd = args::clap_root(); let mut cmd = args::clap_root();
cmd.build(); cmd.build();
@ -406,7 +427,7 @@ async fn run_subcommand(
1, 1,
), ),
DenoSubcommand::Vendor => exit_with_message( DenoSubcommand::Vendor => exit_with_message(
"⚠️ `deno vendor` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations", "⚠️ `deno vendor` was removed in Deno 2.\n\nSee the Deno 1.x to 2.x Migration Guide for migration instructions: https://docs.deno.com/runtime/manual/advanced/migrate_deprecations",
1, 1,
), ),
DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async { DenoSubcommand::Publish(publish_flags) => spawn_subcommand(async {
@ -437,6 +458,29 @@ async fn run_subcommand(
handle.await? handle.await?
} }
/// Determines whether an error encountered during `deno run`
/// should trigger fallback behavior, such as attempting to run a Deno task
/// with the same name.
///
/// Checks if the error message indicates a "module not found",
/// "unsupported scheme", or certain OS-level import failures (such as
/// "Is a directory" or "Access is denied"); if so, Deno will attempt to
/// interpret the original argument as a script name or task instead of a
/// file path.
///
/// See: https://github.com/denoland/deno/issues/28878
fn should_fallback_on_run_error(script_err: &str) -> bool {
if script_err.starts_with(MODULE_NOT_FOUND)
|| script_err.starts_with(UNSUPPORTED_SCHEME)
{
return true;
}
let re = lazy_regex::regex!(
r"Import 'file:///.+?' failed\.\n\s+0: .+ \(os error \d+\)"
);
re.is_match(script_err)
}
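Restated outside of Rust, the fallback check accepts messages that start with one of the two known prefixes or that match the OS-level import failure pattern. A minimal TypeScript sketch of the same predicate, with a hypothetical error string for illustration:

```ts
// Sketch of the fallback predicate; the prefixes and the regex mirror the Rust
// code above, but the sample error text below is made up for illustration.
const MODULE_NOT_FOUND = "Module not found";
const UNSUPPORTED_SCHEME = "Unsupported scheme";
const OS_IMPORT_FAILURE = /Import 'file:\/\/\/.+?' failed\.\n\s+0: .+ \(os error \d+\)/;

function shouldFallbackOnRunError(scriptErr: string): boolean {
  return (
    scriptErr.startsWith(MODULE_NOT_FOUND) ||
    scriptErr.startsWith(UNSUPPORTED_SCHEME) ||
    OS_IMPORT_FAILURE.test(scriptErr)
  );
}

// `deno run build` where ./build is a directory: the error matches the pattern,
// so Deno goes on to try a task named "build" instead.
shouldFallbackOnRunError(
  "Import 'file:///home/user/project/build' failed.\n    0: Is a directory (os error 21)",
); // => true
```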
#[allow(clippy::print_stderr)] #[allow(clippy::print_stderr)]
fn setup_panic_hook() { fn setup_panic_hook() {
// This function does two things inside of the panic hook: // This function does two things inside of the panic hook:
@ -603,8 +647,15 @@ async fn resolve_flags_and_init(
} }
Err(err) => exit_for_error(AnyError::from(err)), Err(err) => exit_for_error(AnyError::from(err)),
}; };
// preserve already loaded env variables
load_env_variables_from_env_file(flags.env_file.as_ref(), flags.log_level); if flags.subcommand.watch_flags().is_some() {
WatchEnvTracker::snapshot();
}
let env_file_paths: Option<Vec<std::path::PathBuf>> = flags
.env_file
.as_ref()
.map(|files| files.iter().map(PathBuf::from).collect());
load_env_variables_from_env_files(env_file_paths.as_ref(), flags.log_level);
flags.unstable_config.fill_with_env(); flags.unstable_config.fill_with_env();
if std::env::var("DENO_COMPAT").is_ok() { if std::env::var("DENO_COMPAT").is_ok() {
flags.unstable_config.enable_node_compat(); flags.unstable_config.enable_node_compat();
@ -641,13 +692,18 @@ async fn resolve_flags_and_init(
otel_config.clone(), otel_config.clone(),
)?; )?;
if flags.permission_set.is_some() {
log::warn!(
"{} Permissions in the config file is an experimental feature and may change in the future.",
colors::yellow("Warning")
);
}
// TODO(bartlomieju): remove in Deno v2.5 and hard error then. // TODO(bartlomieju): remove in Deno v2.5 and hard error then.
if flags.unstable_config.legacy_flag_enabled { if flags.unstable_config.legacy_flag_enabled {
log::warn!( log::warn!(
"⚠️ {}", "{} The `--unstable` flag has been removed in Deno 2.0. Use granular `--unstable-*` flags instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags",
colors::yellow( colors::yellow("Warning")
"The `--unstable` flag has been removed in Deno 2.0. Use granular `--unstable-*` flags instead.\nLearn more at: https://docs.deno.com/runtime/manual/tools/unstable_flags"
)
); );
} }
@ -761,7 +817,6 @@ fn wait_for_start(
roots.compiled_wasm_module_store.clone(), roots.compiled_wasm_module_store.clone(),
), ),
additional_extensions: vec![], additional_extensions: vec![],
enable_raw_imports: false,
}); });
let (rx, mut tx): ( let (rx, mut tx): (
@ -915,11 +970,14 @@ async fn initialize_tunnel(
let root_cert_store = cert_store_provider.get_or_try_init()?.clone(); let root_cert_store = cert_store_provider.get_or_try_init()?.clone();
let tls_config = deno_runtime::deno_tls::create_client_config( let tls_config = deno_runtime::deno_tls::create_client_config(
Some(root_cert_store), deno_runtime::deno_tls::TlsClientConfigOptions {
vec![], root_cert_store: Some(root_cert_store),
None, ca_certs: vec![],
deno_runtime::deno_tls::TlsKeys::Null, unsafely_ignore_certificate_errors: None,
deno_runtime::deno_tls::SocketUse::GeneralSsl, unsafely_disable_hostname_verification: false,
cert_chain_and_key: deno_runtime::deno_tls::TlsKeys::Null,
socket_use: deno_runtime::deno_tls::SocketUse::GeneralSsl,
},
)?; )?;
let mut metadata = HashMap::new(); let mut metadata = HashMap::new();

View file

@ -275,6 +275,7 @@ pub enum DenoTaskLifecycleScriptsError {
} }
pub struct DenoTaskLifeCycleScriptsExecutor { pub struct DenoTaskLifeCycleScriptsExecutor {
progress_bar: ProgressBar,
npm_resolver: ManagedNpmResolverRc<CliSys>, npm_resolver: ManagedNpmResolverRc<CliSys>,
} }
@ -348,10 +349,7 @@ impl LifecycleScriptsExecutor for DenoTaskLifeCycleScriptsExecutor {
{ {
continue; continue;
} }
let pb = ProgressBar::new( let _guard = self.progress_bar.update_with_prompt(
crate::util::progress_bar::ProgressBarStyle::TextOnly,
);
let _guard = pb.update_with_prompt(
ProgressMessagePrompt::Initialize, ProgressMessagePrompt::Initialize,
&format!("{}: running '{script_name}' script", package.id.nv), &format!("{}: running '{script_name}' script", package.id.nv),
); );
@ -444,8 +442,14 @@ impl LifecycleScriptsExecutor for DenoTaskLifeCycleScriptsExecutor {
} }
impl DenoTaskLifeCycleScriptsExecutor { impl DenoTaskLifeCycleScriptsExecutor {
pub fn new(npm_resolver: ManagedNpmResolverRc<CliSys>) -> Self { pub fn new(
Self { npm_resolver } npm_resolver: ManagedNpmResolverRc<CliSys>,
progress_bar: ProgressBar,
) -> Self {
Self {
npm_resolver,
progress_bar,
}
} }
// take in all (non copy) packages from snapshot, // take in all (non copy) packages from snapshot,

View file

@ -27,6 +27,7 @@ deno_core::extension!(deno_test,
op_restore_test_permissions, op_restore_test_permissions,
op_register_test, op_register_test,
op_register_test_step, op_register_test_step,
op_register_test_hook,
op_test_get_origin, op_test_get_origin,
op_test_event_step_wait, op_test_event_step_wait,
op_test_event_step_result_ok, op_test_event_step_result_ok,
@ -127,12 +128,24 @@ fn op_register_test(
column_number, column_number,
}, },
}; };
let container = state.borrow_mut::<TestContainer>(); state
container.register(description, function); .borrow_mut::<TestContainer>()
.register(description, function);
ret_buf.copy_from_slice(&(id as u32).to_le_bytes()); ret_buf.copy_from_slice(&(id as u32).to_le_bytes());
Ok(()) Ok(())
} }
#[op2]
fn op_register_test_hook(
state: &mut OpState,
#[string] hook_type: String,
#[global] function: v8::Global<v8::Function>,
) -> Result<(), JsErrorBox> {
let container = state.borrow_mut::<TestContainer>();
container.register_hook(hook_type, function);
Ok(())
}
#[op2] #[op2]
#[string] #[string]
fn op_test_get_origin(state: &mut OpState) -> String { fn op_test_get_origin(state: &mut OpState) -> String {

View file

@ -207,6 +207,21 @@ impl FileSystem for DenoRtSys {
RealFs.chown_async(path, uid, gid).await RealFs.chown_async(path, uid, gid).await
} }
fn exists_sync(&self, path: &CheckedPath) -> bool {
if self.0.is_path_within(path) {
self.0.exists(path)
} else {
RealFs.exists_sync(path)
}
}
async fn exists_async(&self, path: CheckedPathBuf) -> FsResult<bool> {
if self.0.is_path_within(&path) {
Ok(self.0.exists(&path))
} else {
RealFs.exists_async(path).await
}
}
fn lchmod_sync(&self, path: &CheckedPath, mode: u32) -> FsResult<()> { fn lchmod_sync(&self, path: &CheckedPath, mode: u32) -> FsResult<()> {
self.error_if_in_vfs(path)?; self.error_if_in_vfs(path)?;
RealFs.lchmod_sync(path, mode) RealFs.lchmod_sync(path, mode)
@ -1478,6 +1493,10 @@ impl FileBackedVfs {
path.starts_with(&self.fs_root.root_path) path.starts_with(&self.fs_root.root_path)
} }
pub fn exists(&self, path: &Path) -> bool {
self.fs_root.find_entry(path, self.case_sensitivity).is_ok()
}
pub fn open_file( pub fn open_file(
self: &Arc<Self>, self: &Arc<Self>,
path: &Path, path: &Path,

View file

@ -927,11 +927,9 @@ pub async fn run(
if metadata.unstable_config.sloppy_imports { if metadata.unstable_config.sloppy_imports {
SloppyImportsOptions::Enabled SloppyImportsOptions::Enabled
} else { } else {
SloppyImportsOptions::Disabled SloppyImportsOptions::Unspecified
}, },
Default::default(), Default::default(),
Default::default(),
Default::default(),
sys.clone(), sys.clone(),
) )
}; };
@ -1007,7 +1005,7 @@ pub async fn run(
has_node_modules_dir, has_node_modules_dir,
inspect_brk: false, inspect_brk: false,
inspect_wait: false, inspect_wait: false,
strace_ops: None, trace_ops: None,
is_inspecting: false, is_inspecting: false,
is_standalone: true, is_standalone: true,
auto_serve: false, auto_serve: false,
@ -1046,6 +1044,7 @@ pub async fn run(
sys.clone(), sys.clone(),
lib_main_worker_options, lib_main_worker_options,
Default::default(), Default::default(),
None,
); );
// Initialize v8 once from the main thread. // Initialize v8 once from the main thread.

View file

@ -5,7 +5,73 @@
"description": "A JSON representation of a Deno configuration file.", "description": "A JSON representation of a Deno configuration file.",
"type": "object", "type": "object",
"required": [], "required": [],
"$defs": {
"permissionConfigValue": {
"oneOf": [{
"type": "boolean",
"description": "Allow or deny the permission."
}, {
"type": "array",
"description": "Subset of items to allow or deny depending on the context.",
"items": { "type": "string" }
}]
},
"allowDenyPermissionConfig": {
"type": "object",
"description": "Object form to allow and/or deny permissions.",
"additionalProperties": false,
"properties": {
"allow": { "$ref": "#/$defs/permissionConfigValue" },
"deny": { "$ref": "#/$defs/permissionConfigValue" }
}
},
"allowDenyPermissionConfigValue": {
"oneOf": [{
"$ref": "#/$defs/permissionConfigValue"
}, {
"$ref": "#/$defs/allowDenyPermissionConfig"
}]
},
"permissionSet": {
"type": "object",
"description": "Collection of permissions.",
"additionalProperties": false,
"properties": {
"all": {
"type": "boolean",
"description": "Allow all permissions for the program to run unrestricted."
},
"read": { "$ref": "#/$defs/allowDenyPermissionConfigValue" },
"write": { "$ref": "#/$defs/allowDenyPermissionConfigValue" },
"import": { "$ref": "#/$defs/allowDenyPermissionConfigValue" },
"env": { "$ref": "#/$defs/allowDenyPermissionConfigValue" },
"net": { "$ref": "#/$defs/allowDenyPermissionConfigValue" },
"run": { "$ref": "#/$defs/allowDenyPermissionConfigValue" },
"ffi": { "$ref": "#/$defs/allowDenyPermissionConfigValue" },
"sys": { "$ref": "#/$defs/allowDenyPermissionConfigValue" }
}
},
"permissionNameOrSet": {
"description": "A permission set name to use or inline permission set.",
"anyOf": [{
"type": "string",
"description": "Permission name."
}, {
"$ref": "#/$defs/permissionSet"
}]
}
},
"properties": { "properties": {
"compile": {
"type": "object",
"description": "Configuration for `deno compile`.",
"additionalProperties": false,
"properties": {
"permissions": {
"$ref": "#/$defs/permissionNameOrSet"
}
}
},
"compilerOptions": { "compilerOptions": {
"type": "object", "type": "object",
"description": "Instructs the TypeScript compiler how to compile .ts files.", "description": "Instructs the TypeScript compiler how to compile .ts files.",
@ -149,6 +215,43 @@
}, },
"markdownDescription": "Specify a set of bundled library declaration files that describe the target runtime environment.\n\nSee more: https://www.typescriptlang.org/tsconfig#lib" "markdownDescription": "Specify a set of bundled library declaration files that describe the target runtime environment.\n\nSee more: https://www.typescriptlang.org/tsconfig#lib"
}, },
"module": {
"description": "Specify what module code is generated.",
"type": ["string", "null"],
"anyOf": [
{
"enum": [
"esnext",
"nodenext",
"preserve"
]
},
{
"pattern": "^([Ee][Ss][Nn][Ee][Xx][Tt]|[Nn][Oo][Dd][Ee][Nn][Ee][Xx][Tt]|[Pp][Rr][Ee][Ss][Ee][Rr][Vv][Ee])$"
}
],
"markdownDescription": "Specify what module code is generated.\n\nSee more: https://www.typescriptlang.org/tsconfig#module"
},
"moduleResolution": {
"description": "Specify how TypeScript looks up a file from a given module specifier.",
"type": ["string", "null"],
"anyOf": [
{
"enum": [
"nodenext",
"bundler"
],
"markdownEnumDescriptions": [
"This is the recommended setting for libraries and Node.js applications",
"This is the recommended setting in TypeScript 5.0+ for applications that use a bundler"
]
},
{
"pattern": "^([Nn]ode[Nn]ext)|([Bb]undler))$"
}
],
"markdownDescription": "Specify how TypeScript looks up a file from a given module specifier.\n\nSee more: https://www.typescriptlang.org/tsconfig#moduleResolution"
},
"noErrorTruncation": { "noErrorTruncation": {
"description": "Do not truncate error messages.", "description": "Do not truncate error messages.",
"type": "boolean", "type": "boolean",
@ -622,6 +725,9 @@
"items": { "items": {
"type": "string" "type": "string"
} }
},
"permissions": {
"$ref": "#/$defs/permissionNameOrSet"
} }
} }
}, },
@ -662,6 +768,9 @@
"items": { "items": {
"type": "string" "type": "string"
} }
},
"permissions": {
"$ref": "#/$defs/permissionNameOrSet"
} }
} }
}, },
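Taken together, the new `$defs` let a config express permissions as a boolean or string array (`permissionConfigValue`), as an `allow`/`deny` object, as a full `permissionSet`, or as a string naming a set (`permissionNameOrSet`), and the hunks above wire that into `compile` and two other tool sections via `$ref`. A sketch of a conforming fragment follows, written as a TypeScript object literal since this diff only shows the schema, not a sample `deno.json`; where named sets are declared is not visible here, so only an inline set is used:

```ts
// Illustrative fragment mirroring the shapes defined by the new $defs; the
// `compile.permissions` key is from the diff, the concrete values are made up.
const denoJsonFragment = {
  compile: {
    permissions: {
      read: true,                     // permissionConfigValue: boolean
      net: ["deno.land:443"],         // permissionConfigValue: string[]
      env: {                          // allowDenyPermissionConfig
        allow: ["HOME", "PATH"],
        deny: ["AWS_SECRET_ACCESS_KEY"],
      },
    },
  },
} as const;
```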

View file

@ -75,6 +75,7 @@
"no-implicit-declare-namespace-export", "no-implicit-declare-namespace-export",
"no-import-assertions", "no-import-assertions",
"no-import-assign", "no-import-assign",
"no-import-prefix",
"no-inferrable-types", "no-inferrable-types",
"no-inner-declarations", "no-inner-declarations",
"no-invalid-regexp", "no-invalid-regexp",
@ -110,6 +111,7 @@
"no-unsafe-negation", "no-unsafe-negation",
"no-unused-labels", "no-unused-labels",
"no-unused-vars", "no-unused-vars",
"no-unversioned-import",
"no-useless-rename", "no-useless-rename",
"no-var", "no-var",
"no-window", "no-window",

View file

@ -1,5 +1,5 @@
{ {
"$id": "https://deno.land/x/deno/cli/schemas/lint-tags.v1.json", "$id": "https://deno.land/x/deno/cli/schemas/lint-tags.v1.json",
"$schema": "https://json-schema.org/draft/2020-12/schema", "$schema": "https://json-schema.org/draft/2020-12/schema",
"enum": ["fresh", "jsr", "jsx", "react", "recommended"] "enum": ["fresh", "jsr", "jsx", "react", "recommended", "workspace"]
} }

View file

@ -1,3 +1,3 @@
// Copyright 2018-2025 the Deno authors. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
pub static TS_VERSION: &str = "5.8.3"; pub static TS_VERSION: &str = "5.9.2";

View file

@ -262,7 +262,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
bail!( bail!(
"The `--icon` flag is only available when targeting Windows (current: {})", "The `--icon` flag is only available when targeting Windows (current: {})",
target, target,
) );
} }
} }
self.write_standalone_binary(options, original_binary).await self.write_standalone_binary(options, original_binary).await
@ -721,7 +721,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
seed: self.cli_options.seed(), seed: self.cli_options.seed(),
code_cache_key, code_cache_key,
location: self.cli_options.location_flag().clone(), location: self.cli_options.location_flag().clone(),
permissions: self.cli_options.permissions_options(), permissions: self.cli_options.permissions_options()?,
v8_flags: construct_v8_flags( v8_flags: construct_v8_flags(
&get_default_v8_flags(), &get_default_v8_flags(),
self.cli_options.v8_flags(), self.cli_options.v8_flags(),
@ -901,7 +901,7 @@ impl<'a> DenoCompileBinaryWriter<'a> {
self self
.cli_options .cli_options
.workspace() .workspace()
.root_dir() .root_dir_url()
.to_file_path() .to_file_path()
.unwrap(), .unwrap(),
); );

View file

@ -22,7 +22,9 @@ use deno_core::v8;
use deno_error::JsErrorBox; use deno_error::JsErrorBox;
use deno_npm_installer::graph::NpmCachingStrategy; use deno_npm_installer::graph::NpmCachingStrategy;
use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerExecutionMode;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::tokio_util::create_and_run_current_thread; use deno_runtime::tokio_util::create_and_run_current_thread;
use indexmap::IndexMap; use indexmap::IndexMap;
use indexmap::IndexSet; use indexmap::IndexSet;
@ -33,6 +35,7 @@ use tokio::sync::mpsc::UnboundedSender;
use tokio::sync::mpsc::unbounded_channel; use tokio::sync::mpsc::unbounded_channel;
use crate::args::BenchFlags; use crate::args::BenchFlags;
use crate::args::CliOptions;
use crate::args::Flags; use crate::args::Flags;
use crate::colors; use crate::colors;
use crate::display::write_json_to_stdout; use crate::display::write_json_to_stdout;
@ -40,6 +43,7 @@ use crate::factory::CliFactory;
use crate::graph_container::CheckSpecifiersOptions; use crate::graph_container::CheckSpecifiersOptions;
use crate::graph_util::has_graph_root_local_dependent_changed; use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops; use crate::ops;
use crate::sys::CliSys;
use crate::tools::test::TestFilter; use crate::tools::test::TestFilter;
use crate::tools::test::format_test_error; use crate::tools::test::format_test_error;
use crate::util::file_watcher; use crate::util::file_watcher;
@ -169,10 +173,7 @@ async fn bench_specifier(
Ok(()) => Ok(()), Ok(()) => Ok(()),
Err(CreateCustomWorkerError::Core(error)) => match error.into_kind() { Err(CreateCustomWorkerError::Core(error)) => match error.into_kind() {
CoreErrorKind::Js(error) => { CoreErrorKind::Js(error) => {
sender.send(BenchEvent::UncaughtError( sender.send(BenchEvent::UncaughtError(specifier.to_string(), error))?;
specifier.to_string(),
Box::new(error),
))?;
Ok(()) Ok(())
} }
error => Err(error.into_box().into()), error => Err(error.into_box().into()),
@ -296,7 +297,8 @@ async fn bench_specifier_inner(
/// Test a collection of specifiers with test modes concurrently. /// Test a collection of specifiers with test modes concurrently.
async fn bench_specifiers( async fn bench_specifiers(
worker_factory: Arc<CliMainWorkerFactory>, worker_factory: Arc<CliMainWorkerFactory>,
root_permissions_container: &PermissionsContainer, cli_options: &Arc<CliOptions>,
permission_desc_parser: &Arc<RuntimePermissionDescriptorParser<CliSys>>,
specifiers: Vec<ModuleSpecifier>, specifiers: Vec<ModuleSpecifier>,
preload_modules: Vec<ModuleSpecifier>, preload_modules: Vec<ModuleSpecifier>,
options: BenchSpecifierOptions, options: BenchSpecifierOptions,
@ -307,14 +309,25 @@ async fn bench_specifiers(
let join_handles = specifiers.into_iter().map(move |specifier| { let join_handles = specifiers.into_iter().map(move |specifier| {
let worker_factory = worker_factory.clone(); let worker_factory = worker_factory.clone();
// Various test files should not share the same permissions in terms of let specifier_dir = cli_options.workspace().resolve_member_dir(&specifier);
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions_container = root_permissions_container.deep_clone();
let sender = sender.clone(); let sender = sender.clone();
let options = option_for_handles.clone(); let options = option_for_handles.clone();
let preload_modules = preload_modules.clone(); let preload_modules = preload_modules.clone();
let cli_options = cli_options.clone();
let permission_desc_parser = permission_desc_parser.clone();
spawn_blocking(move || { spawn_blocking(move || {
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
cli_options.permissions_options_for_dir(&specifier_dir)?;
let permissions_container = PermissionsContainer::new(
permission_desc_parser.clone(),
Permissions::from_options(
permission_desc_parser.as_ref(),
&permissions,
)?,
);
let future = bench_specifier( let future = bench_specifier(
worker_factory, worker_factory,
permissions_container, permissions_container,
@ -491,7 +504,8 @@ pub async fn run_benchmarks(
Arc::new(factory.create_cli_main_worker_factory().await?); Arc::new(factory.create_cli_main_worker_factory().await?);
bench_specifiers( bench_specifiers(
worker_factory, worker_factory,
factory.root_permissions_container()?, cli_options,
factory.permission_desc_parser()?,
specifiers, specifiers,
preload_modules, preload_modules,
BenchSpecifierOptions { BenchSpecifierOptions {
@ -625,7 +639,8 @@ pub async fn run_benchmarks_with_watch(
let preload_modules = cli_options.preload_modules()?; let preload_modules = cli_options.preload_modules()?;
bench_specifiers( bench_specifiers(
worker_factory, worker_factory,
factory.root_permissions_container()?, cli_options,
factory.permission_desc_parser()?,
specifiers, specifiers,
preload_modules, preload_modules,
BenchSpecifierOptions { BenchSpecifierOptions {

View file

@ -2,10 +2,12 @@
mod esbuild; mod esbuild;
mod externals; mod externals;
mod provider;
mod transform; mod transform;
use std::borrow::Cow; use std::borrow::Cow;
use std::cell::RefCell; use std::cell::RefCell;
use std::ops::Deref;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::rc::Rc; use std::rc::Rc;
@ -15,16 +17,23 @@ use std::time::Duration;
use deno_ast::EmitOptions; use deno_ast::EmitOptions;
use deno_ast::MediaType; use deno_ast::MediaType;
use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_bundle_runtime::BundleFormat;
use deno_bundle_runtime::BundlePlatform;
use deno_bundle_runtime::PackageHandling;
use deno_bundle_runtime::SourceMapType;
use deno_config::workspace::TsTypeLib; use deno_config::workspace::TsTypeLib;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::futures::FutureExt as _; use deno_core::futures::FutureExt as _;
use deno_core::parking_lot::Mutex;
use deno_core::serde_json; use deno_core::serde_json;
use deno_core::url::Url; use deno_core::url::Url;
use deno_error::JsError; use deno_error::JsError;
use deno_graph::ModuleErrorKind; use deno_graph::ModuleErrorKind;
use deno_graph::Position; use deno_graph::Position;
use deno_path_util::resolve_url_or_path; use deno_path_util::resolve_url_or_path;
use deno_resolver::cache::ParsedSourceCache;
use deno_resolver::graph::ResolveWithGraphError; use deno_resolver::graph::ResolveWithGraphError;
use deno_resolver::graph::ResolveWithGraphOptions; use deno_resolver::graph::ResolveWithGraphOptions;
use deno_resolver::loader::LoadCodeSourceError; use deno_resolver::loader::LoadCodeSourceError;
@ -44,25 +53,25 @@ use esbuild_client::protocol::BuildResponse;
use indexmap::IndexMap; use indexmap::IndexMap;
use node_resolver::NodeResolutionKind; use node_resolver::NodeResolutionKind;
use node_resolver::ResolutionMode; use node_resolver::ResolutionMode;
use node_resolver::errors::PackageNotFoundError;
use node_resolver::errors::PackageSubpathResolveError; use node_resolver::errors::PackageSubpathResolveError;
pub use provider::CliBundleProvider;
use sys_traits::EnvCurrentDir; use sys_traits::EnvCurrentDir;
use crate::args::BundleFlags; use crate::args::BundleFlags;
use crate::args::BundleFormat;
use crate::args::BundlePlatform;
use crate::args::Flags; use crate::args::Flags;
use crate::args::PackageHandling;
use crate::args::SourceMapType;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::file_fetcher::CliFileFetcher; use crate::file_fetcher::CliFileFetcher;
use crate::graph_container::MainModuleGraphContainer; use crate::graph_container::MainModuleGraphContainer;
use crate::graph_container::ModuleGraphContainer; use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit; use crate::graph_container::ModuleGraphUpdatePermit;
use crate::module_loader::CliDenoResolverModuleLoader; use crate::module_loader::CliDenoResolverModuleLoader;
use crate::module_loader::CliEmitter;
use crate::module_loader::ModuleLoadPreparer; use crate::module_loader::ModuleLoadPreparer;
use crate::module_loader::PrepareModuleLoadOptions; use crate::module_loader::PrepareModuleLoadOptions;
use crate::node::CliNodeResolver; use crate::node::CliNodeResolver;
use crate::npm::CliNpmResolver; use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::resolver::CliResolver; use crate::resolver::CliResolver;
use crate::sys::CliSys; use crate::sys::CliSys;
use crate::tools::bundle::externals::ExternalsMatcher; use crate::tools::bundle::externals::ExternalsMatcher;
@ -71,10 +80,10 @@ use crate::util::file_watcher::WatcherRestartMode;
static DISABLE_HACK: LazyLock<bool> = static DISABLE_HACK: LazyLock<bool> =
LazyLock::new(|| std::env::var("NO_DENO_BUNDLE_HACK").is_err()); LazyLock::new(|| std::env::var("NO_DENO_BUNDLE_HACK").is_err());
pub async fn bundle( pub async fn bundle_init(
mut flags: Arc<Flags>, mut flags: Arc<Flags>,
bundle_flags: BundleFlags, bundle_flags: &BundleFlags,
) -> Result<(), AnyError> { ) -> Result<EsbuildBundler, AnyError> {
{ {
let flags_mut = Arc::make_mut(&mut flags); let flags_mut = Arc::make_mut(&mut flags);
flags_mut.unstable_config.sloppy_imports = true; flags_mut.unstable_config.sloppy_imports = true;
@ -110,8 +119,11 @@ pub async fn bundle(
Some(ExternalsMatcher::new(&bundle_flags.external, &init_cwd)) Some(ExternalsMatcher::new(&bundle_flags.external, &init_cwd))
}, },
on_end_tx, on_end_tx,
parsed_source_cache: factory.parsed_source_cache()?.clone(),
cjs_tracker: factory.cjs_tracker()?.clone(),
emitter: factory.emitter()?.clone(),
deferred_resolve_errors: Default::default(),
}); });
let start = std::time::Instant::now();
let resolved_entrypoints = let resolved_entrypoints =
resolve_entrypoints(&resolver, &init_cwd, &bundle_flags.entrypoints)?; resolve_entrypoints(&resolver, &init_cwd, &bundle_flags.entrypoints)?;
@ -138,7 +150,7 @@ pub async fn bundle(
log::warn!("esbuild exited: {:?}", res); log::warn!("esbuild exited: {:?}", res);
}); });
let esbuild_flags = configure_esbuild_flags(&bundle_flags); let esbuild_flags = configure_esbuild_flags(bundle_flags);
let entries = roots.into_iter().map(|e| ("".into(), e.into())).collect(); let entries = roots.into_iter().map(|e| ("".into(), e.into())).collect();
let bundler = EsbuildBundler::new( let bundler = EsbuildBundler::new(
client, client,
@ -152,7 +164,24 @@ pub async fn bundle(
esbuild_flags, esbuild_flags,
entries, entries,
); );
Ok(bundler)
}
pub async fn bundle(
mut flags: Arc<Flags>,
bundle_flags: BundleFlags,
) -> Result<(), AnyError> {
{
let flags_mut = Arc::make_mut(&mut flags);
flags_mut.unstable_config.sloppy_imports = true;
}
let bundler = bundle_init(flags.clone(), &bundle_flags).await?;
let init_cwd = bundler.cwd.clone();
let start = std::time::Instant::now();
let response = bundler.build().await?; let response = bundler.build().await?;
let end = std::time::Instant::now();
let duration = end.duration_since(start);
if bundle_flags.watch { if bundle_flags.watch {
return bundle_watch( return bundle_watch(
@ -164,19 +193,23 @@ pub async fn bundle(
.await; .await;
} }
handle_esbuild_errors_and_warnings(&response, &init_cwd); handle_esbuild_errors_and_warnings(
&response,
&init_cwd,
&bundler.plugin_handler.take_deferred_resolve_errors(),
);
if response.errors.is_empty() { if response.errors.is_empty() {
let metafile = metafile_from_response(&response)?; let metafile = metafile_from_response(&response)?;
let output_infos = process_result( let output_infos = process_result(
&response, &response,
&init_cwd, &init_cwd,
*DISABLE_HACK && matches!(bundle_flags.platform, BundlePlatform::Deno), should_replace_require_shim(bundle_flags.platform),
bundle_flags.minify, bundle_flags.minify,
)?; )?;
if bundle_flags.output_dir.is_some() || bundle_flags.output_path.is_some() { if bundle_flags.output_dir.is_some() || bundle_flags.output_path.is_some() {
print_finished_message(&metafile, &output_infos, start.elapsed())?; print_finished_message(&metafile, &output_infos, duration)?;
} }
} }
@ -236,13 +269,17 @@ async fn bundle_watch(
.await?; .await?;
} }
let response = bundler.rebuild().await?; let response = bundler.rebuild().await?;
handle_esbuild_errors_and_warnings(&response, &bundler.cwd); handle_esbuild_errors_and_warnings(
&response,
&bundler.cwd,
&bundler.plugin_handler.take_deferred_resolve_errors(),
);
if response.errors.is_empty() { if response.errors.is_empty() {
let metafile = metafile_from_response(&response)?; let metafile = metafile_from_response(&response)?;
let output_infos = process_result( let output_infos = process_result(
&response, &response,
&bundler.cwd, &bundler.cwd,
*DISABLE_HACK && matches!(platform, BundlePlatform::Deno), should_replace_require_shim(platform),
minified, minified,
)?; )?;
print_finished_message(&metafile, &output_infos, start.elapsed())?; print_finished_message(&metafile, &output_infos, start.elapsed())?;
@ -265,6 +302,10 @@ async fn bundle_watch(
Ok(()) Ok(())
} }
pub fn should_replace_require_shim(platform: BundlePlatform) -> bool {
*DISABLE_HACK && matches!(platform, BundlePlatform::Deno)
}
fn get_input_paths_for_watch(response: &BuildResponse) -> Vec<PathBuf> { fn get_input_paths_for_watch(response: &BuildResponse) -> Vec<PathBuf> {
let metafile = serde_json::from_str::<esbuild_client::Metafile>( let metafile = serde_json::from_str::<esbuild_client::Metafile>(
response response
@ -282,7 +323,7 @@ fn get_input_paths_for_watch(response: &BuildResponse) -> Vec<PathBuf> {
.collect::<Vec<_>>() .collect::<Vec<_>>()
} }
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy, PartialEq)]
pub enum BundlingMode { pub enum BundlingMode {
OneShot, OneShot,
Watch, Watch,
@ -360,6 +401,7 @@ impl EsbuildBundler {
.send_build_request(self.make_build_request()) .send_build_request(self.make_build_request())
.await .await
.unwrap(); .unwrap();
Ok(response) Ok(response)
} }
@ -403,12 +445,54 @@ var __require = createRequire(import.meta.url);
} }
} }
fn format_location(
location: &esbuild_client::protocol::Location,
current_dir: &Path,
) -> String {
let url =
deno_path_util::resolve_url_or_path(location.file.as_str(), current_dir)
.map(|url| deno_terminal::colors::cyan(url.into()))
.unwrap_or(deno_terminal::colors::cyan(location.file.clone()));
format!(
"{}:{}:{}",
url,
deno_terminal::colors::yellow(location.line),
deno_terminal::colors::yellow(location.column)
)
}
fn format_note(
note: &esbuild_client::protocol::Note,
current_dir: &Path,
) -> String {
format!(
"{}: {}{}",
deno_terminal::colors::magenta("note"),
note.text,
if let Some(location) = &note.location {
format!("\n {}", format_location(location, current_dir))
} else {
String::new()
}
)
}
// not very efficient, but it's only for error messages
fn add_indent(s: &str, indent: &str) -> String {
let lines = s
.lines()
.map(|line| format!("{}{}", indent, line))
.collect::<Vec<_>>();
lines.join("\n")
}
fn format_message( fn format_message(
message: &esbuild_client::protocol::Message, message: &esbuild_client::protocol::Message,
current_dir: &Path, current_dir: &Path,
) -> String { ) -> String {
format!( format!(
"{}{}{}", "{}{}{}{}",
message.text, message.text,
if message.id.is_empty() { if message.id.is_empty() {
String::new() String::new()
@ -417,22 +501,22 @@ fn format_message(
}, },
if let Some(location) = &message.location { if let Some(location) = &message.location {
if !message.text.contains(" at ") { if !message.text.contains(" at ") {
format!( format!("\n at {}", format_location(location, current_dir))
"\n at {}:{}:{}",
deno_path_util::resolve_url_or_path(
location.file.as_str(),
current_dir
)
.map(|url| deno_terminal::colors::cyan(url.into()))
.unwrap_or(deno_terminal::colors::cyan(location.file.clone())),
deno_terminal::colors::yellow(location.line),
deno_terminal::colors::yellow(location.column)
)
} else { } else {
String::new() String::new()
} }
} else { } else {
String::new() String::new()
},
if !message.notes.is_empty() {
let mut s = String::new();
for note in &message.notes {
s.push('\n');
s.push_str(&add_indent(&format_note(note, current_dir), " "));
}
s
} else {
String::new()
} }
) )
} }
@ -481,6 +565,11 @@ fn requested_type_from_map(
} }
} }
pub struct DeferredResolveError {
path: String,
error: ResolveWithGraphError,
}
pub struct DenoPluginHandler { pub struct DenoPluginHandler {
file_fetcher: Arc<CliFileFetcher>, file_fetcher: Arc<CliFileFetcher>,
resolver: Arc<CliResolver>, resolver: Arc<CliResolver>,
@ -490,6 +579,67 @@ pub struct DenoPluginHandler {
module_loader: Arc<CliDenoResolverModuleLoader>, module_loader: Arc<CliDenoResolverModuleLoader>,
externals_matcher: Option<ExternalsMatcher>, externals_matcher: Option<ExternalsMatcher>,
on_end_tx: tokio::sync::mpsc::Sender<esbuild_client::OnEndArgs>, on_end_tx: tokio::sync::mpsc::Sender<esbuild_client::OnEndArgs>,
deferred_resolve_errors: Arc<Mutex<Vec<DeferredResolveError>>>,
parsed_source_cache: Arc<ParsedSourceCache>,
cjs_tracker: Arc<CliCjsTracker>,
emitter: Arc<CliEmitter>,
}
impl DenoPluginHandler {
fn take_deferred_resolve_errors(&self) -> Vec<DeferredResolveError> {
std::mem::take(&mut *self.deferred_resolve_errors.lock())
}
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
enum PluginImportKind {
EntryPoint,
ImportStatement,
RequireCall,
DynamicImport,
RequireResolve,
ImportRule,
ComposesFrom,
UrlToken,
}
impl From<protocol::ImportKind> for PluginImportKind {
fn from(kind: protocol::ImportKind) -> Self {
match kind {
protocol::ImportKind::EntryPoint => PluginImportKind::EntryPoint,
protocol::ImportKind::ImportStatement => {
PluginImportKind::ImportStatement
}
protocol::ImportKind::RequireCall => PluginImportKind::RequireCall,
protocol::ImportKind::DynamicImport => PluginImportKind::DynamicImport,
protocol::ImportKind::RequireResolve => PluginImportKind::RequireResolve,
protocol::ImportKind::ImportRule => PluginImportKind::ImportRule,
protocol::ImportKind::ComposesFrom => PluginImportKind::ComposesFrom,
protocol::ImportKind::UrlToken => PluginImportKind::UrlToken,
}
}
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct PluginOnResolveArgs {
path: String,
importer: Option<String>,
kind: PluginImportKind,
namespace: Option<String>,
resolve_dir: Option<String>,
with: IndexMap<String, String>,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct PluginOnLoadArgs {
path: String,
namespace: String,
suffix: String,
with: IndexMap<String, String>,
} }
#[async_trait::async_trait(?Send)] #[async_trait::async_trait(?Send)]
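The PluginImportKind, PluginOnResolveArgs, and PluginOnLoadArgs types added above lean on serde's rename_all attributes so the plugin protocol sees kebab-case import kinds and camelCase field names. A small standalone illustration with made-up types (serde with the derive feature and serde_json assumed as dependencies):

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
enum ImportKind {
    EntryPoint,
    ImportStatement,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct OnResolveArgs {
    path: String,
    resolve_dir: Option<String>,
    kind: ImportKind,
}

fn main() {
    let args = vec![
        OnResolveArgs {
            path: "./main.ts".into(),
            resolve_dir: Some("/work".into()),
            kind: ImportKind::EntryPoint,
        },
        OnResolveArgs {
            path: "node:path".into(),
            resolve_dir: None,
            kind: ImportKind::ImportStatement,
        },
    ];
    // Prints kebab-case kinds ("entry-point", "import-statement")
    // and camelCase keys ("resolveDir").
    println!("{}", serde_json::to_string_pretty(&args).unwrap());
}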
@ -510,6 +660,7 @@ impl esbuild_client::PluginHandler for DenoPluginHandler {
..Default::default() ..Default::default()
})); }));
} }
let result = self.bundle_resolve( let result = self.bundle_resolve(
&args.path, &args.path,
args.importer.as_deref(), args.importer.as_deref(),
@ -523,7 +674,7 @@ impl esbuild_client::PluginHandler for DenoPluginHandler {
Err(e) => { Err(e) => {
return Ok(Some(esbuild_client::OnResolveResult { return Ok(Some(esbuild_client::OnResolveResult {
errors: Some(vec![esbuild_client::protocol::PartialMessage { errors: Some(vec![esbuild_client::protocol::PartialMessage {
id: "myerror".into(), id: "deno_error".into(),
plugin_name: "deno".into(), plugin_name: "deno".into(),
text: e.to_string(), text: e.to_string(),
..Default::default() ..Default::default()
@ -567,6 +718,7 @@ impl esbuild_client::PluginHandler for DenoPluginHandler {
&self, &self,
args: esbuild_client::OnLoadArgs, args: esbuild_client::OnLoadArgs,
) -> Result<Option<esbuild_client::OnLoadResult>, AnyError> { ) -> Result<Option<esbuild_client::OnLoadResult>, AnyError> {
log::debug!("{}: {args:?}", deno_terminal::colors::cyan("on_load"));
let result = self let result = self
.bundle_load(&args.path, &requested_type_from_map(&args.with)) .bundle_load(&args.path, &requested_type_from_map(&args.with))
.await; .await;
@ -618,7 +770,6 @@ impl esbuild_client::PluginHandler for DenoPluginHandler {
&self, &self,
_args: esbuild_client::OnEndArgs, _args: esbuild_client::OnEndArgs,
) -> Result<Option<esbuild_client::OnEndResult>, AnyError> { ) -> Result<Option<esbuild_client::OnEndResult>, AnyError> {
log::debug!("{}: {_args:?}", deno_terminal::colors::magenta("on_end"));
self.on_end_tx.send(_args).await?; self.on_end_tx.send(_args).await?;
Ok(None) Ok(None)
} }
@ -660,11 +811,27 @@ pub enum BundleLoadError {
#[error("UTF-8 conversion error")] #[error("UTF-8 conversion error")]
Utf8(#[from] std::str::Utf8Error), Utf8(#[from] std::str::Utf8Error),
#[class(generic)] #[class(generic)]
#[error("UTF-8 conversion error")]
StringUtf8(#[from] std::string::FromUtf8Error),
#[class(generic)]
#[error("Parse error")] #[error("Parse error")]
Parse(#[from] deno_ast::ParseDiagnostic), Parse(#[from] deno_ast::ParseDiagnostic),
#[class(generic)] #[class(generic)]
#[error("Emit error")] #[error("Emit error")]
Emit(#[from] deno_ast::EmitError), Emit(#[from] deno_ast::EmitError),
#[class(generic)]
#[error("Prepare module load error")]
PrepareModuleLoad(#[from] crate::module_loader::PrepareModuleLoadError),
#[class(generic)]
#[error("Package.json load error")]
PackageJsonLoadError(#[from] node_resolver::errors::PackageJsonLoadError),
#[class(generic)]
#[error("Emit parsed source helper error")]
EmitParsedSourceHelperError(
#[from] deno_resolver::emit::EmitParsedSourceHelperError,
),
} }
impl BundleLoadError { impl BundleLoadError {
@ -685,6 +852,40 @@ impl BundleLoadError {
} }
} }
fn maybe_ignorable_resolution_error(
error: &ResolveWithGraphError,
) -> Option<String> {
if let deno_resolver::graph::ResolveWithGraphErrorKind::Resolve(e) =
error.as_kind()
&& let deno_resolver::DenoResolveErrorKind::Node(node_err) = e.as_kind()
&& let node_resolver::errors::NodeResolveErrorKind::PackageResolve(pkg_err) =
node_err.as_kind()
&& let node_resolver::errors::PackageResolveErrorKind::PackageFolderResolve(
pkg_folder_err,
) = pkg_err.as_kind()
&& let node_resolver::errors::PackageFolderResolveErrorKind::PackageNotFound(
PackageNotFoundError { package_name, .. },
) = pkg_folder_err.as_kind()
{
Some(package_name.to_string())
} else if let deno_resolver::graph::ResolveWithGraphErrorKind::Resolution(
deno_graph::ResolutionError::ResolverError {
error: resolve_error,
specifier,
..
},
) = error.as_kind()
&& let deno_graph::source::ResolveError::ImportMap(import_map_err) =
resolve_error.deref()
&& let import_map::ImportMapErrorKind::UnmappedBareSpecifier(..) =
import_map_err.as_kind()
{
Some(specifier.to_string())
} else {
None
}
}
impl DenoPluginHandler { impl DenoPluginHandler {
async fn reload_specifiers( async fn reload_specifiers(
&self, &self,
@ -777,6 +978,22 @@ impl DenoPluginHandler {
Ok(specifier) => Ok(Some(file_path_or_url(specifier)?)), Ok(specifier) => Ok(Some(file_path_or_url(specifier)?)),
Err(e) => { Err(e) => {
log::debug!("{}: {:?}", deno_terminal::colors::red("error"), e); log::debug!("{}: {:?}", deno_terminal::colors::red("error"), e);
if let Some(specifier) = maybe_ignorable_resolution_error(&e) {
log::debug!(
"{}: resolution failed, but maybe ignorable",
deno_terminal::colors::red("warn")
);
self
.deferred_resolve_errors
.lock()
.push(DeferredResolveError {
path: specifier,
error: e,
});
// we return None here because this lets esbuild choose to ignore the failure
// for fallible imports/requires
return Ok(None);
}
Err(BundleError::Resolver(e)) Err(BundleError::Resolver(e))
} }
} }
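The deferred-resolve-error flow above returns None to esbuild when a failure looks ignorable (a missing npm package or an unmapped bare import-map specifier), stashes the detailed error, and only surfaces it later if esbuild itself reports "Could not resolve ..." for the same path. A rough sketch of that defer-and-correlate pattern with plain types; the regex mirrors esbuild_resolve_error_path and the regex crate is an assumed dependency:

use std::sync::Mutex;

use regex::Regex;

struct DeferredError {
    path: String,
    detail: String,
}

#[derive(Default)]
struct Deferred(Mutex<Vec<DeferredError>>);

impl Deferred {
    fn push(&self, path: &str, detail: &str) {
        self.0.lock().unwrap().push(DeferredError {
            path: path.to_string(),
            detail: detail.to_string(),
        });
    }
    fn take(&self) -> Vec<DeferredError> {
        std::mem::take(&mut *self.0.lock().unwrap())
    }
}

fn main() {
    let deferred = Deferred::default();
    // Resolution failed, but the import might be optional: defer instead of erroring.
    deferred.push("left-pad", "npm package 'left-pad' was not found");

    // Later, esbuild reports its own generic error for the same path.
    let esbuild_error = r#"Could not resolve "left-pad""#;
    let re = Regex::new(r#"^Could not resolve "([^"]+)"#).unwrap();
    let deferred_errors = deferred.take();
    if let Some(caps) = re.captures(esbuild_error) {
        let path = &caps[1];
        if let Some(d) = deferred_errors.iter().find(|d| d.path == path) {
            // Prefer the more detailed Deno-side error message.
            eprintln!("error: {}", d.detail);
        }
    }
}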
@ -785,7 +1002,7 @@ impl DenoPluginHandler {
async fn prepare_module_load( async fn prepare_module_load(
&self, &self,
specifiers: &[ModuleSpecifier], specifiers: &[ModuleSpecifier],
) -> Result<(), AnyError> { ) -> Result<(), BundleLoadError> {
let mut graph_permit = let mut graph_permit =
self.module_graph_container.acquire_update_permit().await; self.module_graph_container.acquire_update_permit().await;
let graph: &mut deno_graph::ModuleGraph = graph_permit.graph_mut(); let graph: &mut deno_graph::ModuleGraph = graph_permit.graph_mut();
@ -825,23 +1042,15 @@ impl DenoPluginHandler {
specifier, specifier,
Path::new(""), // should be absolute already, feels kind of hacky though Path::new(""), // should be absolute already, feels kind of hacky though
)?; )?;
let (specifier, media_type, loader) = let (specifier, media_type) =
if let RequestedModuleType::Bytes = requested_type { if let RequestedModuleType::Bytes = requested_type {
( (specifier, MediaType::Unknown)
specifier,
MediaType::Unknown,
esbuild_client::BuiltinLoader::Binary,
)
} else if let RequestedModuleType::Text = requested_type { } else if let RequestedModuleType::Text = requested_type {
( (specifier, MediaType::Unknown)
specifier, } else if let Some((specifier, media_type, _)) =
MediaType::Unknown,
esbuild_client::BuiltinLoader::Text,
)
} else if let Some((specifier, media_type, loader)) =
self.specifier_and_type_from_graph(&specifier)? self.specifier_and_type_from_graph(&specifier)?
{ {
(specifier, media_type, loader) (specifier, media_type)
} else { } else {
log::debug!( log::debug!(
"{}: no specifier and type from graph for {}", "{}: no specifier and type from graph for {}",
@ -863,13 +1072,76 @@ impl DenoPluginHandler {
if media_type == deno_media_type::MediaType::Unknown { if media_type == deno_media_type::MediaType::Unknown {
return Ok(None); return Ok(None);
} }
(specifier, media_type, media_type_to_loader(media_type)) (specifier, media_type)
}; };
let graph = self.module_graph_container.graph(); let graph = self.module_graph_container.graph();
let module_or_asset = self let module_or_asset = self
.module_loader .module_loader
.load(&graph, &specifier, None, requested_type) .load(&graph, &specifier, None, requested_type)
.await?; .await;
let module_or_asset = match module_or_asset {
Ok(module_or_asset) => module_or_asset,
Err(e) => match e.as_kind() {
LoadCodeSourceErrorKind::LoadUnpreparedModule(_) => {
let file = self
.file_fetcher
.fetch(&specifier, &self.permissions)
.await?;
let media_type = MediaType::from_specifier_and_headers(
&specifier,
file.maybe_headers.as_ref(),
);
match requested_type {
RequestedModuleType::Text | RequestedModuleType::Bytes => {
return self
.create_module_response(
&graph,
&specifier,
media_type,
&file.source,
Some(requested_type),
)
.await
.map(Some);
}
RequestedModuleType::None
| RequestedModuleType::Json
| RequestedModuleType::Other(_) => {
if media_type.is_emittable() {
let str = String::from_utf8_lossy(&file.source);
let value = str.into();
let source = self
.maybe_transpile(&file.url, media_type, &value, None)
.await?;
return self
.create_module_response(
&graph,
&file.url,
media_type,
source.as_bytes(),
Some(requested_type),
)
.await
.map(Some);
} else {
return self
.create_module_response(
&graph,
&file.url,
media_type,
&file.source,
Some(requested_type),
)
.await
.map(Some);
}
}
}
}
_ => return Err(e.into()),
},
};
let loaded_code = match module_or_asset { let loaded_code = match module_or_asset {
LoadedModuleOrAsset::Module(loaded_module) => loaded_module.source, LoadedModuleOrAsset::Module(loaded_module) => loaded_module.source,
LoadedModuleOrAsset::ExternalAsset { LoadedModuleOrAsset::ExternalAsset {
@ -884,6 +1156,40 @@ impl DenoPluginHandler {
), ),
}; };
Ok(Some(
self
.create_module_response(
&graph,
&specifier,
media_type,
loaded_code.as_bytes(),
Some(requested_type),
)
.await?,
))
}
async fn create_module_response(
&self,
graph: &deno_graph::ModuleGraph,
specifier: &Url,
media_type: MediaType,
source: &[u8],
requested_type: Option<&RequestedModuleType<'_>>,
) -> Result<(Vec<u8>, esbuild_client::BuiltinLoader), BundleLoadError> {
match requested_type {
Some(RequestedModuleType::Text) => {
return Ok((source.to_vec(), esbuild_client::BuiltinLoader::Text));
}
Some(RequestedModuleType::Bytes) => {
return Ok((source.to_vec(), esbuild_client::BuiltinLoader::Binary));
}
Some(RequestedModuleType::Json) => {
return Ok((source.to_vec(), esbuild_client::BuiltinLoader::Json));
}
Some(RequestedModuleType::Other(_) | RequestedModuleType::None)
| None => {}
}
if matches!( if matches!(
media_type, media_type,
MediaType::JavaScript MediaType::JavaScript
@ -894,32 +1200,66 @@ impl DenoPluginHandler {
| MediaType::Cts | MediaType::Cts
| MediaType::Jsx | MediaType::Jsx
| MediaType::Tsx | MediaType::Tsx
) && !graph.roots.contains(&specifier) ) && !graph.roots.contains(specifier)
{ {
let code = self.apply_transform( let module_graph_container = self.module_graph_container.clone();
&specifier, let specifier = specifier.clone();
media_type, let code = source.to_vec();
std::str::from_utf8(loaded_code.as_bytes())?, let code = tokio::task::spawn_blocking(move || {
)?; Self::apply_transform(
Ok(Some((code.into_bytes(), loader))) &module_graph_container,
&specifier,
media_type,
&String::from_utf8(code)?,
)
})
.await
.unwrap()?;
Ok((code.into_bytes(), media_type_to_loader(media_type)))
} else { } else {
Ok(Some((loaded_code.as_bytes().to_vec(), loader))) Ok((source.to_vec(), media_type_to_loader(media_type)))
} }
} }
async fn maybe_transpile(
&self,
specifier: &Url,
media_type: MediaType,
source: &Arc<str>,
is_known_script: Option<bool>,
) -> Result<Arc<str>, BundleLoadError> {
let parsed_source = self.parsed_source_cache.get_matching_parsed_source(
specifier,
media_type,
source.clone(),
)?;
let is_cjs = if let Some(is_known_script) = is_known_script {
self.cjs_tracker.is_cjs_with_known_is_script(
specifier,
media_type,
is_known_script,
)?
} else {
self.cjs_tracker.is_maybe_cjs(specifier, media_type)?
&& parsed_source.compute_is_script()
};
let module_kind = ModuleKind::from_is_cjs(is_cjs);
let source = self
.emitter
.maybe_emit_parsed_source(parsed_source, module_kind)
.await?;
Ok(source)
}
#[allow(clippy::result_large_err)] #[allow(clippy::result_large_err)]
fn apply_transform( fn apply_transform(
&self, module_graph_container: &MainModuleGraphContainer,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
media_type: deno_ast::MediaType, media_type: deno_ast::MediaType,
code: &str, code: &str,
) -> Result<String, BundleLoadError> { ) -> Result<String, BundleLoadError> {
let mut transform = transform::BundleImportMetaMainTransform::new( let mut transform = transform::BundleImportMetaMainTransform::new(
self module_graph_container.graph().roots.contains(specifier),
.module_graph_container
.graph()
.roots
.contains(specifier),
); );
let parsed_source = deno_ast::parse_program_with_post_process( let parsed_source = deno_ast::parse_program_with_post_process(
deno_ast::ParseParams { deno_ast::ParseParams {
@ -1093,9 +1433,12 @@ fn resolve_roots(
let package_folder = npm_resolver let package_folder = npm_resolver
.resolve_pkg_folder_from_deno_module_req(v.req(), &referrer) .resolve_pkg_folder_from_deno_module_req(v.req(), &referrer)
.unwrap(); .unwrap();
let main_module = node_resolver let Ok(main_module) =
.resolve_binary_export(&package_folder, v.sub_path()) node_resolver.resolve_binary_export(&package_folder, v.sub_path())
.unwrap(); else {
roots.push(url);
continue;
};
Url::from_file_path(&main_module).unwrap() Url::from_file_path(&main_module).unwrap()
} }
_ => url, _ => url,
@ -1165,20 +1508,46 @@ fn configure_esbuild_flags(bundle_flags: &BundleFlags) -> EsbuildFlags {
builder.metafile(true); builder.metafile(true);
match bundle_flags.platform { match bundle_flags.platform {
crate::args::BundlePlatform::Browser => { deno_bundle_runtime::BundlePlatform::Browser => {
builder.platform(esbuild_client::Platform::Browser); builder.platform(esbuild_client::Platform::Browser);
} }
crate::args::BundlePlatform::Deno => {} deno_bundle_runtime::BundlePlatform::Deno => {}
} }
builder.build().unwrap() builder.build().unwrap()
} }
// extract the path from a message like "Could not resolve "path/to/file.ts""
fn esbuild_resolve_error_path(
error: &esbuild_client::protocol::Message,
) -> Option<String> {
let re = lazy_regex::regex!(r#"^Could not resolve "([^"]+)"#);
re.captures(error.text.as_str())
.map(|captures| captures.get(1).unwrap().as_str().to_string())
}
fn handle_esbuild_errors_and_warnings( fn handle_esbuild_errors_and_warnings(
response: &BuildResponse, response: &BuildResponse,
init_cwd: &Path, init_cwd: &Path,
deferred_resolve_errors: &[DeferredResolveError],
) { ) {
for error in &response.errors { for error in &response.errors {
if let Some(path) = esbuild_resolve_error_path(error)
&& let Some(deferred_resolve_error) =
deferred_resolve_errors.iter().find(|e| e.path == path)
{
let error = protocol::Message {
// use our own error message, as it has more detail
text: deferred_resolve_error.error.to_string(),
..error.clone()
};
log::error!(
"{}: {}",
deno_terminal::colors::red_bold("error"),
format_message(&error, init_cwd)
);
continue;
}
log::error!( log::error!(
"{}: {}", "{}: {}",
deno_terminal::colors::red_bold("error"), deno_terminal::colors::red_bold("error"),
@ -1206,12 +1575,43 @@ fn is_js(path: &Path) -> bool {
} }
} }
struct OutputFileInfo { pub struct OutputFileInfo {
relative_path: PathBuf, relative_path: PathBuf,
size: usize, size: usize,
is_js: bool, is_js: bool,
} }
fn process_result(
pub struct ProcessedContents {
contents: Option<Vec<u8>>,
is_js: bool,
}
pub fn maybe_process_contents(
file: &esbuild_client::protocol::BuildOutputFile,
should_replace_require_shim: bool,
minified: bool,
) -> Result<ProcessedContents, AnyError> {
let path = Path::new(&file.path);
let is_js = is_js(path) || file.path.ends_with("<stdout>");
if is_js {
let string = String::from_utf8(file.contents.clone())?;
let string = if should_replace_require_shim {
replace_require_shim(&string, minified)
} else {
string
};
Ok(ProcessedContents {
contents: Some(string.into_bytes()),
is_js,
})
} else {
Ok(ProcessedContents {
contents: None,
is_js,
})
}
}
pub fn process_result(
response: &BuildResponse, response: &BuildResponse,
cwd: &Path, cwd: &Path,
should_replace_require_shim: bool, should_replace_require_shim: bool,
@ -1225,21 +1625,16 @@ fn process_result(
.unwrap_or_default(); .unwrap_or_default();
let mut output_infos = Vec::new(); let mut output_infos = Vec::new();
for file in output_files.iter() { for file in output_files.iter() {
let processed_contents =
maybe_process_contents(file, should_replace_require_shim, minified)?;
let path = Path::new(&file.path); let path = Path::new(&file.path);
let relative_path = let relative_path =
pathdiff::diff_paths(path, cwd).unwrap_or_else(|| path.to_path_buf()); pathdiff::diff_paths(path, cwd).unwrap_or_else(|| path.to_path_buf());
let is_js = is_js(path); let is_js = processed_contents.is_js;
let bytes = if is_js || file.path.ends_with("<stdout>") { let bytes = processed_contents
let string = String::from_utf8(file.contents.clone())?; .contents
let string = if should_replace_require_shim { .map(Cow::Owned)
replace_require_shim(&string, minified) .unwrap_or_else(|| Cow::Borrowed(&file.contents));
} else {
string
};
Cow::Owned(string.into_bytes())
} else {
Cow::Borrowed(&file.contents)
};
if file.path.ends_with("<stdout>") { if file.path.ends_with("<stdout>") {
crate::display::write_to_stdout_ignore_sigpipe(bytes.as_slice())?; crate::display::write_to_stdout_ignore_sigpipe(bytes.as_slice())?;
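maybe_process_contents above only rewrites JavaScript outputs (to patch the require shim) and leaves every other output file untouched, while process_result borrows unmodified contents through a Cow instead of cloning them. A tiny illustration of that borrow-or-own shape, with a simplified stand-in for the shim replacement:

use std::borrow::Cow;

fn maybe_rewrite(path: &str, contents: &[u8]) -> Option<Vec<u8>> {
    // Only JS files get post-processed; everything else passes through untouched.
    if path.ends_with(".js") {
        let text = String::from_utf8_lossy(contents).replace("__require", "require");
        Some(text.into_bytes())
    } else {
        None
    }
}

fn main() {
    let files = [
        ("bundle.js", b"__require('x')".to_vec()),
        ("data.wasm", vec![0, 1, 2]),
    ];
    for (path, contents) in &files {
        let bytes: Cow<[u8]> = match maybe_rewrite(path, contents) {
            Some(owned) => Cow::Owned(owned),
            None => Cow::Borrowed(contents.as_slice()),
        };
        println!("{path}: {} bytes written", bytes.len());
    }
}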


@ -0,0 +1,171 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use deno_bundle_runtime as rt_bundle;
use deno_bundle_runtime::BundleOptions as RtBundleOptions;
use deno_bundle_runtime::BundleProvider;
use deno_core::error::AnyError;
use crate::args::DenoSubcommand;
use crate::args::Flags;
pub struct CliBundleProvider {
flags: Arc<Flags>,
}
impl CliBundleProvider {
pub fn new(flags: Arc<Flags>) -> Self {
Self { flags }
}
}
impl From<RtBundleOptions> for crate::args::BundleFlags {
fn from(value: RtBundleOptions) -> Self {
Self {
entrypoints: value.entrypoints,
output_path: value.output_path,
output_dir: value.output_dir,
external: value.external,
format: value.format,
minify: value.minify,
code_splitting: value.code_splitting,
platform: value.platform,
watch: false,
sourcemap: value.sourcemap,
inline_imports: value.inline_imports,
packages: value.packages,
}
}
}
fn convert_note(note: esbuild_client::protocol::Note) -> rt_bundle::Note {
rt_bundle::Note {
text: note.text,
location: note.location.map(convert_location),
}
}
fn convert_location(
location: esbuild_client::protocol::Location,
) -> rt_bundle::Location {
rt_bundle::Location {
file: location.file,
namespace: Some(location.namespace),
line: location.line,
column: location.column,
length: Some(location.length),
suggestion: Some(location.suggestion),
}
}
fn convert_message(
message: esbuild_client::protocol::Message,
) -> rt_bundle::Message {
rt_bundle::Message {
text: message.text,
location: message.location.map(convert_location),
notes: message.notes.into_iter().map(convert_note).collect(),
}
}
fn convert_build_output_file(
file: esbuild_client::protocol::BuildOutputFile,
) -> rt_bundle::BuildOutputFile {
rt_bundle::BuildOutputFile {
path: file.path,
contents: Some(file.contents),
hash: file.hash,
}
}
pub fn convert_build_response(
response: esbuild_client::protocol::BuildResponse,
) -> rt_bundle::BuildResponse {
rt_bundle::BuildResponse {
errors: response.errors.into_iter().map(convert_message).collect(),
warnings: response.warnings.into_iter().map(convert_message).collect(),
output_files: response
.output_files
.map(|files| files.into_iter().map(convert_build_output_file).collect()),
}
}
fn process_output_files(
bundle_flags: &crate::args::BundleFlags,
response: &mut esbuild_client::protocol::BuildResponse,
) -> Result<(), AnyError> {
if let Some(files) = &mut response.output_files {
for file in files {
let processed_contents = crate::tools::bundle::maybe_process_contents(
file,
crate::tools::bundle::should_replace_require_shim(
bundle_flags.platform,
),
bundle_flags.minify,
)?;
if let Some(contents) = processed_contents.contents {
file.contents = contents;
}
}
}
Ok(())
}
#[async_trait::async_trait]
impl BundleProvider for CliBundleProvider {
async fn bundle(
&self,
options: RtBundleOptions,
) -> Result<rt_bundle::BuildResponse, AnyError> {
let mut flags_clone = (*self.flags).clone();
flags_clone.type_check_mode = crate::args::TypeCheckMode::None;
let write_output = options.write
&& (options.output_dir.is_some() || options.output_path.is_some());
let bundle_flags: crate::args::BundleFlags = options.into();
flags_clone.subcommand = DenoSubcommand::Bundle(bundle_flags.clone());
let (tx, rx) = tokio::sync::oneshot::channel();
std::thread::spawn(move || {
deno_runtime::tokio_util::create_and_run_current_thread(async move {
let flags = Arc::new(flags_clone);
let bundler = match super::bundle_init(flags, &bundle_flags).await {
Ok(bundler) => bundler,
Err(e) => {
log::trace!("bundle_init error: {e:?}");
let _ = tx.send(Err(e));
return Ok(());
}
};
log::trace!("bundler.build");
let mut result = match bundler.build().await {
Ok(result) => result,
Err(e) => {
log::trace!("bundler.build error: {e:?}");
let _ = tx.send(Err(e));
return Ok(());
}
};
log::trace!("process_result");
if write_output {
super::process_result(
&result,
&bundler.cwd,
true,
bundle_flags.minify,
)?;
result.output_files = None;
} else {
process_output_files(&bundle_flags, &mut result)?;
}
log::trace!("convert_build_response");
let result = convert_build_response(result);
log::trace!("send result");
let _ = tx.send(Ok(result));
Ok::<_, AnyError>(())
})
});
log::trace!("rx.await");
let response = rx.await??;
log::trace!("response: {:?}", response);
Ok(response)
}
}
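CliBundleProvider::bundle above runs the actual bundling on a dedicated thread with a current-thread Tokio runtime and hands the result back over a oneshot channel, which keeps the non-Send esbuild future off the caller's executor. A reduced sketch of that shape (tokio assumed as a dependency; the real code goes through deno_runtime::tokio_util::create_and_run_current_thread rather than building the runtime by hand):

use tokio::sync::oneshot;

async fn build() -> Result<String, String> {
    // Stand-in for bundler.build(); in the real code this future is not Send.
    Ok("bundled output".to_string())
}

#[tokio::main]
async fn main() {
    let (tx, rx) = oneshot::channel();
    std::thread::spawn(move || {
        let runtime = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap();
        // Run the bundling future to completion on this thread only,
        // then report the outcome back to the async caller.
        let result = runtime.block_on(build());
        let _ = tx.send(result);
    });
    match rx.await {
        Ok(Ok(output)) => println!("{output}"),
        Ok(Err(err)) => eprintln!("bundle failed: {err}"),
        Err(_) => eprintln!("bundling thread dropped the sender"),
    }
}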


@ -31,26 +31,10 @@ use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::BuildGraphRequest; use crate::graph_util::BuildGraphRequest;
use crate::graph_util::BuildGraphWithNpmOptions; use crate::graph_util::BuildGraphWithNpmOptions;
use crate::sys::CliSys; use crate::sys::CliSys;
use crate::util::fs::FsCleaner;
use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressBarStyle;
use crate::util::progress_bar::ProgressMessagePrompt; use crate::util::progress_bar::ProgressMessagePrompt;
use crate::util::progress_bar::UpdateGuard;
#[derive(Default)]
struct CleanState {
files_removed: u64,
dirs_removed: u64,
bytes_removed: u64,
progress_guard: Option<UpdateGuard>,
}
impl CleanState {
fn update_progress(&self) {
if let Some(pg) = &self.progress_guard {
pg.set_position(self.files_removed + self.dirs_removed);
}
}
}
pub async fn clean( pub async fn clean(
flags: Arc<Flags>, flags: Arc<Flags>,
@ -69,17 +53,12 @@ pub async fn clean(
let progress_guard = let progress_guard =
progress_bar.update_with_prompt(ProgressMessagePrompt::Cleaning, ""); progress_bar.update_with_prompt(ProgressMessagePrompt::Cleaning, "");
progress_guard.set_total_size(no_of_files.try_into().unwrap()); progress_guard.set_total_size(no_of_files.try_into().unwrap());
let mut state = CleanState { let mut cleaner = FsCleaner::new(Some(progress_guard));
files_removed: 0,
dirs_removed: 0,
bytes_removed: 0,
progress_guard: Some(progress_guard),
};
rm_rf(&mut state, &deno_dir.root)?; cleaner.rm_rf(&deno_dir.root)?;
// Drop the guard so that progress bar disappears. // Drop the guard so that progress bar disappears.
drop(state.progress_guard); drop(cleaner.progress_guard);
log::info!( log::info!(
"{} {} {}", "{} {} {}",
@ -87,8 +66,8 @@ pub async fn clean(
deno_dir.root.display(), deno_dir.root.display(),
colors::gray(&format!( colors::gray(&format!(
"({} files, {})", "({} files, {})",
state.files_removed + state.dirs_removed, cleaner.files_removed + cleaner.dirs_removed,
display::human_size(state.bytes_removed as f64) display::human_size(cleaner.bytes_removed as f64)
)) ))
); );
} }
@ -202,7 +181,7 @@ async fn clean_except(
entrypoints: &[String], entrypoints: &[String],
dry_run: bool, dry_run: bool,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
let mut state = CleanState::default(); let mut state = FsCleaner::default();
let factory = CliFactory::from_flags(flags.clone()); let factory = CliFactory::from_flags(flags.clone());
let sys = factory.sys(); let sys = factory.sys();
@ -345,7 +324,7 @@ async fn clean_except(
&deno_dir.root, &deno_dir.root,
dry_run, dry_run,
)?; )?;
let mut node_modules_cleaned = CleanState::default(); let mut node_modules_cleaned = FsCleaner::default();
if let Some(dir) = node_modules_path { if let Some(dir) = node_modules_path {
// let npm_installer = factory.npm_installer_if_managed().await?.unwrap(); // let npm_installer = factory.npm_installer_if_managed().await?.unwrap();
@ -359,7 +338,7 @@ async fn clean_except(
)?; )?;
} }
let mut vendor_cleaned = CleanState::default(); let mut vendor_cleaned = FsCleaner::default();
if let Some(vendor_dir) = options.vendor_dir_path() if let Some(vendor_dir) = options.vendor_dir_path()
&& let GlobalOrLocalHttpCache::Local(cache) = local_or_global_http_cache && let GlobalOrLocalHttpCache::Local(cache) = local_or_global_http_cache
{ {
@ -408,10 +387,10 @@ async fn clean_except(
Ok(()) Ok(())
} }
fn log_stats(state: &CleanState, dir: &Path) { fn log_stats(cleaner: &FsCleaner, dir: &Path) {
if state.bytes_removed == 0 if cleaner.bytes_removed == 0
&& state.dirs_removed == 0 && cleaner.dirs_removed == 0
&& state.files_removed == 0 && cleaner.files_removed == 0
{ {
return; return;
} }
@ -420,8 +399,8 @@ fn log_stats(state: &CleanState, dir: &Path) {
colors::green("Removed"), colors::green("Removed"),
colors::gray(&format!( colors::gray(&format!(
"{} files, {} from {}", "{} files, {} from {}",
state.files_removed + state.dirs_removed, cleaner.files_removed + cleaner.dirs_removed,
display::human_size(state.bytes_removed as f64), display::human_size(cleaner.bytes_removed as f64),
dir.display() dir.display()
)) ))
); );
@ -451,7 +430,7 @@ fn add_jsr_meta_paths(
// TODO(nathanwhit): use strategy pattern instead of branching on dry_run // TODO(nathanwhit): use strategy pattern instead of branching on dry_run
fn walk_removing( fn walk_removing(
state: &mut CleanState, cleaner: &mut FsCleaner,
walker: WalkDir, walker: WalkDir,
trie: &PathTrie, trie: &PathTrie,
base: &Path, base: &Path,
@ -481,7 +460,7 @@ fn walk_removing(
eprintln!(" {}", entry.path().display()); eprintln!(" {}", entry.path().display());
} }
} else { } else {
rm_rf(state, entry.path())?; cleaner.rm_rf(entry.path())?;
} }
walker.skip_current_dir(); walker.skip_current_dir();
} else if dry_run { } else if dry_run {
@ -490,7 +469,7 @@ fn walk_removing(
eprintln!(" {}", entry.path().display()); eprintln!(" {}", entry.path().display());
} }
} else { } else {
remove_file(state, entry.path(), Some(entry.metadata()?))?; cleaner.remove_file(entry.path(), Some(entry.metadata()?))?;
} }
} }
@ -498,7 +477,7 @@ fn walk_removing(
} }
fn clean_node_modules( fn clean_node_modules(
state: &mut CleanState, cleaner: &mut FsCleaner,
keep_pkgs: &HashSet<deno_npm::NpmPackageCacheFolderId>, keep_pkgs: &HashSet<deno_npm::NpmPackageCacheFolderId>,
dir: &Path, dir: &Path,
dry_run: bool, dry_run: bool,
@ -558,20 +537,26 @@ fn clean_node_modules(
eprintln!(" {}", entry.path().display()); eprintln!(" {}", entry.path().display());
} }
} else { } else {
rm_rf(state, &entry.path())?; cleaner.rm_rf(&entry.path())?;
} }
} }
// remove top level symlinks from node_modules/<package> to node_modules/.deno/<package> // remove top level symlinks from node_modules/<package> to node_modules/.deno/<package>
// where the target doesn't exist (because it was removed above) // where the target doesn't exist (because it was removed above)
clean_node_modules_symlinks(state, &keep_names, dir, dry_run, &mut |name| { clean_node_modules_symlinks(
setup_cache.remove_root_symlink(name); cleaner,
})?; &keep_names,
dir,
dry_run,
&mut |name| {
setup_cache.remove_root_symlink(name);
},
)?;
// remove symlinks from node_modules/.deno/node_modules/<package> to node_modules/.deno/<package> // remove symlinks from node_modules/.deno/node_modules/<package> to node_modules/.deno/<package>
// where the target doesn't exist (because it was removed above) // where the target doesn't exist (because it was removed above)
clean_node_modules_symlinks( clean_node_modules_symlinks(
state, cleaner,
&keep_names, &keep_names,
&base.join("node_modules"), &base.join("node_modules"),
dry_run, dry_run,
@ -598,7 +583,7 @@ fn node_modules_package_actual_dir_to_name(
} }
fn clean_node_modules_symlinks( fn clean_node_modules_symlinks(
state: &mut CleanState, cleaner: &mut FsCleaner,
keep_names: &HashSet<String>, keep_names: &HashSet<String>,
dir: &Path, dir: &Path,
dry_run: bool, dry_run: bool,
@ -620,7 +605,7 @@ fn clean_node_modules_symlinks(
} }
} else { } else {
on_remove(&name); on_remove(&name);
remove_file(state, &entry.path(), None)?; cleaner.remove_file(&entry.path(), None)?;
} }
} }
} }
@ -628,51 +613,6 @@ fn clean_node_modules_symlinks(
Ok(()) Ok(())
} }
fn rm_rf(state: &mut CleanState, path: &Path) -> Result<(), AnyError> {
for entry in walkdir::WalkDir::new(path).contents_first(true) {
let entry = entry?;
if entry.file_type().is_dir() {
state.dirs_removed += 1;
state.update_progress();
std::fs::remove_dir_all(entry.path())?;
} else {
remove_file(state, entry.path(), entry.metadata().ok())?;
}
}
Ok(())
}
fn remove_file(
state: &mut CleanState,
path: &Path,
meta: Option<std::fs::Metadata>,
) -> Result<(), AnyError> {
if let Some(meta) = meta {
state.bytes_removed += meta.len();
}
state.files_removed += 1;
state.update_progress();
match std::fs::remove_file(path)
.with_context(|| format!("Failed to remove file: {}", path.display()))
{
Err(e) => {
if cfg!(windows)
&& let Ok(meta) = path.symlink_metadata()
&& meta.is_symlink()
{
std::fs::remove_dir(path).with_context(|| {
format!("Failed to remove symlink: {}", path.display())
})?;
return Ok(());
}
Err(e)
}
_ => Ok(()),
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::path::Path; use std::path::Path;
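The clean command now delegates to a shared FsCleaner in crate::util::fs instead of its local CleanState, rm_rf, and remove_file helpers; the moved code still walks contents-first and tallies files, directories, and bytes while driving the progress bar. A stripped-down sketch of that kind of helper (walkdir assumed; no progress-bar or Windows symlink handling):

use std::path::Path;

use walkdir::WalkDir;

#[derive(Default)]
struct Cleaner {
    files_removed: u64,
    dirs_removed: u64,
    bytes_removed: u64,
}

impl Cleaner {
    fn rm_rf(&mut self, path: &Path) -> Result<(), Box<dyn std::error::Error>> {
        // contents_first yields children before their parent directory,
        // so every directory is empty by the time we remove it.
        for entry in WalkDir::new(path).contents_first(true) {
            let entry = entry?;
            if entry.file_type().is_dir() {
                std::fs::remove_dir(entry.path())?;
                self.dirs_removed += 1;
            } else {
                if let Ok(meta) = entry.metadata() {
                    self.bytes_removed += meta.len();
                }
                std::fs::remove_file(entry.path())?;
                self.files_removed += 1;
            }
        }
        Ok(())
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let dir = std::env::temp_dir().join("cleaner_demo");
    std::fs::create_dir_all(dir.join("sub"))?;
    std::fs::write(dir.join("sub").join("file.txt"), b"hello")?;
    let mut cleaner = Cleaner::default();
    cleaner.rm_rf(&dir)?;
    println!(
        "removed {} files / {} dirs / {} bytes",
        cleaner.files_removed, cleaner.dirs_removed, cleaner.bytes_removed
    );
    Ok(())
}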


@ -204,13 +204,13 @@ pub async fn compile_eszip(
}; };
let transpile_and_emit_options = compiler_options_resolver let transpile_and_emit_options = compiler_options_resolver
.for_specifier(cli_options.workspace().root_dir()) .for_specifier(cli_options.workspace().root_dir_url())
.transpile_options()?; .transpile_options()?;
let transpile_options = transpile_and_emit_options.transpile.clone(); let transpile_options = transpile_and_emit_options.transpile.clone();
let emit_options = transpile_and_emit_options.emit.clone(); let emit_options = transpile_and_emit_options.emit.clone();
let parser = parsed_source_cache.as_capturing_parser(); let parser = parsed_source_cache.as_capturing_parser();
let root_dir_url = cli_options.workspace().root_dir(); let root_dir_url = cli_options.workspace().root_dir_url();
log::debug!("Binary root dir: {}", root_dir_url); log::debug!("Binary root dir: {}", root_dir_url);
let relative_file_base = eszip::EszipRelativeFileBaseUrl::new(root_dir_url); let relative_file_base = eszip::EszipRelativeFileBaseUrl::new(root_dir_url);
let mut eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions { let mut eszip = eszip::EszipV2::from_graph(eszip::FromGraphOptions {


@ -531,7 +531,7 @@ fn collect_coverages(
.ignore_git_folder() .ignore_git_folder()
.ignore_node_modules() .ignore_node_modules()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&CliSys::default(), file_patterns); .collect_file_patterns(&CliSys::default(), &file_patterns);
let coverage_patterns = FilePatterns { let coverage_patterns = FilePatterns {
base: initial_cwd.to_path_buf(), base: initial_cwd.to_path_buf(),


@ -237,7 +237,7 @@ fn collect_fmt_files(
.ignore_node_modules() .ignore_node_modules()
.use_gitignore() .use_gitignore()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&CliSys::default(), files) .collect_file_patterns(&CliSys::default(), &files)
} }
/// Formats markdown (using <https://github.com/dprint/dprint-plugin-markdown>) and its code blocks /// Formats markdown (using <https://github.com/dprint/dprint-plugin-markdown>) and its code blocks
@ -1398,6 +1398,12 @@ fn get_typescript_config_builder(
options.space_surrounding_properties options.space_surrounding_properties
{ {
builder.space_surrounding_properties(space_surrounding_properties); builder.space_surrounding_properties(space_surrounding_properties);
builder.import_declaration_space_surrounding_named_imports(
space_surrounding_properties,
);
builder.export_declaration_space_surrounding_named_exports(
space_surrounding_properties,
);
} }
builder builder


@ -1,9 +1,11 @@
// Copyright 2018-2025 the Deno authors. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::ffi::OsString;
use std::io::IsTerminal; use std::io::IsTerminal;
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::Path;
use chrono::NaiveDate;
use color_print::cformat; use color_print::cformat;
use color_print::cstr; use color_print::cstr;
use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::NodeModulesDirMode;
@ -18,9 +20,12 @@ use log::info;
use crate::args::DenoSubcommand; use crate::args::DenoSubcommand;
use crate::args::Flags; use crate::args::Flags;
use crate::args::InitFlags; use crate::args::InitFlags;
use crate::args::InternalFlags;
use crate::args::PermissionFlags; use crate::args::PermissionFlags;
use crate::args::RunFlags; use crate::args::RunFlags;
use crate::colors; use crate::colors;
use crate::util::fs::FsCleaner;
use crate::util::progress_bar::ProgressBar;
pub async fn init_project(init_flags: InitFlags) -> Result<i32, AnyError> { pub async fn init_project(init_flags: InitFlags) -> Result<i32, AnyError> {
if let Some(package) = &init_flags.package { if let Some(package) = &init_flags.package {
@ -313,7 +318,7 @@ async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
if std::io::stdin().is_terminal() { if std::io::stdin().is_terminal() {
log::info!( log::info!(
cstr!( cstr!(
"⚠️ Do you fully trust <y>{}</> package? Deno will invoke code from it with all permissions. Do you want to continue? <p(245)>[y/n]</>" "⚠️ Do you fully trust <y>{}</> package? Deno will invoke code from it with all permissions. Do you want to continue? <p(245)>[y/n]</>"
), ),
script_name script_name
); );
@ -334,6 +339,15 @@ async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
return Ok(print_manual_usage(&script_name, &args)); return Ok(print_manual_usage(&script_name, &args));
} }
let temp_node_modules_parent_dir = create_temp_node_modules_parent_dir()
.context("Failed creating temp directory for node_modules folder.")?;
let temp_node_modules_dir =
temp_node_modules_parent_dir.path().join("node_modules");
log::debug!(
"Creating node_modules directory at: {}",
temp_node_modules_dir.display()
);
let new_flags = Flags { let new_flags = Flags {
permissions: PermissionFlags { permissions: PermissionFlags {
allow_all: true, allow_all: true,
@ -347,16 +361,99 @@ async fn init_npm(name: &str, args: Vec<String>) -> Result<i32, AnyError> {
..Default::default() ..Default::default()
}), }),
reload: true, reload: true,
internal: InternalFlags {
lockfile_skip_write: true,
root_node_modules_dir_override: Some(temp_node_modules_dir),
..Default::default()
},
..Default::default() ..Default::default()
}; };
crate::tools::run::run_script( let result = crate::tools::run::run_script(
WorkerExecutionMode::Run, WorkerExecutionMode::Run,
new_flags.into(), new_flags.into(),
None, None,
None, None,
Default::default(), Default::default(),
) )
.await .await;
drop(temp_node_modules_parent_dir); // explicit drop for clarity
result
}
/// Creates a node_modules directory in a folder with the following format:
///
/// <tmp-dir>/deno_init_nm/<date>/<random-value>
///
/// Old folders are automatically deleted by this function.
fn create_temp_node_modules_parent_dir() -> Result<tempfile::TempDir, AnyError>
{
let root_temp_folder = std::env::temp_dir().join("deno_init_nm");
let today = chrono::Utc::now().date_naive();
// remove any old/stale temp dirs
if let Err(err) =
attempt_temp_dir_garbage_collection(&root_temp_folder, today)
{
log::debug!("Failed init temp folder garbage collection: {:#?}", err);
}
let day_folder = root_temp_folder.join(folder_name_for_date(today));
std::fs::create_dir_all(&day_folder)
.with_context(|| format!("Failed creating '{}'", day_folder.display()))?;
let temp_node_modules_parent_dir = tempfile::TempDir::new_in(&day_folder)?;
// write a package.json to make this be considered a "node" project to deno
let package_json_path =
temp_node_modules_parent_dir.path().join("package.json");
std::fs::write(&package_json_path, "{}").with_context(|| {
format!("Failed creating '{}'", package_json_path.display())
})?;
Ok(temp_node_modules_parent_dir)
}
fn attempt_temp_dir_garbage_collection(
root_temp_folder: &Path,
utc_now: NaiveDate,
) -> Result<(), AnyError> {
let previous_day_str = folder_name_for_date(
utc_now
.checked_sub_days(chrono::Days::new(1))
.unwrap_or(utc_now),
);
let current_day_str = folder_name_for_date(utc_now);
let next_day_str = folder_name_for_date(
utc_now
.checked_add_days(chrono::Days::new(1))
.unwrap_or(utc_now),
);
let progress_bar =
ProgressBar::new(crate::util::progress_bar::ProgressBarStyle::TextOnly);
let update_guard = progress_bar.deferred_update_with_prompt(
crate::util::progress_bar::ProgressMessagePrompt::Cleaning,
"old temp node_modules folders...",
);
// remove any folders that aren't the current date +- 1 day
let mut cleaner = FsCleaner::new(Some(update_guard));
for entry in std::fs::read_dir(root_temp_folder)? {
let Ok(entry) = entry else {
continue;
};
if entry.file_name() != previous_day_str
&& entry.file_name() != current_day_str
&& entry.file_name() != next_day_str
&& let Err(err) = cleaner.rm_rf(&entry.path())
{
log::debug!(
"Failed cleaning '{}': {:#?}",
entry.file_name().display(),
err
);
}
}
Ok(())
}
fn folder_name_for_date(date: chrono::NaiveDate) -> OsString {
OsString::from(date.format("%Y-%m-%d").to_string())
} }
fn create_json_file( fn create_json_file(
@ -394,6 +491,10 @@ fn create_file(
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use test_util::TempDir;
use super::attempt_temp_dir_garbage_collection;
use crate::tools::init::npm_name_to_create_package; use crate::tools::init::npm_name_to_create_package;
#[test] #[test]
@ -424,4 +525,44 @@ mod test {
"npm:@foo/create-bar@1.0.0".to_string() "npm:@foo/create-bar@1.0.0".to_string()
); );
} }
#[test]
fn test_attempt_temp_dir_garbage_collection() {
let temp_dir = TempDir::new();
let reference_date = chrono::NaiveDate::from_ymd_opt(2020, 5, 13).unwrap();
temp_dir.path().join("0000-00-00").create_dir_all();
temp_dir
.path()
.join("2020-05-01/sub_dir/sub")
.create_dir_all();
temp_dir
.path()
.join("2020-05-01/sub_dir/sub/test.txt")
.write("");
temp_dir.path().join("2020-05-02/sub_dir").create_dir_all();
temp_dir.path().join("2020-05-11").create_dir_all();
temp_dir.path().join("2020-05-12").create_dir_all();
temp_dir.path().join("2020-05-13").create_dir_all();
temp_dir.path().join("2020-05-14").create_dir_all();
temp_dir.path().join("2020-05-15").create_dir_all();
attempt_temp_dir_garbage_collection(
temp_dir.path().as_path(),
reference_date,
)
.unwrap();
let mut entries = std::fs::read_dir(temp_dir.path())
.unwrap()
.map(|e| e.unwrap().file_name().into_string().unwrap())
.collect::<Vec<_>>();
entries.sort();
// should only have the current day +- 1
assert_eq!(
entries,
vec![
"2020-05-12".to_string(),
"2020-05-13".to_string(),
"2020-05-14".to_string()
]
);
}
} }
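init_npm above now materializes node_modules inside a throwaway directory laid out as <tmp>/deno_init_nm/<date>/<random> and prunes date buckets that are not within one day of the current UTC date, which is what the new test exercises. A condensed sketch of that pruning logic (chrono assumed; plain remove_dir_all stands in for FsCleaner):

use std::path::Path;

use chrono::{Days, NaiveDate, Utc};

fn folder_name(date: NaiveDate) -> String {
    date.format("%Y-%m-%d").to_string()
}

fn prune_old_buckets(root: &Path, today: NaiveDate) -> std::io::Result<()> {
    // Keep today's bucket plus one day on either side to avoid races
    // around midnight or with a skewed clock.
    let keep: Vec<String> = [
        today.checked_sub_days(Days::new(1)).unwrap_or(today),
        today,
        today.checked_add_days(Days::new(1)).unwrap_or(today),
    ]
    .into_iter()
    .map(folder_name)
    .collect();
    for entry in std::fs::read_dir(root)? {
        let entry = entry?;
        let name = entry.file_name().to_string_lossy().into_owned();
        if !keep.contains(&name) {
            // Best effort: failing to delete one stale bucket is not fatal.
            let _ = std::fs::remove_dir_all(entry.path());
        }
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    let root = std::env::temp_dir().join("deno_init_nm_demo");
    std::fs::create_dir_all(root.join("2020-01-01"))?;
    let today = Utc::now().date_naive();
    std::fs::create_dir_all(root.join(folder_name(today)))?;
    prune_old_buckets(&root, today)?;
    Ok(())
}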


@ -20,9 +20,12 @@ use deno_core::anyhow::Context;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::url::Url; use deno_core::url::Url;
use deno_lib::args::CaData; use deno_lib::args::CaData;
use deno_npm_installer::lifecycle_scripts::LifecycleScriptsWarning;
use deno_path_util::resolve_url_or_path; use deno_path_util::resolve_url_or_path;
use deno_resolver::workspace::WorkspaceResolver;
use deno_semver::npm::NpmPackageReqReference; use deno_semver::npm::NpmPackageReqReference;
use log::Level; use log::Level;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
@ -46,7 +49,9 @@ use crate::file_fetcher::create_cli_file_fetcher;
use crate::graph_container::CollectSpecifiersOptions; use crate::graph_container::CollectSpecifiersOptions;
use crate::graph_container::ModuleGraphContainer; use crate::graph_container::ModuleGraphContainer;
use crate::jsr::JsrFetchResolver; use crate::jsr::JsrFetchResolver;
use crate::npm::CliNpmResolver;
use crate::npm::NpmFetchResolver; use crate::npm::NpmFetchResolver;
use crate::sys::CliSys;
use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::fs::canonicalize_path_maybe_not_exists;
mod bin_name_resolver; mod bin_name_resolver;
@ -117,14 +122,28 @@ impl std::fmt::Debug for InstallStats {
#[derive(Debug)] #[derive(Debug)]
pub struct InstallReporter { pub struct InstallReporter {
stats: Arc<InstallStats>, stats: Arc<InstallStats>,
scripts_warnings: Arc<Mutex<Vec<LifecycleScriptsWarning>>>,
deprecation_messages: Arc<Mutex<Vec<String>>>,
} }
impl InstallReporter { impl InstallReporter {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
stats: Arc::new(InstallStats::default()), stats: Arc::new(InstallStats::default()),
scripts_warnings: Arc::new(Mutex::new(Vec::new())),
deprecation_messages: Arc::new(Mutex::new(Vec::new())),
} }
} }
pub fn take_scripts_warnings(&self) -> Vec<LifecycleScriptsWarning> {
std::mem::take(&mut *self.scripts_warnings.lock())
}
pub fn take_deprecation_message(&self) -> Vec<String> {
std::mem::take(&mut *self.deprecation_messages.lock())
}
} }
impl deno_npm_installer::InstallProgressReporter for InstallReporter { impl deno_npm_installer::InstallProgressReporter for InstallReporter {
@ -140,6 +159,17 @@ impl deno_npm_installer::InstallProgressReporter for InstallReporter {
fn blocking(&self, _message: &str) { fn blocking(&self, _message: &str) {
// log::info!("blocking: {}", message); // log::info!("blocking: {}", message);
} }
fn scripts_not_run_warning(
&self,
warning: deno_npm_installer::lifecycle_scripts::LifecycleScriptsWarning,
) {
self.scripts_warnings.lock().push(warning);
}
fn deprecated_message(&self, message: String) {
self.deprecation_messages.lock().push(message);
}
} }
fn package_nv_from_url(url: &Url) -> Option<String> { fn package_nv_from_url(url: &Url) -> Option<String> {
@ -416,7 +446,15 @@ pub(crate) async fn install_from_entrypoints(
.await?; .await?;
emitter emitter
.cache_module_emits(&main_graph_container.graph()) .cache_module_emits(&main_graph_container.graph())
.await .await?;
print_install_report(
&factory.sys(),
&factory.install_reporter()?.unwrap().clone(),
factory.workspace_resolver().await?,
factory.npm_resolver().await?,
);
Ok(())
} }
async fn install_local( async fn install_local(
@ -437,31 +475,15 @@ async fn install_local(
} }
} }
async fn install_top_level(factory: &CliFactory) -> Result<(), AnyError> { pub fn print_install_report(
// surface any errors in the package.json sys: &dyn sys_traits::boxed::FsOpenBoxed,
factory install_reporter: &InstallReporter,
.npm_installer() workspace: &WorkspaceResolver<CliSys>,
.await? npm_resolver: &CliNpmResolver,
.ensure_no_pkg_json_dep_errors()?; ) {
let npm_installer = factory.npm_installer().await?;
npm_installer.ensure_no_pkg_json_dep_errors()?;
// set up the custom progress bar
let install_reporter = factory.install_reporter()?.unwrap().clone();
// the actual work
crate::tools::pm::cache_top_level_deps(factory, None).await?;
// compute the summary info // compute the summary info
let snapshot = factory let snapshot = npm_resolver.as_managed().unwrap().resolution().snapshot();
.npm_resolver()
.await?
.as_managed()
.unwrap()
.resolution()
.snapshot();
let workspace = factory.workspace_resolver().await?;
let top_level_packages = snapshot.top_level_packages(); let top_level_packages = snapshot.top_level_packages();
// all this nonsense is to categorize into normal and dev deps // all this nonsense is to categorize into normal and dev deps
@ -614,10 +636,43 @@ async fn install_top_level(factory: &CliFactory) -> Result<(), AnyError> {
} }
} }
let warnings = install_reporter.take_scripts_warnings();
for warning in warnings {
log::warn!("{}", warning.into_message(sys));
}
let deprecation_messages = install_reporter.take_deprecation_message();
for message in deprecation_messages {
log::warn!("{}", message);
}
}
async fn install_top_level(factory: &CliFactory) -> Result<(), AnyError> {
// surface any errors in the package.json
factory
.npm_installer()
.await?
.ensure_no_pkg_json_dep_errors()?;
let npm_installer = factory.npm_installer().await?;
npm_installer.ensure_no_pkg_json_dep_errors()?;
// the actual work
crate::tools::pm::cache_top_level_deps(factory, None).await?;
if let Some(lockfile) = factory.maybe_lockfile().await? { if let Some(lockfile) = factory.maybe_lockfile().await? {
lockfile.write_if_changed()?; lockfile.write_if_changed()?;
} }
let install_reporter = factory.install_reporter()?.unwrap().clone();
let workspace = factory.workspace_resolver().await?;
let npm_resolver = factory.npm_resolver().await?;
print_install_report(
&factory.sys(),
&install_reporter,
workspace,
npm_resolver,
);
Ok(()) Ok(())
} }
View file
@ -17,6 +17,7 @@ use deno_config::deno_json::LintRulesConfig;
use deno_config::glob::FileCollector; use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns; use deno_config::glob::FilePatterns;
use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDirectoryRc;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
@ -210,7 +211,7 @@ async fn lint_with_watch(
} }
struct PathsWithOptions { struct PathsWithOptions {
dir: WorkspaceDirectory, dir: WorkspaceDirectoryRc,
paths: Vec<PathBuf>, paths: Vec<PathBuf>,
options: LintOptions, options: LintOptions,
} }
@ -282,7 +283,7 @@ impl WorkspaceLinter {
&mut self, &mut self,
cli_options: &Arc<CliOptions>, cli_options: &Arc<CliOptions>,
lint_options: LintOptions, lint_options: LintOptions,
member_dir: WorkspaceDirectory, member_dir: WorkspaceDirectoryRc,
paths: Vec<PathBuf>, paths: Vec<PathBuf>,
) -> Result<(), AnyError> { ) -> Result<(), AnyError> {
self.file_count += paths.len(); self.file_count += paths.len();
@ -290,10 +291,9 @@ impl WorkspaceLinter {
let exclude = lint_options.rules.exclude.clone(); let exclude = lint_options.rules.exclude.clone();
let plugin_specifiers = lint_options.plugins.clone(); let plugin_specifiers = lint_options.plugins.clone();
let lint_rules = self.lint_rule_provider.resolve_lint_rules( let lint_rules = self
lint_options.rules, .lint_rule_provider
member_dir.maybe_deno_json().map(|c| c.as_ref()), .resolve_lint_rules(lint_options.rules, Some(&member_dir));
);
let mut maybe_incremental_cache = None; let mut maybe_incremental_cache = None;
@ -426,7 +426,7 @@ impl WorkspaceLinter {
fn run_package_rules( fn run_package_rules(
&mut self, &mut self,
linter: &Arc<CliLinter>, linter: &Arc<CliLinter>,
member_dir: &WorkspaceDirectory, member_dir: &WorkspaceDirectoryRc,
paths: &[PathBuf], paths: &[PathBuf],
) -> Option<LocalBoxFuture<'_, Result<(), AnyError>>> { ) -> Option<LocalBoxFuture<'_, Result<(), AnyError>>> {
if self.workspace_module_graph.is_none() { if self.workspace_module_graph.is_none() {
@ -499,7 +499,7 @@ fn collect_lint_files(
.ignore_node_modules() .ignore_node_modules()
.use_gitignore() .use_gitignore()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.collect_file_patterns(&CliSys::default(), files) .collect_file_patterns(&CliSys::default(), &files)
} }
#[allow(clippy::print_stdout)] #[allow(clippy::print_stdout)]
@ -588,10 +588,8 @@ fn lint_stdin(
let deno_lint_config = let deno_lint_config =
resolve_lint_config(compiler_options_resolver, start_dir.dir_url())?; resolve_lint_config(compiler_options_resolver, start_dir.dir_url())?;
let lint_options = LintOptions::resolve(lint_config, &lint_flags)?; let lint_options = LintOptions::resolve(lint_config, &lint_flags)?;
let configured_rules = lint_rule_provider.resolve_lint_rules_err_empty( let configured_rules = lint_rule_provider
lint_options.rules, .resolve_lint_rules_err_empty(lint_options.rules, Some(start_dir))?;
start_dir.maybe_deno_json().map(|c| c.as_ref()),
)?;
let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME); let mut file_path = cli_options.initial_cwd().join(STDIN_FILE_NAME);
if let Some(ext) = cli_options.ext_flag() { if let Some(ext) = cli_options.ext_flag() {
file_path.set_extension(ext); file_path.set_extension(ext);
@ -661,11 +659,15 @@ fn resolve_lint_config(
.for_specifier(specifier) .for_specifier(specifier)
.transpile_options()? .transpile_options()?
.transpile; .transpile;
let jsx_classic_options =
transpile_options.jsx.as_ref().and_then(|jsx| match jsx {
deno_ast::JsxRuntime::Classic(classic) => Some(classic),
_ => None,
});
Ok(deno_lint::linter::LintConfig { Ok(deno_lint::linter::LintConfig {
default_jsx_factory: (!transpile_options.jsx_automatic) default_jsx_factory: jsx_classic_options.map(|o| o.factory.clone()),
.then(|| transpile_options.jsx_factory.clone()), default_jsx_fragment_factory: jsx_classic_options
default_jsx_fragment_factory: (!transpile_options.jsx_automatic) .map(|o| o.fragment_factory.clone()),
.then(|| transpile_options.jsx_fragment_factory.clone()),
}) })
} }
View file
@ -5,8 +5,8 @@ use std::collections::HashSet;
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_config::deno_json::ConfigFile;
use deno_config::deno_json::LintRulesConfig; use deno_config::deno_json::LintRulesConfig;
use deno_config::workspace::WorkspaceDirectory;
use deno_core::anyhow::bail; use deno_core::anyhow::bail;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use deno_graph::ModuleGraph; use deno_graph::ModuleGraph;
@ -153,9 +153,9 @@ impl LintRuleProvider {
pub fn resolve_lint_rules_err_empty( pub fn resolve_lint_rules_err_empty(
&self, &self,
rules: LintRulesConfig, rules: LintRulesConfig,
maybe_config_file: Option<&ConfigFile>, maybe_workspace_dir: Option<&WorkspaceDirectory>,
) -> Result<ConfiguredRules, AnyError> { ) -> Result<ConfiguredRules, AnyError> {
let lint_rules = self.resolve_lint_rules(rules, maybe_config_file); let lint_rules = self.resolve_lint_rules(rules, maybe_workspace_dir);
if lint_rules.rules.is_empty() { if lint_rules.rules.is_empty() {
bail!("No rules have been configured") bail!("No rules have been configured")
} }
@ -183,7 +183,7 @@ impl LintRuleProvider {
pub fn resolve_lint_rules( pub fn resolve_lint_rules(
&self, &self,
rules: LintRulesConfig, rules: LintRulesConfig,
maybe_config_file: Option<&ConfigFile>, maybe_workspace_dir: Option<&WorkspaceDirectory>,
) -> ConfiguredRules { ) -> ConfiguredRules {
let all_rules = self.all_rules(); let all_rules = self.all_rules();
let mut all_rule_names = HashSet::with_capacity(all_rules.len()); let mut all_rule_names = HashSet::with_capacity(all_rules.len());
@ -194,7 +194,7 @@ impl LintRuleProvider {
all_rules.into_iter(), all_rules.into_iter(),
rules rules
.tags .tags
.or_else(|| Some(get_default_tags(maybe_config_file))), .or_else(|| Some(get_default_tags(maybe_workspace_dir))),
rules.exclude, rules.exclude,
rules.include, rules.include,
); );
@ -205,11 +205,24 @@ impl LintRuleProvider {
} }
} }
fn get_default_tags(maybe_config_file: Option<&ConfigFile>) -> Vec<String> { fn get_default_tags(
maybe_workspace_dir: Option<&WorkspaceDirectory>,
) -> Vec<String> {
let mut tags = Vec::with_capacity(2); let mut tags = Vec::with_capacity(2);
tags.push("recommended".to_string()); tags.push("recommended".to_string());
if maybe_config_file.map(|c| c.is_package()).unwrap_or(false) { if let Some(member_dir) = maybe_workspace_dir {
tags.push("jsr".to_string()); if member_dir
.maybe_deno_json()
.map(|c| c.is_package())
.unwrap_or(false)
{
tags.push("jsr".to_string());
}
if member_dir.maybe_deno_json().is_some()
|| member_dir.maybe_pkg_json().is_some()
{
tags.push("workspace".to_string());
}
} }
tags tags
} }
View file
@ -896,6 +896,17 @@ async fn npm_install_after_modification(
// npm install // npm install
cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?; cache_deps::cache_top_level_deps(&cli_factory, jsr_resolver).await?;
if let Some(install_reporter) = cli_factory.install_reporter()? {
let workspace = cli_factory.workspace_resolver().await?;
let npm_resolver = cli_factory.npm_resolver().await?;
super::installer::print_install_report(
&cli_factory.sys(),
install_reporter,
workspace,
npm_resolver,
);
}
if let Some(lockfile) = cli_factory.maybe_lockfile().await? { if let Some(lockfile) = cli_factory.maybe_lockfile().await? {
lockfile.write_if_changed()?; lockfile.write_if_changed()?;
} }
View file
@ -1,5 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::sync::Arc; use std::sync::Arc;
@ -26,9 +27,8 @@ use super::unfurl::SpecifierUnfurlerDiagnostic;
use crate::sys::CliSys; use crate::sys::CliSys;
struct JsxFolderOptions<'a> { struct JsxFolderOptions<'a> {
jsx_factory: &'a str,
jsx_fragment_factory: &'a str,
jsx_runtime: &'static str, jsx_runtime: &'static str,
jsx_classic: Option<Cow<'a, deno_ast::JsxClassicOptions>>,
jsx_import_source: Option<String>, jsx_import_source: Option<String>,
jsx_import_source_types: Option<String>, jsx_import_source_types: Option<String>,
} }
@ -206,18 +206,19 @@ impl<TSys: FsMetadata + FsRead> ModuleContentProvider<TSys> {
import_source import_source
)); ));
} }
let is_classic = jsx_options.jsx_runtime == "classic"; if let Some(classic_options) = &jsx_options.jsx_classic {
if is_classic && !leading_comments_has_re(&JSX_FACTORY_RE) { if !leading_comments_has_re(&JSX_FACTORY_RE) {
add_text_change(format!( add_text_change(format!(
"/** @jsxFactory {} */", "/** @jsxFactory {} */",
jsx_options.jsx_factory, classic_options.factory,
)); ));
} }
if is_classic && !leading_comments_has_re(&JSX_FRAGMENT_FACTORY_RE) { if !leading_comments_has_re(&JSX_FRAGMENT_FACTORY_RE) {
add_text_change(format!( add_text_change(format!(
"/** @jsxFragmentFactory {} */", "/** @jsxFragmentFactory {} */",
jsx_options.jsx_fragment_factory, classic_options.fragment_factory,
)); ));
}
} }
Ok(()) Ok(())
} }
@ -232,12 +233,13 @@ impl<TSys: FsMetadata + FsRead> ModuleContentProvider<TSys> {
self.compiler_options_resolver.for_specifier(specifier); self.compiler_options_resolver.for_specifier(specifier);
let jsx_config = compiler_options.jsx_import_source_config()?; let jsx_config = compiler_options.jsx_import_source_config()?;
let transpile_options = &compiler_options.transpile_options()?.transpile; let transpile_options = &compiler_options.transpile_options()?.transpile;
let jsx_runtime = let jsx_runtime = match &transpile_options.jsx {
if transpile_options.jsx_automatic || transpile_options.precompile_jsx { Some(
"automatic" deno_ast::JsxRuntime::Automatic(_)
} else { | deno_ast::JsxRuntime::Precompile(_),
"classic" ) => "automatic",
}; None | Some(deno_ast::JsxRuntime::Classic(_)) => "classic",
};
let mut unfurl_import_source = let mut unfurl_import_source =
|import_source: &str, referrer: &Url, resolution_kind: ResolutionKind| { |import_source: &str, referrer: &Url, resolution_kind: ResolutionKind| {
let maybe_import_source = self let maybe_import_source = self
@ -270,10 +272,19 @@ impl<TSys: FsMetadata + FsRead> ModuleContentProvider<TSys> {
ResolutionKind::Types, ResolutionKind::Types,
) )
}); });
let classic_options = match &transpile_options.jsx {
None => Some(Cow::Owned(deno_ast::JsxClassicOptions::default())),
Some(deno_ast::JsxRuntime::Classic(classic_options)) => {
Some(Cow::Borrowed(classic_options))
}
Some(
deno_ast::JsxRuntime::Precompile(_)
| deno_ast::JsxRuntime::Automatic(_),
) => None,
};
Ok(JsxFolderOptions { Ok(JsxFolderOptions {
jsx_runtime, jsx_runtime,
jsx_factory: &transpile_options.jsx_factory, jsx_classic: classic_options,
jsx_fragment_factory: &transpile_options.jsx_fragment_factory,
jsx_import_source, jsx_import_source,
jsx_import_source_types, jsx_import_source_types,
}) })
@ -288,7 +299,6 @@ mod test {
use deno_path_util::url_from_file_path; use deno_path_util::url_from_file_path;
use deno_resolver::deno_json::CompilerOptionsOverrides; use deno_resolver::deno_json::CompilerOptionsOverrides;
use deno_resolver::factory::ConfigDiscoveryOption; use deno_resolver::factory::ConfigDiscoveryOption;
use deno_resolver::factory::WorkspaceDirectoryProvider;
use deno_resolver::npm::ByonmNpmResolverCreateOptions; use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions; use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::DenoInNpmPackageChecker; use deno_resolver::npm::DenoInNpmPackageChecker;
@ -466,7 +476,6 @@ mod test {
) )
.unwrap(), .unwrap(),
); );
let specifier_unfurler = SpecifierUnfurler::new(None, resolver, false);
let package_json_resolver = let package_json_resolver =
Arc::new(PackageJsonResolver::new(sys.clone(), None)); Arc::new(PackageJsonResolver::new(sys.clone(), None));
let node_resolver = Arc::new(NodeResolver::new( let node_resolver = Arc::new(NodeResolver::new(
@ -485,11 +494,13 @@ mod test {
)); ));
let compiler_options_resolver = Arc::new(CompilerOptionsResolver::new( let compiler_options_resolver = Arc::new(CompilerOptionsResolver::new(
&sys, &sys,
&WorkspaceDirectoryProvider::from_initial_dir(&Arc::new(workspace_dir)), &workspace_dir.workspace,
&node_resolver, &node_resolver,
&ConfigDiscoveryOption::DiscoverCwd, &ConfigDiscoveryOption::DiscoverCwd,
&CompilerOptionsOverrides::default(), &CompilerOptionsOverrides::default(),
)); ));
resolver.set_compiler_options_resolver(compiler_options_resolver.clone());
let specifier_unfurler = SpecifierUnfurler::new(None, resolver, false);
ModuleContentProvider::new( ModuleContentProvider::new(
Arc::new(ParsedSourceCache::default()), Arc::new(ParsedSourceCache::default()),
specifier_unfurler, specifier_unfurler,
View file
@ -339,5 +339,5 @@ fn collect_paths(
.ignore_node_modules() .ignore_node_modules()
.set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned))
.use_gitignore() .use_gitignore()
.collect_file_patterns(&CliSys::default(), file_patterns) .collect_file_patterns(&CliSys::default(), &file_patterns)
} }
View file
@ -923,8 +923,6 @@ mod tests {
deno_resolver::workspace::PackageJsonDepResolution::Enabled, deno_resolver::workspace::PackageJsonDepResolution::Enabled,
SloppyImportsOptions::Enabled, SloppyImportsOptions::Enabled,
Default::default(), Default::default(),
Default::default(),
Default::default(),
CliSys::default(), CliSys::default(),
); );
let unfurler = let unfurler =
@ -1092,8 +1090,6 @@ export type * from "./c.d.ts";
deno_resolver::workspace::PackageJsonDepResolution::Enabled, deno_resolver::workspace::PackageJsonDepResolution::Enabled,
Default::default(), Default::default(),
Default::default(), Default::default(),
Default::default(),
Default::default(),
sys.clone(), sys.clone(),
); );
let unfurler = let unfurler =
View file
@ -3,6 +3,8 @@
use std::sync::Arc; use std::sync::Arc;
use deno_ast::ImportsNotUsedAsValues; use deno_ast::ImportsNotUsedAsValues;
use deno_ast::JsxAutomaticOptions;
use deno_ast::JsxClassicOptions;
use deno_ast::ModuleKind; use deno_ast::ModuleKind;
use deno_ast::ModuleSpecifier; use deno_ast::ModuleSpecifier;
use deno_ast::ParseDiagnosticsError; use deno_ast::ParseDiagnosticsError;
@ -51,14 +53,13 @@ use crate::colors;
use crate::lsp::ReplLanguageServer; use crate::lsp::ReplLanguageServer;
use crate::npm::CliNpmInstaller; use crate::npm::CliNpmInstaller;
use crate::resolver::CliResolver; use crate::resolver::CliResolver;
use crate::tools::test::TestEvent;
use crate::tools::test::TestEventReceiver; use crate::tools::test::TestEventReceiver;
use crate::tools::test::TestEventTracker;
use crate::tools::test::TestFailureFormatOptions; use crate::tools::test::TestFailureFormatOptions;
use crate::tools::test::report_tests; use crate::tools::test::report_tests;
use crate::tools::test::reporters::PrettyTestReporter; use crate::tools::test::reporters::PrettyTestReporter;
use crate::tools::test::reporters::TestReporter; use crate::tools::test::reporters::TestReporter;
use crate::tools::test::run_tests_for_worker; use crate::tools::test::run_tests_for_worker;
use crate::tools::test::send_test_event;
use crate::tools::test::worker_has_tests; use crate::tools::test::worker_has_tests;
fn comment_source_to_position_range( fn comment_source_to_position_range(
@ -167,12 +168,6 @@ pub struct TsEvaluateResponse {
pub value: cdp::EvaluateResponse, pub value: cdp::EvaluateResponse,
} }
struct ReplJsxState {
factory: String,
frag_factory: String,
import_source: Option<String>,
}
pub struct ReplSession { pub struct ReplSession {
internal_object_id: Option<RemoteObjectId>, internal_object_id: Option<RemoteObjectId>,
npm_installer: Option<Arc<CliNpmInstaller>>, npm_installer: Option<Arc<CliNpmInstaller>>,
@ -187,8 +182,8 @@ pub struct ReplSession {
test_reporter_factory: Box<dyn Fn() -> Box<dyn TestReporter>>, test_reporter_factory: Box<dyn Fn() -> Box<dyn TestReporter>>,
/// This is only optional because it's temporarily taken when evaluating. /// This is only optional because it's temporarily taken when evaluating.
test_event_receiver: Option<TestEventReceiver>, test_event_receiver: Option<TestEventReceiver>,
jsx: ReplJsxState, jsx: deno_ast::JsxRuntime,
experimental_decorators: bool, decorators: deno_ast::DecoratorsTranspileOption,
} }
impl ReplSession { impl ReplSession {
@ -255,11 +250,10 @@ impl ReplSession {
cli_options.initial_cwd().to_string_lossy(), cli_options.initial_cwd().to_string_lossy(),
) )
})?; })?;
let experimental_decorators = compiler_options_resolver let transpile_options = &compiler_options_resolver
.for_specifier(&cwd_url) .for_specifier(&cwd_url)
.transpile_options()? .transpile_options()?
.transpile .transpile;
.use_ts_decorators;
let mut repl_session = ReplSession { let mut repl_session = ReplSession {
internal_object_id: None, internal_object_id: None,
npm_installer, npm_installer,
@ -282,12 +276,8 @@ impl ReplSession {
}), }),
main_module, main_module,
test_event_receiver: Some(test_event_receiver), test_event_receiver: Some(test_event_receiver),
jsx: ReplJsxState { jsx: transpile_options.jsx.clone().unwrap_or_default(),
factory: "React.createElement".to_string(), decorators: transpile_options.decorators.clone(),
frag_factory: "React.Fragment".to_string(),
import_source: None,
},
experimental_decorators,
}; };
// inject prelude // inject prelude
@ -471,19 +461,18 @@ impl ReplSession {
self.test_event_receiver.take().unwrap(), self.test_event_receiver.take().unwrap(),
(self.test_reporter_factory)(), (self.test_reporter_factory)(),
)); ));
let event_tracker =
TestEventTracker::new(self.worker.js_runtime.op_state());
run_tests_for_worker( run_tests_for_worker(
&mut self.worker, &mut self.worker,
&self.main_module, &self.main_module,
&Default::default(), &Default::default(),
&Default::default(), &Default::default(),
&event_tracker,
) )
.await .await
.unwrap(); .unwrap();
send_test_event( event_tracker.force_end_report().unwrap();
&self.worker.js_runtime.op_state(),
TestEvent::ForceEndReport,
)
.unwrap();
self.test_event_receiver = Some(report_tests_handle.await.unwrap().1); self.test_event_receiver = Some(report_tests_handle.await.unwrap().1);
} }
@ -671,19 +660,9 @@ impl ReplSession {
let transpiled_src = parsed_source let transpiled_src = parsed_source
.transpile( .transpile(
&deno_ast::TranspileOptions { &deno_ast::TranspileOptions {
use_ts_decorators: self.experimental_decorators, decorators: self.decorators.clone(),
use_decorators_proposal: !self.experimental_decorators,
emit_metadata: false,
imports_not_used_as_values: ImportsNotUsedAsValues::Preserve, imports_not_used_as_values: ImportsNotUsedAsValues::Preserve,
transform_jsx: true, jsx: Some(self.jsx.clone()),
precompile_jsx: false,
precompile_jsx_skip_elements: None,
precompile_jsx_dynamic_props: None,
jsx_automatic: self.jsx.import_source.is_some(),
jsx_development: false,
jsx_factory: self.jsx.factory.clone(),
jsx_fragment_factory: self.jsx.frag_factory.clone(),
jsx_import_source: self.jsx.import_source.clone(),
var_decl_imports: true, var_decl_imports: true,
verbatim_module_syntax: false, verbatim_module_syntax: false,
}, },
@ -721,15 +700,49 @@ impl ReplSession {
} }
if let Some(jsx) = analyzed_pragmas.jsx { if let Some(jsx) = analyzed_pragmas.jsx {
self.jsx.factory = jsx.text; match &mut self.jsx {
self.jsx.import_source = None; deno_ast::JsxRuntime::Classic(jsx_classic_options) => {
jsx_classic_options.factory = jsx.text;
}
deno_ast::JsxRuntime::Automatic(_)
| deno_ast::JsxRuntime::Precompile(_) => {
self.jsx = deno_ast::JsxRuntime::Classic(JsxClassicOptions {
factory: jsx.text,
..Default::default()
});
}
}
} }
if let Some(jsx_frag) = analyzed_pragmas.jsx_fragment { if let Some(jsx_frag) = analyzed_pragmas.jsx_fragment {
self.jsx.frag_factory = jsx_frag.text; match &mut self.jsx {
self.jsx.import_source = None; deno_ast::JsxRuntime::Classic(jsx_classic_options) => {
jsx_classic_options.fragment_factory = jsx_frag.text;
}
deno_ast::JsxRuntime::Automatic(_)
| deno_ast::JsxRuntime::Precompile(_) => {
self.jsx = deno_ast::JsxRuntime::Classic(JsxClassicOptions {
fragment_factory: jsx_frag.text,
..Default::default()
});
}
}
} }
if let Some(jsx_import_source) = analyzed_pragmas.jsx_import_source { if let Some(jsx_import_source) = analyzed_pragmas.jsx_import_source {
self.jsx.import_source = Some(jsx_import_source.text); match &mut self.jsx {
deno_ast::JsxRuntime::Classic(_) => {
self.jsx = deno_ast::JsxRuntime::Automatic(JsxAutomaticOptions {
import_source: Some(jsx_import_source.text),
development: false,
});
}
deno_ast::JsxRuntime::Automatic(automatic)
| deno_ast::JsxRuntime::Precompile(deno_ast::JsxPrecompileOptions {
automatic,
..
}) => {
automatic.import_source = Some(jsx_import_source.text);
}
}
} }
} }
View file
@ -1,6 +1,7 @@
// Copyright 2018-2025 the Deno authors. MIT license. // Copyright 2018-2025 the Deno authors. MIT license.
use std::io::Read; use std::io::Read;
use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use deno_cache_dir::file_fetcher::File; use deno_cache_dir::file_fetcher::File;
@ -24,6 +25,7 @@ use crate::args::WatchFlagsWithPaths;
use crate::factory::CliFactory; use crate::factory::CliFactory;
use crate::util; use crate::util;
use crate::util::file_watcher::WatcherRestartMode; use crate::util::file_watcher::WatcherRestartMode;
use crate::util::watch_env_tracker::WatchEnvTracker;
pub mod hmr; pub mod hmr;
@ -173,6 +175,14 @@ async fn run_with_watch(
WatcherRestartMode::Automatic, WatcherRestartMode::Automatic,
move |flags, watcher_communicator, changed_paths| { move |flags, watcher_communicator, changed_paths| {
watcher_communicator.show_path_changed(changed_paths.clone()); watcher_communicator.show_path_changed(changed_paths.clone());
let env_file_paths: Option<Vec<std::path::PathBuf>> = flags
.env_file
.as_ref()
.map(|files| files.iter().map(PathBuf::from).collect());
WatchEnvTracker::snapshot().load_env_variables_from_env_files(
env_file_paths.as_ref(),
flags.log_level,
);
Ok(async move { Ok(async move {
let factory = CliFactory::from_flags_for_watcher( let factory = CliFactory::from_flags_for_watcher(
flags, flags,
View file
@ -63,7 +63,7 @@ pub async fn execute_script(
let start_dir = &cli_options.start_dir; let start_dir = &cli_options.start_dir;
if !start_dir.has_deno_or_pkg_json() && !task_flags.eval { if !start_dir.has_deno_or_pkg_json() && !task_flags.eval {
bail!( bail!(
"deno task couldn't find deno.json(c). See https://docs.deno.com/go/config" "deno task couldn't find deno.json(c) or package.json. See https://docs.deno.com/go/config"
) )
} }
let force_use_pkg_json = let force_use_pkg_json =
@ -824,7 +824,8 @@ fn print_available_tasks(
if let Some(config) = config.deno_json.as_ref() { if let Some(config) = config.deno_json.as_ref() {
let is_root = !is_cwd_root_dir let is_root = !is_cwd_root_dir
&& config.folder_url == *workspace_dir.workspace.root_dir().as_ref(); && config.folder_url
== *workspace_dir.workspace.root_dir_url().as_ref();
for (name, definition) in &config.tasks { for (name, definition) in &config.tasks {
if !seen_task_names.insert(name) { if !seen_task_names.insert(name) {
@ -841,7 +842,8 @@ fn print_available_tasks(
if let Some(config) = config.package_json.as_ref() { if let Some(config) = config.package_json.as_ref() {
let is_root = !is_cwd_root_dir let is_root = !is_cwd_root_dir
&& config.folder_url == *workspace_dir.workspace.root_dir().as_ref(); && config.folder_url
== *workspace_dir.workspace.root_dir_url().as_ref();
for (name, script) in &config.tasks { for (name, script) in &config.tasks {
if !seen_task_names.insert(name) { if !seen_task_names.insert(name) {
continue; // already seen continue; // already seen
View file
@ -13,7 +13,9 @@ use std::io::Write;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc; use std::sync::Arc;
use std::sync::LazyLock;
use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicBool;
use std::sync::atomic::AtomicUsize; use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering; use std::sync::atomic::Ordering;
@ -40,12 +42,6 @@ use deno_core::futures::future;
use deno_core::futures::stream; use deno_core::futures::stream;
use deno_core::located_script_name; use deno_core::located_script_name;
use deno_core::serde_v8; use deno_core::serde_v8;
use deno_core::stats::RuntimeActivity;
use deno_core::stats::RuntimeActivityDiff;
use deno_core::stats::RuntimeActivityStats;
use deno_core::stats::RuntimeActivityStatsFactory;
use deno_core::stats::RuntimeActivityStatsFilter;
use deno_core::stats::RuntimeActivityType;
use deno_core::unsync::spawn; use deno_core::unsync::spawn;
use deno_core::unsync::spawn_blocking; use deno_core::unsync::spawn_blocking;
use deno_core::url::Url; use deno_core::url::Url;
@ -55,7 +51,9 @@ use deno_npm_installer::graph::NpmCachingStrategy;
use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerExecutionMode;
use deno_runtime::deno_io::Stdio; use deno_runtime::deno_io::Stdio;
use deno_runtime::deno_io::StdioPipe; use deno_runtime::deno_io::StdioPipe;
use deno_runtime::deno_permissions::Permissions;
use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::permissions::RuntimePermissionDescriptorParser;
use deno_runtime::tokio_util::create_and_run_current_thread; use deno_runtime::tokio_util::create_and_run_current_thread;
use deno_runtime::worker::MainWorker; use deno_runtime::worker::MainWorker;
use indexmap::IndexMap; use indexmap::IndexMap;
@ -79,6 +77,7 @@ use crate::file_fetcher::CliFileFetcher;
use crate::graph_container::CheckSpecifiersOptions; use crate::graph_container::CheckSpecifiersOptions;
use crate::graph_util::has_graph_root_local_dependent_changed; use crate::graph_util::has_graph_root_local_dependent_changed;
use crate::ops; use crate::ops;
use crate::sys::CliSys;
use crate::util::extract::extract_doc_tests; use crate::util::extract::extract_doc_tests;
use crate::util::file_watcher; use crate::util::file_watcher;
use crate::util::fs::CollectSpecifiersOptions; use crate::util::fs::CollectSpecifiersOptions;
@ -92,6 +91,7 @@ use crate::worker::CreateCustomWorkerError;
mod channel; mod channel;
pub mod fmt; pub mod fmt;
pub mod reporters; pub mod reporters;
mod sanitizers;
pub use channel::TestEventReceiver; pub use channel::TestEventReceiver;
pub use channel::TestEventSender; pub use channel::TestEventSender;
@ -112,37 +112,10 @@ use crate::tools::coverage::cover_files;
use crate::tools::coverage::reporter; use crate::tools::coverage::reporter;
use crate::tools::test::channel::ChannelClosedError; use crate::tools::test::channel::ChannelClosedError;
/// How many times we're allowed to spin the event loop before considering something a leak. static SLOW_TEST_TIMEOUT: LazyLock<u64> = LazyLock::new(|| {
const MAX_SANITIZER_LOOP_SPINS: usize = 16; let base_timeout = env::var("DENO_SLOW_TEST_TIMEOUT").unwrap_or_default();
base_timeout.parse().unwrap_or(60).max(1)
#[derive(Default)] });
struct TopLevelSanitizerStats {
map: HashMap<(RuntimeActivityType, Cow<'static, str>), usize>,
}
fn get_sanitizer_item(
activity: RuntimeActivity,
) -> (RuntimeActivityType, Cow<'static, str>) {
let activity_type = activity.activity();
match activity {
RuntimeActivity::AsyncOp(_, _, name) => (activity_type, name.into()),
RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
}
}
fn get_sanitizer_item_ref(
activity: &RuntimeActivity,
) -> (RuntimeActivityType, Cow<'_, str>) {
let activity_type = activity.activity();
match activity {
RuntimeActivity::AsyncOp(_, _, name) => (activity_type, (*name).into()),
RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
}
}
/// The test mode is used to determine how a specifier is to be tested. /// The test mode is used to determine how a specifier is to be tested.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
@ -230,10 +203,19 @@ pub struct TestLocation {
} }
#[derive(Default)] #[derive(Default)]
pub(crate) struct TestContainer( pub(crate) struct TestContainer {
TestDescriptions, descriptions: TestDescriptions,
Vec<v8::Global<v8::Function>>, test_functions: Vec<v8::Global<v8::Function>>,
); test_hooks: TestHooks,
}
#[derive(Default)]
pub(crate) struct TestHooks {
pub before_all: Vec<v8::Global<v8::Function>>,
pub before_each: Vec<v8::Global<v8::Function>>,
pub after_each: Vec<v8::Global<v8::Function>>,
pub after_all: Vec<v8::Global<v8::Function>>,
}
impl TestContainer { impl TestContainer {
pub fn register( pub fn register(
@ -241,12 +223,26 @@ impl TestContainer {
description: TestDescription, description: TestDescription,
function: v8::Global<v8::Function>, function: v8::Global<v8::Function>,
) { ) {
self.0.tests.insert(description.id, description); self.descriptions.tests.insert(description.id, description);
self.1.push(function) self.test_functions.push(function)
}
pub fn register_hook(
&mut self,
hook_type: String,
function: v8::Global<v8::Function>,
) {
match hook_type.as_str() {
"beforeAll" => self.test_hooks.before_all.push(function),
"beforeEach" => self.test_hooks.before_each.push(function),
"afterEach" => self.test_hooks.after_each.push(function),
"afterAll" => self.test_hooks.after_all.push(function),
_ => {}
}
} }
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.1.is_empty() self.test_functions.is_empty()
} }
} }
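The hook vectors registered above are invoked later in this diff with `iter()` for the `before*` hooks and `iter().rev()` for the `after*` hooks, i.e. setup runs in registration order and teardown unwinds in reverse. A minimal sketch of that ordering with plain closures (hypothetical hook bodies, not the real test runner types):

fn main() {
  // Registration order: outer setup first, inner setup last.
  let before_each: Vec<Box<dyn Fn()>> = vec![
    Box::new(|| println!("open db")),
    Box::new(|| println!("start server")),
  ];
  let after_each: Vec<Box<dyn Fn()>> = vec![
    Box::new(|| println!("close db")),
    Box::new(|| println!("stop server")),
  ];

  // FIFO for setup hooks...
  for hook in before_each.iter() {
    hook();
  }
  println!("run test body");
  // ...LIFO for teardown hooks, so "stop server" runs before "close db".
  for hook in after_each.iter().rev() {
    hook();
  }
}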
@ -687,11 +683,9 @@ async fn configure_main_worker(
let check_res = let check_res =
|res: Result<(), CoreError>| match res.map_err(|err| err.into_kind()) { |res: Result<(), CoreError>| match res.map_err(|err| err.into_kind()) {
Ok(()) => Ok(()), Ok(()) => Ok(()),
Err(CoreErrorKind::Js(err)) => send_test_event( Err(CoreErrorKind::Js(err)) => TestEventTracker::new(op_state.clone())
&op_state, .uncaught_error(specifier.to_string(), err)
TestEvent::UncaughtError(specifier.to_string(), Box::new(err)), .map_err(|e| CoreErrorKind::JsBox(JsErrorBox::from_err(e)).into_box()),
)
.map_err(|e| CoreErrorKind::JsBox(JsErrorBox::from_err(e)).into_box()),
Err(err) => Err(err.into_box()), Err(err) => Err(err.into_box()),
}; };
@ -728,12 +722,14 @@ pub async fn test_specifier(
jupyter_channel.0, jupyter_channel.0,
) )
.await?; .await?;
let event_tracker = TestEventTracker::new(worker.js_runtime.op_state());
match test_specifier_inner( match test_specifier_inner(
&mut worker, &mut worker,
coverage_collector, coverage_collector,
specifier.clone(), specifier.clone(),
fail_fast_tracker, fail_fast_tracker,
&event_tracker,
options, options,
) )
.await .await
@ -741,10 +737,7 @@ pub async fn test_specifier(
Ok(()) => Ok(()), Ok(()) => Ok(()),
Err(TestSpecifierError::Core(err)) => match err.into_kind() { Err(TestSpecifierError::Core(err)) => match err.into_kind() {
CoreErrorKind::Js(err) => { CoreErrorKind::Js(err) => {
send_test_event( event_tracker.uncaught_error(specifier.to_string(), err)?;
&worker.js_runtime.op_state(),
TestEvent::UncaughtError(specifier.to_string(), Box::new(err)),
)?;
Ok(()) Ok(())
} }
err => Err(err.into_box().into()), err => Err(err.into_box().into()),
@ -765,12 +758,12 @@ pub enum TestSpecifierError {
/// Test a single specifier as documentation containing test programs, an executable test module or /// Test a single specifier as documentation containing test programs, an executable test module or
/// both. /// both.
#[allow(clippy::too_many_arguments)]
async fn test_specifier_inner( async fn test_specifier_inner(
worker: &mut MainWorker, worker: &mut MainWorker,
mut coverage_collector: Option<CoverageCollector>, mut coverage_collector: Option<CoverageCollector>,
specifier: ModuleSpecifier, specifier: ModuleSpecifier,
fail_fast_tracker: FailFastTracker, fail_fast_tracker: FailFastTracker,
event_tracker: &TestEventTracker,
options: TestSpecifierOptions, options: TestSpecifierOptions,
) -> Result<(), TestSpecifierError> { ) -> Result<(), TestSpecifierError> {
// Ensure that there are no pending exceptions before we start running tests // Ensure that there are no pending exceptions before we start running tests
@ -780,8 +773,14 @@ async fn test_specifier_inner(
.dispatch_load_event() .dispatch_load_event()
.map_err(|e| CoreErrorKind::Js(e).into_box())?; .map_err(|e| CoreErrorKind::Js(e).into_box())?;
run_tests_for_worker(worker, &specifier, &options, &fail_fast_tracker) run_tests_for_worker(
.await?; worker,
&specifier,
&options,
&fail_fast_tracker,
event_tracker,
)
.await?;
// Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the
// event loop to continue beyond what's needed to await results. // event loop to continue beyond what's needed to await results.
@ -822,6 +821,18 @@ pub fn worker_has_tests(worker: &mut MainWorker) -> bool {
!state.borrow::<TestContainer>().is_empty() !state.borrow::<TestContainer>().is_empty()
} }
// Each test needs a fresh reqwest connection pool to avoid inter-test weirdness with connections
// failing. If we don't do this, a connection to a test server we just tore down might be re-used in
// the next test.
// TODO(mmastrac): this should be some sort of callback that we can implement for any subsystem
pub fn worker_prepare_for_test(worker: &mut MainWorker) {
worker
.js_runtime
.op_state()
.borrow_mut()
.try_take::<deno_runtime::deno_fetch::Client>();
}
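`OpState::try_take` behaves like a type-keyed map: removing the cached `deno_fetch::Client` means the next test lazily rebuilds one with a fresh connection pool. A rough standalone analogue using `std::any` (the `TypeMap` and `HttpClient` names here are hypothetical, not the deno_core types):

use std::any::{Any, TypeId};
use std::collections::HashMap;

#[derive(Default)]
struct TypeMap(HashMap<TypeId, Box<dyn Any>>);

impl TypeMap {
  fn put<T: 'static>(&mut self, value: T) {
    self.0.insert(TypeId::of::<T>(), Box::new(value));
  }
  // Remove and return the value of type T if present, like OpState::try_take.
  fn try_take<T: 'static>(&mut self) -> Option<T> {
    self
      .0
      .remove(&TypeId::of::<T>())
      .and_then(|boxed| boxed.downcast::<T>().ok())
      .map(|boxed| *boxed)
  }
}

struct HttpClient; // stand-in for the cached fetch client

fn main() {
  let mut state = TypeMap::default();
  state.put(HttpClient);
  // Dropping the cached client between tests means the next lookup rebuilds it.
  assert!(state.try_take::<HttpClient>().is_some());
  assert!(state.try_take::<HttpClient>().is_none());
}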
/// Yields to tokio to allow async work to process, and then polls /// Yields to tokio to allow async work to process, and then polls
/// the event loop once. /// the event loop once.
#[must_use = "The event loop result should be checked"] #[must_use = "The event loop result should be checked"]
@ -841,16 +852,6 @@ pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), CoreError> {
.await .await
} }
pub fn send_test_event(
op_state: &RefCell<OpState>,
event: TestEvent,
) -> Result<(), ChannelClosedError> {
op_state
.borrow_mut()
.borrow_mut::<TestEventSender>()
.send(event)
}
#[derive(Debug, thiserror::Error, deno_error::JsError)] #[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum RunTestsForWorkerErr { pub enum RunTestsForWorkerErr {
#[class(inherit)] #[class(inherit)]
@ -864,49 +865,66 @@ pub enum RunTestsForWorkerErr {
SerdeV8(#[from] serde_v8::Error), SerdeV8(#[from] serde_v8::Error),
} }
async fn slow_test_watchdog(event_tracker: TestEventTracker, test_id: usize) {
// The slow test warning should pop up every DENO_SLOW_TEST_TIMEOUT*(2**n) seconds,
// with a duration that is doubling each time. So for a warning time of 60s,
// we should get a warning at 60s, 120s, 240s, etc.
let base_timeout = *SLOW_TEST_TIMEOUT;
let mut multiplier = 1;
let mut elapsed = 0;
loop {
tokio::time::sleep(Duration::from_secs(
base_timeout * (multiplier - elapsed),
))
.await;
if event_tracker
.slow(test_id, Duration::from_secs(base_timeout * multiplier))
.is_err()
{
break;
}
multiplier *= 2;
elapsed += 1;
}
}
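A worked trace of the watchdog arithmetic above, assuming the default 60 second base timeout (`DENO_SLOW_TEST_TIMEOUT` unset): with `multiplier` doubling and `elapsed` incrementing, the first warnings land at 60s, 120s and 240s, matching the doubling schedule described in the comment.

fn main() {
  let base: u64 = 60; // assumed default when DENO_SLOW_TEST_TIMEOUT is unset
  let (mut multiplier, mut elapsed, mut clock) = (1u64, 0u64, 0u64);
  for _ in 0..3 {
    clock += base * (multiplier - elapsed); // the sleep before each warning
    println!("warning after {clock}s (reported as {}s slow)", base * multiplier);
    multiplier *= 2;
    elapsed += 1;
  }
  // Prints warnings at 60s, 120s and 240s.
}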
pub async fn run_tests_for_worker( pub async fn run_tests_for_worker(
worker: &mut MainWorker, worker: &mut MainWorker,
specifier: &ModuleSpecifier, specifier: &ModuleSpecifier,
options: &TestSpecifierOptions, options: &TestSpecifierOptions,
fail_fast_tracker: &FailFastTracker, fail_fast_tracker: &FailFastTracker,
event_tracker: &TestEventTracker,
) -> Result<(), RunTestsForWorkerErr> { ) -> Result<(), RunTestsForWorkerErr> {
let state_rc = worker.js_runtime.op_state(); let state_rc = worker.js_runtime.op_state();
// Take whatever tests have been registered // Take whatever tests have been registered
let TestContainer(tests, test_functions) = let container =
std::mem::take(&mut *state_rc.borrow_mut().borrow_mut::<TestContainer>()); std::mem::take(&mut *state_rc.borrow_mut().borrow_mut::<TestContainer>());
let tests: Arc<TestDescriptions> = tests.into(); let descriptions = Arc::new(container.descriptions);
send_test_event(&state_rc, TestEvent::Register(tests.clone()))?; event_tracker.register(descriptions.clone())?;
let res = run_tests_for_worker_inner( run_tests_for_worker_inner(
worker, worker,
specifier, specifier,
tests, descriptions,
test_functions, container.test_functions,
container.test_hooks,
options, options,
event_tracker,
fail_fast_tracker, fail_fast_tracker,
) )
.await; .await
_ = send_test_event(&state_rc, TestEvent::Completed);
res
} }
async fn run_tests_for_worker_inner( fn compute_tests_to_run(
worker: &mut MainWorker, descs: &TestDescriptions,
specifier: &ModuleSpecifier,
tests: Arc<TestDescriptions>,
test_functions: Vec<v8::Global<v8::Function>>, test_functions: Vec<v8::Global<v8::Function>>,
options: &TestSpecifierOptions, filter: TestFilter,
fail_fast_tracker: &FailFastTracker, ) -> (Vec<(&TestDescription, v8::Global<v8::Function>)>, bool) {
) -> Result<(), RunTestsForWorkerErr> { let mut tests_to_run = Vec::with_capacity(descs.len());
let unfiltered = tests.len();
let state_rc = worker.js_runtime.op_state();
// Build the test plan in a single pass
let mut tests_to_run = Vec::with_capacity(tests.len());
let mut used_only = false; let mut used_only = false;
for ((_, d), f) in tests.tests.iter().zip(test_functions) { for ((_, d), f) in descs.tests.iter().zip(test_functions) {
if !options.filter.includes(&d.name) { if !filter.includes(&d.name) {
continue; continue;
} }
@ -923,88 +941,91 @@ async fn run_tests_for_worker_inner(
} }
tests_to_run.push((d, f)); tests_to_run.push((d, f));
} }
(tests_to_run, used_only)
}
async fn call_hooks<H>(
worker: &mut MainWorker,
hook_fns: impl Iterator<Item = &v8::Global<v8::Function>>,
mut error_handler: H,
) -> Result<(), RunTestsForWorkerErr>
where
H: FnMut(CoreErrorKind) -> Result<(), RunTestsForWorkerErr>,
{
for hook_fn in hook_fns {
let call = worker.js_runtime.call(hook_fn);
let result = worker
.js_runtime
.with_event_loop_promise(call, PollEventLoopOptions::default())
.await;
let Err(err) = result else {
continue;
};
error_handler(err.into_kind())?;
break;
}
Ok(())
}
#[allow(clippy::too_many_arguments)]
async fn run_tests_for_worker_inner(
worker: &mut MainWorker,
specifier: &ModuleSpecifier,
descs: Arc<TestDescriptions>,
test_functions: Vec<v8::Global<v8::Function>>,
test_hooks: TestHooks,
options: &TestSpecifierOptions,
event_tracker: &TestEventTracker,
fail_fast_tracker: &FailFastTracker,
) -> Result<(), RunTestsForWorkerErr> {
let unfiltered = descs.len();
let (mut tests_to_run, used_only) =
compute_tests_to_run(&descs, test_functions, options.filter.clone());
if let Some(seed) = options.shuffle { if let Some(seed) = options.shuffle {
tests_to_run.shuffle(&mut SmallRng::seed_from_u64(seed)); tests_to_run.shuffle(&mut SmallRng::seed_from_u64(seed));
} }
send_test_event( event_tracker.plan(TestPlan {
&state_rc, origin: specifier.to_string(),
TestEvent::Plan(TestPlan { total: tests_to_run.len(),
origin: specifier.to_string(), filtered_out: unfiltered - tests_to_run.len(),
total: tests_to_run.len(), used_only,
filtered_out: unfiltered - tests_to_run.len(), })?;
used_only,
}),
)?;
let mut had_uncaught_error = false; let mut had_uncaught_error = false;
let stats = worker.js_runtime.runtime_activity_stats_factory(); let sanitizer_helper = sanitizers::create_test_sanitizer_helper(worker);
let ops = worker.js_runtime.op_names();
// These particular ops may start and stop independently of tests, so we just filter them out // Execute beforeAll hooks (FIFO order)
// completely. call_hooks(worker, test_hooks.before_all.iter(), |core_error| {
let op_id_host_recv_message = ops tests_to_run = vec![];
.iter() match core_error {
.position(|op| *op == "op_host_recv_message") CoreErrorKind::Js(err) => {
.unwrap(); event_tracker.uncaught_error(specifier.to_string(), err)?;
let op_id_host_recv_ctrl = ops Ok(())
.iter() }
.position(|op| *op == "op_host_recv_ctrl") err => Err(err.into_box().into()),
.unwrap(); }
})
// For consistency between tests with and without sanitizers, we _always_ include .await?;
// the actual sanitizer capture before and after a test, but a test that ignores resource
// or op sanitization simply doesn't throw if one of these constraints is violated.
let mut filter = RuntimeActivityStatsFilter::default();
filter = filter.with_resources();
filter = filter.with_ops();
filter = filter.with_timers();
filter = filter.omit_op(op_id_host_recv_ctrl as _);
filter = filter.omit_op(op_id_host_recv_message as _);
// Count the top-level stats so we can filter them out if they complete and restart within
// a test.
let top_level_stats = stats.clone().capture(&filter);
let mut top_level = TopLevelSanitizerStats::default();
for activity in top_level_stats.dump().active {
top_level
.map
.entry(get_sanitizer_item(activity))
.and_modify(|n| *n += 1)
.or_insert(1);
}
for (desc, function) in tests_to_run.into_iter() { for (desc, function) in tests_to_run.into_iter() {
worker_prepare_for_test(worker);
if fail_fast_tracker.should_stop() { if fail_fast_tracker.should_stop() {
break; break;
} }
// Each test needs a fresh reqwest connection pool to avoid inter-test weirdness with connections
// failing. If we don't do this, a connection to a test server we just tore down might be re-used in
// the next test.
// TODO(mmastrac): this should be some sort of callback that we can implement for any subsystem
worker
.js_runtime
.op_state()
.borrow_mut()
.try_take::<deno_runtime::deno_fetch::Client>();
if desc.ignore { if desc.ignore {
send_test_event( event_tracker.ignored(desc)?;
&state_rc,
TestEvent::Result(desc.id, TestResult::Ignored, 0),
)?;
continue; continue;
} }
if had_uncaught_error { if had_uncaught_error {
send_test_event( event_tracker.cancelled(desc)?;
&state_rc,
TestEvent::Result(desc.id, TestResult::Cancelled, 0),
)?;
continue; continue;
} }
send_test_event(&state_rc, TestEvent::Wait(desc.id))?; event_tracker.wait(desc)?;
// Poll event loop once, to allow all ops that are already resolved, but haven't // Poll event loop once, to allow all ops that are already resolved, but haven't
// responded to settle. // responded to settle.
@ -1013,90 +1034,89 @@ async fn run_tests_for_worker_inner(
poll_event_loop(worker).await?; poll_event_loop(worker).await?;
// We always capture stats, regardless of sanitization state // We always capture stats, regardless of sanitization state
let before = stats.clone().capture(&filter); let before_test_stats = sanitizer_helper.capture_stats();
let earlier = Instant::now(); let earlier = Instant::now();
let call = worker.js_runtime.call(&function);
let slow_state_rc = state_rc.clone(); // Execute beforeEach hooks (FIFO order)
let slow_test_id = desc.id; let mut before_each_hook_errored = false;
let slow_test_warning = spawn(async move {
// The slow test warning should pop up every DENO_SLOW_TEST_TIMEOUT*(2**n) seconds,
// with a duration that is doubling each time. So for a warning time of 60s,
// we should get a warning at 60s, 120s, 240s, etc.
let base_timeout = env::var("DENO_SLOW_TEST_TIMEOUT").unwrap_or_default();
let base_timeout = base_timeout.parse().unwrap_or(60).max(1);
let mut multiplier = 1;
let mut elapsed = 0;
loop {
tokio::time::sleep(Duration::from_secs(
base_timeout * (multiplier - elapsed),
))
.await;
if send_test_event(
&slow_state_rc,
TestEvent::Slow(
slow_test_id,
Duration::from_secs(base_timeout * multiplier).as_millis() as _,
),
)
.is_err()
{
break;
}
multiplier *= 2;
elapsed += 1;
}
});
let result = worker call_hooks(worker, test_hooks.before_each.iter(), |core_error| {
.js_runtime match core_error {
.with_event_loop_promise(call, PollEventLoopOptions::default()) CoreErrorKind::Js(err) => {
.await; before_each_hook_errored = true;
slow_test_warning.abort(); let test_result = TestResult::Failed(TestFailure::JsError(err));
let result = match result {
Ok(r) => r,
Err(error) => match error.into_kind() {
CoreErrorKind::Js(js_error) => {
send_test_event(
&state_rc,
TestEvent::UncaughtError(specifier.to_string(), Box::new(js_error)),
)?;
fail_fast_tracker.add_failure(); fail_fast_tracker.add_failure();
send_test_event( event_tracker.result(desc, test_result, earlier.elapsed())?;
&state_rc, Ok(())
TestEvent::Result(desc.id, TestResult::Cancelled, 0),
)?;
had_uncaught_error = true;
continue;
} }
err => return Err(err.into_box().into()), err => Err(err.into_box().into()),
}, }
}; })
.await?;
// Check the result before we check for leaks // TODO(bartlomieju): this whole block/binding could be reworked into something better
let result = { let result = if !before_each_hook_errored {
let call = worker.js_runtime.call(&function);
let slow_test_warning =
spawn(slow_test_watchdog(event_tracker.clone(), desc.id));
let result = worker
.js_runtime
.with_event_loop_promise(call, PollEventLoopOptions::default())
.await;
slow_test_warning.abort();
let result = match result {
Ok(r) => r,
Err(error) => match error.into_kind() {
CoreErrorKind::Js(js_error) => {
event_tracker.uncaught_error(specifier.to_string(), js_error)?;
fail_fast_tracker.add_failure();
event_tracker.cancelled(desc)?;
had_uncaught_error = true;
continue;
}
err => return Err(err.into_box().into()),
},
};
// Check the result before we check for leaks
let scope = &mut worker.js_runtime.handle_scope(); let scope = &mut worker.js_runtime.handle_scope();
let result = v8::Local::new(scope, result); let result = v8::Local::new(scope, result);
serde_v8::from_v8::<TestResult>(scope, result)? serde_v8::from_v8::<TestResult>(scope, result)?
} else {
TestResult::Ignored
}; };
if matches!(result, TestResult::Failed(_)) { if matches!(result, TestResult::Failed(_)) {
fail_fast_tracker.add_failure(); fail_fast_tracker.add_failure();
let elapsed = earlier.elapsed().as_millis(); event_tracker.result(desc, result.clone(), earlier.elapsed())?;
send_test_event( }
&state_rc,
TestEvent::Result(desc.id, result, elapsed as u64), // Execute afterEach hooks (LIFO order)
)?; call_hooks(worker, test_hooks.after_each.iter().rev(), |core_error| {
match core_error {
CoreErrorKind::Js(err) => {
let test_result = TestResult::Failed(TestFailure::JsError(err));
fail_fast_tracker.add_failure();
event_tracker.result(desc, test_result, earlier.elapsed())?;
Ok(())
}
err => Err(err.into_box().into()),
}
})
.await?;
if matches!(result, TestResult::Failed(_)) {
continue; continue;
} }
// Await activity stabilization // Await activity stabilization
if let Some(diff) = wait_for_activity_to_stabilize( if let Some(diff) = sanitizers::wait_for_activity_to_stabilize(
worker, worker,
&stats, &sanitizer_helper,
&filter, before_test_stats,
&top_level,
before,
desc.sanitize_ops, desc.sanitize_ops,
desc.sanitize_resources, desc.sanitize_resources,
) )
@ -1106,155 +1126,36 @@ async fn run_tests_for_worker_inner(
if !formatted.is_empty() { if !formatted.is_empty() {
let failure = TestFailure::Leaked(formatted, trailer_notes); let failure = TestFailure::Leaked(formatted, trailer_notes);
fail_fast_tracker.add_failure(); fail_fast_tracker.add_failure();
let elapsed = earlier.elapsed().as_millis(); event_tracker.result(
send_test_event( desc,
&state_rc, TestResult::Failed(failure),
TestEvent::Result( earlier.elapsed(),
desc.id,
TestResult::Failed(failure),
elapsed as u64,
),
)?; )?;
continue; continue;
} }
} }
let elapsed = earlier.elapsed().as_millis(); // TODO(bartlomieju): using `before_each_hook_errored` is fishy
send_test_event( if !before_each_hook_errored {
&state_rc, event_tracker.result(desc, result, earlier.elapsed())?;
TestEvent::Result(desc.id, result, elapsed as u64),
)?;
}
Ok(())
}
/// The sanitizer must ignore ops, resources and timers that were started at the top-level, but
/// completed and restarted, replacing themselves with the same "thing". For example, if you run a
/// `Deno.serve` server at the top level and make fetch requests to it during the test, those ops
/// should not count as completed during the test because they are immediately replaced.
fn is_empty(
top_level: &TopLevelSanitizerStats,
diff: &RuntimeActivityDiff,
) -> bool {
// If the diff is empty, return empty
if diff.is_empty() {
return true;
}
// If the # of appeared != # of disappeared, we can exit fast with not empty
if diff.appeared.len() != diff.disappeared.len() {
return false;
}
// If there are no top-level ops and !diff.is_empty(), we can exit fast with not empty
if top_level.map.is_empty() {
return false;
}
// Otherwise we need to calculate replacement for top-level stats. Sanitizers will not fire
// if an op, resource or timer is replaced and has a corresponding top-level op.
let mut map = HashMap::new();
for item in &diff.appeared {
let item = get_sanitizer_item_ref(item);
let Some(n1) = top_level.map.get(&item) else {
return false;
};
let n2 = map.entry(item).and_modify(|n| *n += 1).or_insert(1);
// If more ops appeared than were created at the top-level, return false
if *n2 > *n1 {
return false;
} }
} }
// We know that we replaced no more things than were created at the top-level. So now we just want event_tracker.completed()?;
// to make sure that whatever thing was created has a corresponding disappearance record.
for item in &diff.disappeared { // Execute afterAll hooks (LIFO order)
let item = get_sanitizer_item_ref(item); call_hooks(worker, test_hooks.after_all.iter().rev(), |core_error| {
// If more things of this type disappeared than appeared, return false match core_error {
let Some(n1) = map.get_mut(&item) else { CoreErrorKind::Js(err) => {
return false; event_tracker.uncaught_error(specifier.to_string(), err)?;
}; Ok(())
*n1 -= 1; }
if *n1 == 0 { err => Err(err.into_box().into()),
map.remove(&item);
} }
}
// If everything is accounted for, we are empty
map.is_empty()
}
async fn wait_for_activity_to_stabilize(
worker: &mut MainWorker,
stats: &RuntimeActivityStatsFactory,
filter: &RuntimeActivityStatsFilter,
top_level: &TopLevelSanitizerStats,
before: RuntimeActivityStats,
sanitize_ops: bool,
sanitize_resources: bool,
) -> Result<Option<RuntimeActivityDiff>, CoreError> {
// First, check to see if there's any diff at all. If not, just continue.
let after = stats.clone().capture(filter);
let mut diff = RuntimeActivityStats::diff(&before, &after);
if is_empty(top_level, &diff) {
// No activity, so we return early
return Ok(None);
}
// We allow for up to MAX_SANITIZER_LOOP_SPINS to get to a point where there is no difference.
// TODO(mmastrac): We could be much smarter about this if we had the concept of "progress" in
// an event loop tick. Ideally we'd be able to tell if we were spinning and doing nothing, or
// spinning and resolving ops.
for _ in 0..MAX_SANITIZER_LOOP_SPINS {
// There was a diff, so let the event loop run once
poll_event_loop(worker).await?;
let after = stats.clone().capture(filter);
diff = RuntimeActivityStats::diff(&before, &after);
if is_empty(top_level, &diff) {
return Ok(None);
}
}
if !sanitize_ops {
diff
.appeared
.retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
diff
.disappeared
.retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
}
if !sanitize_resources {
diff
.appeared
.retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
diff
.disappeared
.retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
}
// Since we don't have an option to disable timer sanitization, we use sanitize_ops == false &&
// sanitize_resources == false to disable those.
if !sanitize_ops && !sanitize_resources {
diff.appeared.retain(|activity| {
!matches!(
activity,
RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
)
});
diff.disappeared.retain(|activity| {
!matches!(
activity,
RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
)
});
}
Ok(if is_empty(top_level, &diff) {
None
} else {
Some(diff)
}) })
.await?;
Ok(())
} }
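The rule spelled out in the `is_empty` doc comment above (activity started at the top level, such as a `Deno.serve` accept op, may complete and be immediately replaced during a test without counting as a leak) boils down to multiset accounting per activity name, bounded by the top-level count. A simplified standalone sketch over plain name strings rather than the real `RuntimeActivity` values:

use std::collections::HashMap;

// Returns true when `appeared`/`disappeared` only describe top-level activity
// that completed and was replaced by an equivalent one, i.e. "no leak".
fn is_replacement_only(
  top_level: &HashMap<&str, usize>,
  appeared: &[&str],
  disappeared: &[&str],
) -> bool {
  if appeared.len() != disappeared.len() {
    return false;
  }
  let mut seen: HashMap<&str, usize> = HashMap::new();
  for &name in appeared {
    let Some(budget) = top_level.get(name) else { return false; };
    let n = seen.entry(name).and_modify(|n| *n += 1).or_insert(1);
    if *n > *budget {
      return false; // more restarts than top-level instances
    }
  }
  for &name in disappeared {
    match seen.get_mut(name) {
      Some(n) if *n > 0 => *n -= 1,
      _ => return false,
    }
  }
  seen.values().all(|n| *n == 0)
}

fn main() {
  let top_level = HashMap::from([("op_net_accept_tcp", 1usize)]);
  // The top-level accept op finished during the test and a new one took its place.
  assert!(is_replacement_only(&top_level, &["op_net_accept_tcp"], &["op_net_accept_tcp"]));
  // An op the test itself leaked has no top-level budget, so it is flagged.
  assert!(!is_replacement_only(&top_level, &["op_read"], &["op_net_accept_tcp"]));
}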
static HAS_TEST_RUN_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false); static HAS_TEST_RUN_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false);
@ -1262,7 +1163,8 @@ static HAS_TEST_RUN_SIGINT_HANDLER: AtomicBool = AtomicBool::new(false);
/// Test a collection of specifiers with test modes concurrently. /// Test a collection of specifiers with test modes concurrently.
async fn test_specifiers( async fn test_specifiers(
worker_factory: Arc<CliMainWorkerFactory>, worker_factory: Arc<CliMainWorkerFactory>,
root_permissions_container: &PermissionsContainer, cli_options: &Arc<CliOptions>,
permission_desc_parser: &Arc<RuntimePermissionDescriptorParser<CliSys>>,
specifiers: Vec<ModuleSpecifier>, specifiers: Vec<ModuleSpecifier>,
preload_modules: Vec<ModuleSpecifier>, preload_modules: Vec<ModuleSpecifier>,
options: TestSpecifiersOptions, options: TestSpecifiersOptions,
@ -1291,15 +1193,26 @@ async fn test_specifiers(
let join_handles = specifiers.into_iter().map(move |specifier| { let join_handles = specifiers.into_iter().map(move |specifier| {
let worker_factory = worker_factory.clone(); let worker_factory = worker_factory.clone();
// Various test files should not share the same permissions in terms of let specifier_dir = cli_options.workspace().resolve_member_dir(&specifier);
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions_container = root_permissions_container.deep_clone();
let preload_modules = preload_modules.clone(); let preload_modules = preload_modules.clone();
let worker_sender = test_event_sender_factory.worker(); let worker_sender = test_event_sender_factory.worker();
let fail_fast_tracker = fail_fast_tracker.clone(); let fail_fast_tracker = fail_fast_tracker.clone();
let specifier_options = options.specifier.clone(); let specifier_options = options.specifier.clone();
let cli_options = cli_options.clone();
let permission_desc_parser = permission_desc_parser.clone();
spawn_blocking(move || { spawn_blocking(move || {
// Various test files should not share the same permissions in terms of
// `PermissionsContainer` - otherwise granting/revoking permissions in one
// file would have impact on other files, which is undesirable.
let permissions =
cli_options.permissions_options_for_dir(&specifier_dir)?;
let permissions_container = PermissionsContainer::new(
permission_desc_parser.clone(),
Permissions::from_options(
permission_desc_parser.as_ref(),
&permissions,
)?,
);
create_and_run_current_thread(test_specifier( create_and_run_current_thread(test_specifier(
worker_factory, worker_factory,
permissions_container, permissions_container,
@ -1671,7 +1584,8 @@ pub async fn run_tests(
// Run tests // Run tests
test_specifiers( test_specifiers(
worker_factory, worker_factory,
factory.root_permissions_container()?, cli_options,
factory.permission_desc_parser()?,
specifiers_for_typecheck_and_test, specifiers_for_typecheck_and_test,
preload_modules, preload_modules,
TestSpecifiersOptions { TestSpecifiersOptions {
@ -1881,7 +1795,8 @@ pub async fn run_tests_with_watch(
test_specifiers( test_specifiers(
worker_factory, worker_factory,
factory.root_permissions_container()?, &cli_options,
factory.permission_desc_parser()?,
specifiers_for_typecheck_and_test, specifiers_for_typecheck_and_test,
preload_modules, preload_modules,
TestSpecifiersOptions { TestSpecifiersOptions {
@ -1950,6 +1865,88 @@ fn get_target_specifiers(
.collect() .collect()
} }
#[derive(Clone)]
pub struct TestEventTracker {
op_state: Rc<RefCell<OpState>>,
}
impl TestEventTracker {
pub fn new(op_state: Rc<RefCell<OpState>>) -> Self {
Self { op_state }
}
fn send_event(&self, event: TestEvent) -> Result<(), ChannelClosedError> {
self
.op_state
.borrow_mut()
.borrow_mut::<TestEventSender>()
.send(event)
}
fn slow(
&self,
test_id: usize,
duration: Duration,
) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Slow(test_id, duration.as_millis() as _))
}
fn wait(&self, desc: &TestDescription) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Wait(desc.id))
}
fn ignored(&self, desc: &TestDescription) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Result(desc.id, TestResult::Ignored, 0))
}
fn cancelled(
&self,
desc: &TestDescription,
) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Result(desc.id, TestResult::Cancelled, 0))
}
fn register(
&self,
descriptions: Arc<TestDescriptions>,
) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Register(descriptions))
}
fn completed(&self) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Completed)
}
fn uncaught_error(
&self,
specifier: String,
error: Box<JsError>,
) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::UncaughtError(specifier, error))
}
fn plan(&self, plan: TestPlan) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Plan(plan))
}
fn result(
&self,
desc: &TestDescription,
test_result: TestResult,
duration: Duration,
) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::Result(
desc.id,
test_result,
duration.as_millis() as u64,
))
}
pub(crate) fn force_end_report(&self) -> Result<(), ChannelClosedError> {
self.send_event(TestEvent::ForceEndReport)
}
}
/// Tracks failures for the `--fail-fast` argument in /// Tracks failures for the `--fail-fast` argument in
/// order to tell when to stop running tests. /// order to tell when to stop running tests.
#[derive(Clone, Default)] #[derive(Clone, Default)]
@ -1966,15 +1963,10 @@ impl FailFastTracker {
} }
} }
pub fn add_failure(&self) -> bool { pub fn add_failure(&self) {
if let Some(max_count) = &self.max_count { self
self .failure_count
.failure_count .fetch_add(1, std::sync::atomic::Ordering::SeqCst);
.fetch_add(1, std::sync::atomic::Ordering::SeqCst)
>= *max_count
} else {
false
}
} }
pub fn should_stop(&self) -> bool { pub fn should_stop(&self) -> bool {

View file

@ -194,6 +194,7 @@ impl TestReporter for PrettyTestReporter {
} }
fn report_wait(&mut self, description: &TestDescription) { fn report_wait(&mut self, description: &TestDescription) {
self.write_output_end();
if !self.parallel { if !self.parallel {
self.force_report_wait(description); self.force_report_wait(description);
} }

View file

@ -0,0 +1,232 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashMap;
use deno_core::error::CoreError;
use deno_core::stats::RuntimeActivity;
use deno_core::stats::RuntimeActivityDiff;
use deno_core::stats::RuntimeActivityStats;
use deno_core::stats::RuntimeActivityStatsFactory;
use deno_core::stats::RuntimeActivityStatsFilter;
use deno_core::stats::RuntimeActivityType;
use deno_runtime::worker::MainWorker;
use super::poll_event_loop;
/// How many times we're allowed to spin the event loop before considering something a leak.
const MAX_SANITIZER_LOOP_SPINS: usize = 16;
#[derive(Default)]
struct TopLevelSanitizerStats {
map: HashMap<(RuntimeActivityType, Cow<'static, str>), usize>,
}
fn get_sanitizer_item(
activity: RuntimeActivity,
) -> (RuntimeActivityType, Cow<'static, str>) {
let activity_type = activity.activity();
match activity {
RuntimeActivity::AsyncOp(_, _, name) => (activity_type, name.into()),
RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
}
}
fn get_sanitizer_item_ref(
activity: &RuntimeActivity,
) -> (RuntimeActivityType, Cow<'_, str>) {
let activity_type = activity.activity();
match activity {
RuntimeActivity::AsyncOp(_, _, name) => (activity_type, (*name).into()),
RuntimeActivity::Resource(_, _, name) => (activity_type, name.into()),
RuntimeActivity::Interval(_, _) => (activity_type, "".into()),
RuntimeActivity::Timer(_, _) => (activity_type, "".into()),
}
}
pub struct TestSanitizerHelper {
activity_stats: RuntimeActivityStatsFactory,
activity_filter: RuntimeActivityStatsFilter,
top_level_sanitizer_stats: TopLevelSanitizerStats,
}
impl TestSanitizerHelper {
pub fn capture_stats(&self) -> RuntimeActivityStats {
self.activity_stats.clone().capture(&self.activity_filter)
}
}
pub fn create_test_sanitizer_helper(
worker: &mut MainWorker,
) -> TestSanitizerHelper {
let stats = worker.js_runtime.runtime_activity_stats_factory();
let ops = worker.js_runtime.op_names();
// These particular ops may start and stop independently of tests, so we just filter them out
// completely.
let op_id_host_recv_message = ops
.iter()
.position(|op| *op == "op_host_recv_message")
.unwrap();
let op_id_host_recv_ctrl = ops
.iter()
.position(|op| *op == "op_host_recv_ctrl")
.unwrap();
// For consistency between tests with and without sanitizers, we _always_ include
// the actual sanitizer capture before and after a test, but a test that ignores resource
// or op sanitization simply doesn't throw if one of these constraints is violated.
let mut filter = RuntimeActivityStatsFilter::default();
filter = filter.with_resources();
filter = filter.with_ops();
filter = filter.with_timers();
filter = filter.omit_op(op_id_host_recv_ctrl as _);
filter = filter.omit_op(op_id_host_recv_message as _);
// Count the top-level stats so we can filter them out if they complete and restart within
// a test.
let top_level_stats = stats.clone().capture(&filter);
let mut top_level = TopLevelSanitizerStats::default();
for activity in top_level_stats.dump().active {
top_level
.map
.entry(get_sanitizer_item(activity))
.and_modify(|n| *n += 1)
.or_insert(1);
}
TestSanitizerHelper {
activity_stats: stats,
activity_filter: filter,
top_level_sanitizer_stats: top_level,
}
}
/// The sanitizer must ignore ops, resources and timers that were started at the top-level, but
/// completed and restarted, replacing themselves with the same "thing". For example, if you run a
/// `Deno.serve` server at the top level and make fetch requests to it during the test, those ops
/// should not count as completed during the test because they are immediately replaced.
fn is_empty(
top_level: &TopLevelSanitizerStats,
diff: &RuntimeActivityDiff,
) -> bool {
// If the diff is empty, return empty
if diff.is_empty() {
return true;
}
// If the # of appeared != # of disappeared, we can exit fast with not empty
if diff.appeared.len() != diff.disappeared.len() {
return false;
}
// If there are no top-level ops and !diff.is_empty(), we can exit fast with not empty
if top_level.map.is_empty() {
return false;
}
// Otherwise we need to calculate replacement for top-level stats. Sanitizers will not fire
// if an op, resource or timer is replaced and has a corresponding top-level op.
let mut map = HashMap::new();
for item in &diff.appeared {
let item = get_sanitizer_item_ref(item);
let Some(n1) = top_level.map.get(&item) else {
return false;
};
let n2 = map.entry(item).and_modify(|n| *n += 1).or_insert(1);
// If more ops appeared than were created at the top-level, return false
if *n2 > *n1 {
return false;
}
}
// We know that we replaced no more things than were created at the top-level. So now we just want
// to make sure that whatever thing was created has a corresponding disappearance record.
for item in &diff.disappeared {
let item = get_sanitizer_item_ref(item);
// If more things of this type disappeared than appeared, return false
let Some(n1) = map.get_mut(&item) else {
return false;
};
*n1 -= 1;
if *n1 == 0 {
map.remove(&item);
}
}
// If everything is accounted for, we are empty
map.is_empty()
}
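From the test author's point of view, the scenario described in the comment above looks roughly like the following sketch (the port number is hypothetical):

```ts
// Started at the top level, before any test registers.
Deno.serve({ port: 8123 }, () => new Response("ok"));

Deno.test("talks to the top-level server", async () => {
  const res = await fetch("http://127.0.0.1:8123/");
  await res.text();
  // The server's pending accept op completes and restarts during this
  // test; because an identical op was counted at the top level, it is
  // treated as a replacement rather than reported as a leak.
});
```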
pub async fn wait_for_activity_to_stabilize(
worker: &mut MainWorker,
helper: &TestSanitizerHelper,
before_test_stats: RuntimeActivityStats,
sanitize_ops: bool,
sanitize_resources: bool,
) -> Result<Option<RuntimeActivityDiff>, CoreError> {
// First, check to see if there's any diff at all. If not, just continue.
let after_test_stats = helper.capture_stats();
let mut diff =
RuntimeActivityStats::diff(&before_test_stats, &after_test_stats);
if is_empty(&helper.top_level_sanitizer_stats, &diff) {
// No activity, so we return early
return Ok(None);
}
// We allow for up to MAX_SANITIZER_LOOP_SPINS to get to a point where there is no difference.
// TODO(mmastrac): We could be much smarter about this if we had the concept of "progress" in
// an event loop tick. Ideally we'd be able to tell if we were spinning and doing nothing, or
// spinning and resolving ops.
for _ in 0..MAX_SANITIZER_LOOP_SPINS {
// There was a diff, so let the event loop run once
poll_event_loop(worker).await?;
let after_test_stats = helper.capture_stats();
diff = RuntimeActivityStats::diff(&before_test_stats, &after_test_stats);
if is_empty(&helper.top_level_sanitizer_stats, &diff) {
return Ok(None);
}
}
if !sanitize_ops {
diff
.appeared
.retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
diff
.disappeared
.retain(|activity| !matches!(activity, RuntimeActivity::AsyncOp(..)));
}
if !sanitize_resources {
diff
.appeared
.retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
diff
.disappeared
.retain(|activity| !matches!(activity, RuntimeActivity::Resource(..)));
}
// Since we don't have an option to disable timer sanitization, we use sanitize_ops == false &&
// sanitize_resources == false to disable those.
if !sanitize_ops && !sanitize_resources {
diff.appeared.retain(|activity| {
!matches!(
activity,
RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
)
});
diff.disappeared.retain(|activity| {
!matches!(
activity,
RuntimeActivity::Timer(..) | RuntimeActivity::Interval(..)
)
});
}
Ok(if is_empty(&helper.top_level_sanitizer_stats, &diff) {
None
} else {
Some(diff)
})
}

17134
cli/tsc/00_typescript.js vendored

File diff suppressed because it is too large Load diff

View file

@ -237,8 +237,14 @@ function lspToTsCompilerOptions(config) {
Object.assign(options, { Object.assign(options, {
allowNonTsExtensions: true, allowNonTsExtensions: true,
allowImportingTsExtensions: true, allowImportingTsExtensions: true,
module: ts.ModuleKind.NodeNext, module: options.module === ts.ModuleKind.ESNext ||
moduleResolution: ts.ModuleResolutionKind.NodeNext, options.module === ts.ModuleKind.Preserve
? options.module
: ts.ModuleKind.NodeNext,
moduleResolution:
options.moduleResolution === ts.ModuleResolutionKind.Bundler
? ts.ModuleResolutionKind.Bundler
: ts.ModuleResolutionKind.NodeNext,
}); });
if (errors.length > 0) { if (errors.length > 0) {
debug(ts.formatDiagnostics(errors, host)); debug(ts.formatDiagnostics(errors, host));

View file

@ -1166,6 +1166,78 @@ declare namespace Deno {
options: Omit<TestDefinition, "fn" | "only">, options: Omit<TestDefinition, "fn" | "only">,
fn: (t: TestContext) => void | Promise<void>, fn: (t: TestContext) => void | Promise<void>,
): void; ): void;
/** Register a function to be called before all tests in the current scope.
*
* These functions are run in FIFO order (first in, first out).
*
* If an exception is raised during execution of this hook, the remaining `beforeAll` hooks will not be run.
*
* ```ts
* Deno.test.beforeAll(() => {
* // Setup code that runs once before all tests
* console.log("Setting up test suite");
* });
* ```
*
* @category Testing
*/
beforeAll(
fn: () => void | Promise<void>,
): void;
/** Register a function to be called before each test in the current scope.
*
* These functions are run in FIFO order (first in, first out).
*
* If an exception is raised during execution of this hook, the remaining hooks will not be run and the currently running
* test case will be marked as failed.
*
* ```ts
* Deno.test.beforeEach(() => {
* // Setup code that runs before each test
* console.log("Setting up test");
* });
* ```
*
* @category Testing
*/
beforeEach(fn: () => void | Promise<void>): void;
/** Register a function to be called after each test in the current scope.
*
* These functions are run in LIFO order (last in, first out).
*
* If an exception is raised during execution of this hook, the remaining hooks will not be run and the currently running
* test case will be marked as failed.
*
* ```ts
* Deno.test.afterEach(() => {
* // Cleanup code that runs after each test
* console.log("Cleaning up test");
* });
* ```
*
* @category Testing
*/
afterEach(fn: () => void | Promise<void>): void;
/** Register a function to be called after all tests in the current scope have finished running.
*
* These functions are run in LIFO order (last in, first out).
*
* If an exception is raised during execution of this hook, the remaining `afterAll` hooks will not be run.
*
* ```ts
* Deno.test.afterAll(() => {
* // Cleanup code that runs once after all tests
* console.log("Cleaning up test suite");
* });
* ```
*
* @category Testing
*/
afterAll(fn: () => void | Promise<void>): void;
} }
/** /**
@ -3719,8 +3791,8 @@ declare namespace Deno {
*/ */
export class ChildProcess implements AsyncDisposable { export class ChildProcess implements AsyncDisposable {
get stdin(): WritableStream<Uint8Array<ArrayBufferLike>>; get stdin(): WritableStream<Uint8Array<ArrayBufferLike>>;
get stdout(): ReadableStream<Uint8Array<ArrayBuffer>>; get stdout(): SubprocessReadableStream;
get stderr(): ReadableStream<Uint8Array<ArrayBuffer>>; get stderr(): SubprocessReadableStream;
readonly pid: number; readonly pid: number;
/** Get the status of the child. */ /** Get the status of the child. */
readonly status: Promise<CommandStatus>; readonly status: Promise<CommandStatus>;
@ -3746,6 +3818,36 @@ declare namespace Deno {
[Symbol.asyncDispose](): Promise<void>; [Symbol.asyncDispose](): Promise<void>;
} }
/**
* The interface for the stdout and stderr streams of a child process returned from
* {@linkcode Deno.Command.spawn}.
*
* @category Subprocess
*/
export interface SubprocessReadableStream
extends ReadableStream<Uint8Array<ArrayBuffer>> {
/**
* Reads the stream to completion. It returns a promise that resolves with
* an `ArrayBuffer`.
*/
arrayBuffer(): Promise<ArrayBuffer>;
/**
* Reads the stream to completion. It returns a promise that resolves with
* a `Uint8Array`.
*/
bytes(): Promise<Uint8Array>;
/**
* Reads the stream to completion. It returns a promise that resolves with
* the result of parsing the body text as JSON.
*/
json(): Promise<any>;
/**
* Reads the stream to completion. It returns a promise that resolves with
* a `USVString` (text).
*/
text(): Promise<string>;
}
/** /**
* Options which can be set when calling {@linkcode Deno.Command}. * Options which can be set when calling {@linkcode Deno.Command}.
* *
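A usage sketch for the convenience readers declared on `SubprocessReadableStream` above; the spawned command and its arguments are only an example:

```ts
const child = new Deno.Command("deno", {
  args: ["eval", "console.log('hello')"],
  stdout: "piped",
  stderr: "piped",
}).spawn();

// Read each stream to completion with the helper methods.
const out = await child.stdout.text();
const errBytes = await child.stderr.bytes();
await child.status;
console.log(out.trim(), errBytes.length);
```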
@ -5157,6 +5259,18 @@ declare namespace Deno {
/** Sets `SO_REUSEPORT` on POSIX systems. */ /** Sets `SO_REUSEPORT` on POSIX systems. */
reusePort?: boolean; reusePort?: boolean;
/** Maximum number of pending connections in the listen queue.
*
* This parameter controls how many incoming connections can be queued by the
* operating system while waiting for the application to accept them. If more
* connections arrive when the queue is full, they will be refused.
*
* The kernel may adjust this value (e.g., rounding up to the next power of 2
* plus 1). Different operating systems have different maximum limits.
*
* @default {511} */
tcpBacklog?: number;
} }
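Assuming this option is accepted by the TCP listen options used by `Deno.listen`, a minimal sketch might look like this (port and backlog values are arbitrary):

```ts
const listener = Deno.listen({
  port: 8080,
  tcpBacklog: 1024, // ask the OS for a deeper pending-connection queue
});
for await (const conn of listener) {
  conn.close();
}
```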
/** /**

View file

@ -9,6 +9,171 @@
declare namespace Deno { declare namespace Deno {
export {}; // stop default export type behavior export {}; // stop default export type behavior
/**
* @category Bundler
* @experimental
*/
export namespace bundle {
/**
* The target platform of the bundle.
* @category Bundler
* @experimental
*/
export type Platform = "browser" | "deno";
/**
* The output format of the bundle.
* @category Bundler
* @experimental
*/
export type Format = "esm" | "cjs" | "iife";
/**
* The source map type of the bundle.
* @category Bundler
* @experimental
*/
export type SourceMapType = "linked" | "inline" | "external";
/**
* How to handle packages.
*
* - `bundle`: packages are inlined into the bundle.
* - `external`: packages are excluded from the bundle, and treated as external dependencies.
* @category Bundler
* @experimental
*/
export type PackageHandling = "bundle" | "external";
/**
* Options for the bundle.
* @category Bundler
* @experimental
*/
export interface Options {
/**
* The entrypoints of the bundle.
*/
entrypoints: string[];
/**
* Output file path.
*/
outputPath?: string;
/**
* Output directory path.
*/
outputDir?: string;
/**
* External modules to exclude from bundling.
*/
external?: string[];
/**
* Bundle format.
*/
format?: Format;
/**
* Whether to minify the output.
*/
minify?: boolean;
/**
* Whether to enable code splitting.
*/
codeSplitting?: boolean;
/**
* Whether to inline imports.
*/
inlineImports?: boolean;
/**
* How to handle packages.
*/
packages?: PackageHandling;
/**
* Source map configuration.
*/
sourcemap?: SourceMapType;
/**
* Target platform.
*/
platform?: Platform;
/**
* Whether to write the output to the filesystem.
*
* @default true if outputDir or outputPath is set, false otherwise
*/
write?: boolean;
}
/**
* The location of a message.
* @category Bundler
* @experimental
*/
export interface MessageLocation {
file: string;
namespace?: string;
line: number;
column: number;
length: number;
suggestion?: string;
}
/**
* A note about a message.
* @category Bundler
* @experimental
*/
export interface MessageNote {
text: string;
location?: MessageLocation;
}
/**
* A message emitted from the bundler.
* @category Bundler
* @experimental
*/
export interface Message {
text: string;
location?: MessageLocation;
notes?: MessageNote[];
}
/**
* An output file in the bundle.
* @category Bundler
* @experimental
*/
export interface OutputFile {
path: string;
contents?: Uint8Array;
hash: string;
text(): string;
}
/**
* The result of bundling.
* @category Bundler
* @experimental
*/
export interface Result {
errors: Message[];
warnings: Message[];
success: boolean;
outputFiles?: OutputFile[];
}
}
/** **UNSTABLE**: New API, yet to be vetted.
*
* Bundle TypeScript/JavaScript code
* @category Bundle
* @experimental
*/
export function bundle(
options: Deno.bundle.Options,
): Promise<Deno.bundle.Result>;
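A hedged sketch of calling the new API, based only on the `Options` and `Result` shapes declared above; the entrypoint and output paths are hypothetical:

```ts
const result = await Deno.bundle({
  entrypoints: ["./main.ts"],
  outputDir: "dist",
  platform: "deno",
  format: "esm",
  minify: true,
});

if (!result.success) {
  for (const err of result.errors) console.error(err.text);
} else {
  for (const file of result.outputFiles ?? []) console.log(file.path);
}
```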
/** **UNSTABLE**: New API, yet to be vetted. /** **UNSTABLE**: New API, yet to be vetted.
* *
* Creates a presentable WebGPU surface from given window and * Creates a presentable WebGPU surface from given window and

View file

@ -45,7 +45,7 @@ declare var FormData: {
/** @category Fetch */ /** @category Fetch */
interface Body { interface Body {
/** A simple getter used to expose a `ReadableStream` of the body contents. */ /** A simple getter used to expose a `ReadableStream` of the body contents. */
readonly body: ReadableStream<Uint8Array> | null; readonly body: ReadableStream<Uint8Array<ArrayBuffer>> | null;
/** Stores a `Boolean` that declares whether the body has been used in a /** Stores a `Boolean` that declares whether the body has been used in a
* response yet. * response yet.
*/ */
@ -61,7 +61,7 @@ interface Body {
/** Takes a `Response` stream and reads it to completion. It returns a promise /** Takes a `Response` stream and reads it to completion. It returns a promise
* that resolves with a `Uint8Array`. * that resolves with a `Uint8Array`.
*/ */
bytes(): Promise<Uint8Array>; bytes(): Promise<Uint8Array<ArrayBuffer>>;
/** Takes a `Response` stream and reads it to completion. It returns a promise /** Takes a `Response` stream and reads it to completion. It returns a promise
* that resolves with a `FormData` object. * that resolves with a `FormData` object.
*/ */

View file

@ -191,6 +191,18 @@ declare namespace Deno {
* *
* @default {"0.0.0.0"} */ * @default {"0.0.0.0"} */
hostname?: string; hostname?: string;
/** Maximum number of pending connections in the listen queue.
*
* This parameter controls how many incoming connections can be queued by the
* operating system while waiting for the application to accept them. If more
* connections arrive when the queue is full, they will be refused.
*
* The kernel may adjust this value (e.g., rounding up to the next power of 2
* plus 1). Different operating systems have different maximum limits.
*
* @default {511} */
tcpBacklog?: number;
} }
/** @category Network */ /** @category Network */
@ -456,6 +468,15 @@ declare namespace Deno {
* TLS handshake. * TLS handshake.
*/ */
alpnProtocols?: string[]; alpnProtocols?: string[];
/** If true, the certificate's common name or subject alternative names will not be
* checked against the hostname provided in the options.
*
* This disables hostname verification but still validates the certificate chain.
* Use with caution and only when connecting to known servers.
*
* @default {false}
*/
unsafelyDisableHostnameVerification?: boolean;
} }
/** Establishes a secure connection over TLS (transport layer security) using /** Establishes a secure connection over TLS (transport layer security) using
@ -505,6 +526,15 @@ declare namespace Deno {
* TLS handshake. * TLS handshake.
*/ */
alpnProtocols?: string[]; alpnProtocols?: string[];
/** If true, the certificate's common name or subject alternative names will not be
* checked against the hostname provided in the options.
*
* This disables hostname verification but still validates the certificate chain.
* Use with caution and only when connecting to known servers.
*
* @default {false}
*/
unsafelyDisableHostnameVerification?: boolean;
} }
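A cautious sketch of the new flag on `Deno.connectTls`; the address is hypothetical, and the certificate chain is still validated:

```ts
const conn = await Deno.connectTls({
  hostname: "10.0.0.5", // e.g. a known internal host whose cert names a different hostname
  port: 443,
  unsafelyDisableHostnameVerification: true,
});
conn.close();
```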
/** Start TLS handshake from an existing connection using an optional list of /** Start TLS handshake from an existing connection using an optional list of

View file

@ -452,12 +452,6 @@ interface TextEncoderEncodeIntoResult {
written: number; written: number;
} }
/** @category Encoding */
interface TextEncoder extends TextEncoderCommon {
/** Returns the result of running UTF-8's encoder. */
encode(input?: string): Uint8Array;
encodeInto(input: string, dest: Uint8Array): TextEncoderEncodeIntoResult;
}
/** /**
* Allows you to convert a string into binary data (in the form of a Uint8Array) * Allows you to convert a string into binary data (in the form of a Uint8Array)
* given the encoding. * given the encoding.
@ -474,10 +468,13 @@ interface TextEncoder extends TextEncoderCommon {
*/ */
interface TextEncoder extends TextEncoderCommon { interface TextEncoder extends TextEncoderCommon {
/** Turns a string into binary data (in the form of a Uint8Array) using UTF-8 encoding. */ /** Turns a string into binary data (in the form of a Uint8Array) using UTF-8 encoding. */
encode(input?: string): Uint8Array; encode(input?: string): Uint8Array<ArrayBuffer>;
/** Encodes a string into the destination Uint8Array and returns the result of the encoding. */ /** Encodes a string into the destination Uint8Array and returns the result of the encoding. */
encodeInto(input: string, dest: Uint8Array): TextEncoderEncodeIntoResult; encodeInto(
input: string,
dest: Uint8Array<ArrayBufferLike>,
): TextEncoderEncodeIntoResult;
} }
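A short example of the two methods with the refined typings above; the destination buffer size is arbitrary:

```ts
const encoder = new TextEncoder();
const encoded = encoder.encode("déjà vu"); // Uint8Array<ArrayBuffer> with the lib above

const dest = new Uint8Array(8);
const { read, written } = encoder.encodeInto("déjà vu", dest);
console.log(read, written); // code units consumed vs. bytes written
```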
/** @category Encoding */ /** @category Encoding */

View file

@ -267,6 +267,13 @@ interface WebSocket extends EventTarget {
* // Using URL object instead of string * // Using URL object instead of string
* const url = new URL("ws://localhost:8080/path"); * const url = new URL("ws://localhost:8080/path");
* const wsWithUrl = new WebSocket(url); * const wsWithUrl = new WebSocket(url);
*
* // WebSocket with headers
* const wsWithProtocols = new WebSocket("ws://localhost:8080", {
* headers: {
* "Authorization": "Bearer foo",
* },
* });
* ``` * ```
* *
* @see https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/WebSocket * @see https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/WebSocket
@ -274,13 +281,34 @@ interface WebSocket extends EventTarget {
*/ */
declare var WebSocket: { declare var WebSocket: {
readonly prototype: WebSocket; readonly prototype: WebSocket;
new (url: string | URL, protocols?: string | string[]): WebSocket; new (
url: string | URL,
protocolsOrOptions?: string | string[] | WebSocketOptions,
): WebSocket;
readonly CLOSED: number; readonly CLOSED: number;
readonly CLOSING: number; readonly CLOSING: number;
readonly CONNECTING: number; readonly CONNECTING: number;
readonly OPEN: number; readonly OPEN: number;
}; };
/**
* Options for a WebSocket instance.
* This feature is non-standard.
*
* @category WebSockets
*/
interface WebSocketOptions {
/**
* The sub-protocol(s) that the client would like to use, in order of preference.
*/
protocols?: string | string[];
/**
* A Headers object, an object literal, or an array of two-item arrays to set the handshake's headers.
* This feature is non-standard.
*/
headers?: HeadersInit;
}
/** /**
* Specifies the type of binary data being received over a `WebSocket` connection. * Specifies the type of binary data being received over a `WebSocket` connection.
* *

19415
cli/tsc/dts/lib.dom.d.ts vendored

File diff suppressed because it is too large Load diff

View file

@ -21,7 +21,11 @@ and limitations under the License.
///////////////////////////// /////////////////////////////
interface AudioParam { interface AudioParam {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/AudioParam/setValueCurveAtTime) */ /**
* The **`setValueCurveAtTime()`** method of the AudioParam interface schedules the parameter's value to change following a curve defined by a list of values.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AudioParam/setValueCurveAtTime)
*/
setValueCurveAtTime(values: Iterable<number>, startTime: number, duration: number): AudioParam; setValueCurveAtTime(values: Iterable<number>, startTime: number, duration: number): AudioParam;
} }
@ -29,9 +33,17 @@ interface AudioParamMap extends ReadonlyMap<string, AudioParam> {
} }
interface BaseAudioContext { interface BaseAudioContext {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/BaseAudioContext/createIIRFilter) */ /**
* The **`createIIRFilter()`** method of the BaseAudioContext interface creates an IIRFilterNode, which represents a general **infinite impulse response** (IIR) filter which can be configured to serve as various types of filter.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/BaseAudioContext/createIIRFilter)
*/
createIIRFilter(feedforward: Iterable<number>, feedback: Iterable<number>): IIRFilterNode; createIIRFilter(feedforward: Iterable<number>, feedback: Iterable<number>): IIRFilterNode;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/BaseAudioContext/createPeriodicWave) */ /**
* The `createPeriodicWave()` method of the BaseAudioContext interface is used to create a PeriodicWave.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/BaseAudioContext/createPeriodicWave)
*/
createPeriodicWave(real: Iterable<number>, imag: Iterable<number>, constraints?: PeriodicWaveConstraints): PeriodicWave; createPeriodicWave(real: Iterable<number>, imag: Iterable<number>, constraints?: PeriodicWaveConstraints): PeriodicWave;
} }
@ -69,7 +81,11 @@ interface CSSUnparsedValue {
} }
interface Cache { interface Cache {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Cache/addAll) */ /**
* The **`addAll()`** method of the Cache interface takes an array of URLs, retrieves them, and adds the resulting response objects to the given cache.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Cache/addAll)
*/
addAll(requests: Iterable<RequestInfo>): Promise<void>; addAll(requests: Iterable<RequestInfo>): Promise<void>;
} }
@ -83,6 +99,21 @@ interface CanvasPathDrawingStyles {
setLineDash(segments: Iterable<number>): void; setLineDash(segments: Iterable<number>): void;
} }
interface CookieStoreManager {
/**
* The **`subscribe()`** method of the CookieStoreManager interface subscribes a ServiceWorkerRegistration to cookie change events.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CookieStoreManager/subscribe)
*/
subscribe(subscriptions: Iterable<CookieStoreGetOptions>): Promise<void>;
/**
* The **`unsubscribe()`** method of the CookieStoreManager interface stops the ServiceWorkerRegistration from receiving previously subscribed events.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CookieStoreManager/unsubscribe)
*/
unsubscribe(subscriptions: Iterable<CookieStoreGetOptions>): Promise<void>;
}
interface CustomStateSet extends Set<string> { interface CustomStateSet extends Set<string> {
} }
@ -171,7 +202,7 @@ interface HighlightRegistry extends Map<string, Highlight> {
interface IDBDatabase { interface IDBDatabase {
/** /**
* Returns a new transaction with the given mode ("readonly" or "readwrite") and scope which can be a single object store name or an array of names. * The **`transaction`** method of the IDBDatabase interface immediately returns a transaction object (IDBTransaction) containing the IDBTransaction.objectStore method, which you can use to access your object store.
* *
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBDatabase/transaction) * [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBDatabase/transaction)
*/ */
@ -180,9 +211,7 @@ interface IDBDatabase {
interface IDBObjectStore { interface IDBObjectStore {
/** /**
* Creates a new index in store with the given name, keyPath and options and returns a new IDBIndex. If the keyPath and options define constraints that cannot be satisfied with the data already in store the upgrade transaction will abort with a "ConstraintError" DOMException. * The **`createIndex()`** method of the IDBObjectStore interface creates and returns a new IDBIndex, with a field/column defining a new data point for each database record to contain.
*
* Throws an "InvalidStateError" DOMException if not called within an upgrade transaction.
* *
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBObjectStore/createIndex) * [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBObjectStore/createIndex)
*/ */
@ -197,7 +226,11 @@ interface MIDIInputMap extends ReadonlyMap<string, MIDIInput> {
} }
interface MIDIOutput { interface MIDIOutput {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/MIDIOutput/send) */ /**
* The **`send()`** method of the MIDIOutput interface queues messages for the corresponding MIDI port.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/MIDIOutput/send)
*/
send(data: Iterable<number>, timestamp?: DOMHighResTimeStamp): void; send(data: Iterable<number>, timestamp?: DOMHighResTimeStamp): void;
} }
@ -234,12 +267,17 @@ interface NamedNodeMap {
interface Navigator { interface Navigator {
/** /**
* The **`requestMediaKeySystemAccess()`** method of the Navigator interface returns a Promise which delivers a MediaKeySystemAccess object that can be used to access a particular media key system, which can in turn be used to create keys for decrypting a media stream.
* Available only in secure contexts. * Available only in secure contexts.
* *
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Navigator/requestMediaKeySystemAccess) * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Navigator/requestMediaKeySystemAccess)
*/ */
requestMediaKeySystemAccess(keySystem: string, supportedConfigurations: Iterable<MediaKeySystemConfiguration>): Promise<MediaKeySystemAccess>; requestMediaKeySystemAccess(keySystem: string, supportedConfigurations: Iterable<MediaKeySystemConfiguration>): Promise<MediaKeySystemAccess>;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Navigator/vibrate) */ /**
* The **`vibrate()`** method of the Navigator interface pulses the vibration hardware on the device, if such hardware exists.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Navigator/vibrate)
*/
vibrate(pattern: Iterable<number>): boolean; vibrate(pattern: Iterable<number>): boolean;
} }
@ -272,7 +310,11 @@ interface PluginArray {
} }
interface RTCRtpTransceiver { interface RTCRtpTransceiver {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/RTCRtpTransceiver/setCodecPreferences) */ /**
* The **`setCodecPreferences()`** method of the RTCRtpTransceiver interface is used to set the codecs that the transceiver allows for decoding _received_ data, in order of decreasing preference.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/RTCRtpTransceiver/setCodecPreferences)
*/
setCodecPreferences(codecs: Iterable<RTCRtpCodec>): void; setCodecPreferences(codecs: Iterable<RTCRtpCodec>): void;
} }
@ -327,17 +369,33 @@ interface StyleSheetList {
} }
interface SubtleCrypto { interface SubtleCrypto {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey) */ /**
* The **`deriveKey()`** method of the SubtleCrypto interface can be used to derive a secret key from a master key.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey)
*/
deriveKey(algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, baseKey: CryptoKey, derivedKeyType: AlgorithmIdentifier | AesDerivedKeyParams | HmacImportParams | HkdfParams | Pbkdf2Params, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>; deriveKey(algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, baseKey: CryptoKey, derivedKeyType: AlgorithmIdentifier | AesDerivedKeyParams | HmacImportParams | HkdfParams | Pbkdf2Params, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey) */ /**
* The **`generateKey()`** method of the SubtleCrypto interface is used to generate a new key (for symmetric algorithms) or key pair (for public-key algorithms).
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey)
*/
generateKey(algorithm: "Ed25519" | { name: "Ed25519" }, extractable: boolean, keyUsages: ReadonlyArray<"sign" | "verify">): Promise<CryptoKeyPair>; generateKey(algorithm: "Ed25519" | { name: "Ed25519" }, extractable: boolean, keyUsages: ReadonlyArray<"sign" | "verify">): Promise<CryptoKeyPair>;
generateKey(algorithm: RsaHashedKeyGenParams | EcKeyGenParams, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKeyPair>; generateKey(algorithm: RsaHashedKeyGenParams | EcKeyGenParams, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKeyPair>;
generateKey(algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>; generateKey(algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>;
generateKey(algorithm: AlgorithmIdentifier, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKeyPair | CryptoKey>; generateKey(algorithm: AlgorithmIdentifier, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKeyPair | CryptoKey>;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey) */ /**
* The **`importKey()`** method of the SubtleCrypto interface imports a key: that is, it takes as input a key in an external, portable format and gives you a CryptoKey object that you can use in the Web Crypto API.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey)
*/
importKey(format: "jwk", keyData: JsonWebKey, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>; importKey(format: "jwk", keyData: JsonWebKey, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>;
importKey(format: Exclude<KeyFormat, "jwk">, keyData: BufferSource, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>; importKey(format: Exclude<KeyFormat, "jwk">, keyData: BufferSource, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey) */ /**
* The **`unwrapKey()`** method of the SubtleCrypto interface 'unwraps' a key.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey)
*/
unwrapKey(format: KeyFormat, wrappedKey: BufferSource, unwrappingKey: CryptoKey, unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, unwrappedKeyAlgorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>; unwrapKey(format: KeyFormat, wrappedKey: BufferSource, unwrappingKey: CryptoKey, unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, unwrappedKeyAlgorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>;
} }
@ -371,19 +429,39 @@ interface ViewTransitionTypeSet extends Set<string> {
} }
interface WEBGL_draw_buffers { interface WEBGL_draw_buffers {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_draw_buffers/drawBuffersWEBGL) */ /**
* The **`WEBGL_draw_buffers.drawBuffersWEBGL()`** method is part of the WebGL API and allows you to define the draw buffers to which all fragment colors are written.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_draw_buffers/drawBuffersWEBGL)
*/
drawBuffersWEBGL(buffers: Iterable<GLenum>): void; drawBuffersWEBGL(buffers: Iterable<GLenum>): void;
} }
interface WEBGL_multi_draw { interface WEBGL_multi_draw {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysInstancedWEBGL) */ /**
multiDrawArraysInstancedWEBGL(mode: GLenum, firstsList: Int32Array | Iterable<GLint>, firstsOffset: number, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, instanceCountsList: Int32Array | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void; * The **`WEBGL_multi_draw.multiDrawArraysInstancedWEBGL()`** method of the WebGL API renders multiple primitives from array data.
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysWEBGL) */ *
multiDrawArraysWEBGL(mode: GLenum, firstsList: Int32Array | Iterable<GLint>, firstsOffset: number, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, drawcount: GLsizei): void; * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysInstancedWEBGL)
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsInstancedWEBGL) */ */
multiDrawElementsInstancedWEBGL(mode: GLenum, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array | Iterable<GLsizei>, offsetsOffset: number, instanceCountsList: Int32Array | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void; multiDrawArraysInstancedWEBGL(mode: GLenum, firstsList: Int32Array<ArrayBufferLike> | Iterable<GLint>, firstsOffset: number, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, instanceCountsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsWEBGL) */ /**
multiDrawElementsWEBGL(mode: GLenum, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array | Iterable<GLsizei>, offsetsOffset: number, drawcount: GLsizei): void; * The **`WEBGL_multi_draw.multiDrawArraysWEBGL()`** method of the WebGL API renders multiple primitives from array data.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysWEBGL)
*/
multiDrawArraysWEBGL(mode: GLenum, firstsList: Int32Array<ArrayBufferLike> | Iterable<GLint>, firstsOffset: number, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, drawcount: GLsizei): void;
/**
* The **`WEBGL_multi_draw.multiDrawElementsInstancedWEBGL()`** method of the WebGL API renders multiple primitives from array data.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsInstancedWEBGL)
*/
multiDrawElementsInstancedWEBGL(mode: GLenum, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, offsetsOffset: number, instanceCountsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void;
/**
* The **`WEBGL_multi_draw.multiDrawElementsWEBGL()`** method of the WebGL API renders multiple primitives from array data.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsWEBGL)
*/
multiDrawElementsWEBGL(mode: GLenum, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, offsetsOffset: number, drawcount: GLsizei): void;
} }
interface WebGL2RenderingContextBase { interface WebGL2RenderingContextBase {
@ -398,7 +476,7 @@ interface WebGL2RenderingContextBase {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getActiveUniforms) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getActiveUniforms) */
getActiveUniforms(program: WebGLProgram, uniformIndices: Iterable<GLuint>, pname: GLenum): any; getActiveUniforms(program: WebGLProgram, uniformIndices: Iterable<GLuint>, pname: GLenum): any;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getUniformIndices) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getUniformIndices) */
getUniformIndices(program: WebGLProgram, uniformNames: Iterable<string>): Iterable<GLuint> | null; getUniformIndices(program: WebGLProgram, uniformNames: Iterable<string>): GLuint[] | null;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateFramebuffer) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateFramebuffer) */
invalidateFramebuffer(target: GLenum, attachments: Iterable<GLenum>): void; invalidateFramebuffer(target: GLenum, attachments: Iterable<GLenum>): void;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateSubFramebuffer) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateSubFramebuffer) */

View file

@ -269,7 +269,7 @@ interface String {
} }
interface ArrayBuffer { interface ArrayBuffer {
readonly [Symbol.toStringTag]: string; readonly [Symbol.toStringTag]: "ArrayBuffer";
} }
interface DataView<TArrayBuffer extends ArrayBufferLike> { interface DataView<TArrayBuffer extends ArrayBufferLike> {

View file

@ -29,13 +29,13 @@ interface SharedArrayBuffer {
* Returns a section of an SharedArrayBuffer. * Returns a section of an SharedArrayBuffer.
*/ */
slice(begin?: number, end?: number): SharedArrayBuffer; slice(begin?: number, end?: number): SharedArrayBuffer;
readonly [Symbol.species]: SharedArrayBuffer;
readonly [Symbol.toStringTag]: "SharedArrayBuffer"; readonly [Symbol.toStringTag]: "SharedArrayBuffer";
} }
interface SharedArrayBufferConstructor { interface SharedArrayBufferConstructor {
readonly prototype: SharedArrayBuffer; readonly prototype: SharedArrayBuffer;
new (byteLength?: number): SharedArrayBuffer; new (byteLength?: number): SharedArrayBuffer;
readonly [Symbol.species]: SharedArrayBufferConstructor;
} }
declare var SharedArrayBuffer: SharedArrayBufferConstructor; declare var SharedArrayBuffer: SharedArrayBufferConstructor;

View file

@ -29,15 +29,27 @@ declare namespace Intl {
granularity?: "grapheme" | "word" | "sentence" | undefined; granularity?: "grapheme" | "word" | "sentence" | undefined;
} }
/**
* The `Intl.Segmenter` object enables locale-sensitive text segmentation, enabling you to get meaningful items (graphemes, words or sentences) from a string.
*
* [MDN](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter)
*/
interface Segmenter { interface Segmenter {
/** /**
* Returns `Segments` object containing the segments of the input string, using the segmenter's locale and granularity. * Returns `Segments` object containing the segments of the input string, using the segmenter's locale and granularity.
* *
* [MDN](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter/segment)
*
* @param input - The text to be segmented as a `string`. * @param input - The text to be segmented as a `string`.
* *
* @returns A new iterable Segments object containing the segments of the input string, using the segmenter's locale and granularity. * @returns A new iterable Segments object containing the segments of the input string, using the segmenter's locale and granularity.
*/ */
segment(input: string): Segments; segment(input: string): Segments;
/**
* The `resolvedOptions()` method of `Intl.Segmenter` instances returns a new object with properties reflecting the options computed during initialization of this `Segmenter` object.
*
* [MDN](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter/resolvedOptions)
*/
resolvedOptions(): ResolvedSegmenterOptions; resolvedOptions(): ResolvedSegmenterOptions;
} }
@ -50,13 +62,20 @@ declare namespace Intl {
[Symbol.iterator](): SegmentIterator<T>; [Symbol.iterator](): SegmentIterator<T>;
} }
/**
* A `Segments` object is an iterable collection of the segments of a text string. It is returned by a call to the `segment()` method of an `Intl.Segmenter` object.
*
* [MDN](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter/segment/Segments)
*/
interface Segments { interface Segments {
/** /**
* Returns an object describing the segment in the original string that includes the code unit at a specified index. * Returns an object describing the segment in the original string that includes the code unit at a specified index.
* *
* [MDN](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter/segment/Segments/containing)
*
* @param codeUnitIndex - A number specifying the index of the code unit in the original input string. If the value is omitted, it defaults to `0`. * @param codeUnitIndex - A number specifying the index of the code unit in the original input string. If the value is omitted, it defaults to `0`.
*/ */
containing(codeUnitIndex?: number): SegmentData; containing(codeUnitIndex?: number): SegmentData | undefined;
/** Returns an iterator to iterate over the segments. */ /** Returns an iterator to iterate over the segments. */
[Symbol.iterator](): SegmentIterator<SegmentData>; [Symbol.iterator](): SegmentIterator<SegmentData>;
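A small example of the segmentation API documented above, including the case where `containing()` can now return `undefined` (index past the end of the input):

```ts
const segmenter = new Intl.Segmenter("en", { granularity: "word" });
const segments = segmenter.segment("Hello, world");

for (const { segment, isWordLike } of segments) {
  if (isWordLike) console.log(segment); // "Hello", "world"
}

const data = segments.containing(7);   // SegmentData for "world"
const past = segments.containing(100); // undefined: index is out of range
```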
@ -76,6 +95,11 @@ declare namespace Intl {
isWordLike?: boolean; isWordLike?: boolean;
} }
/**
* The `Intl.Segmenter` object enables locale-sensitive text segmentation, enabling you to get meaningful items (graphemes, words or sentences) from a string.
*
* [MDN](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Intl/Segmenter)
*/
const Segmenter: { const Segmenter: {
prototype: Segmenter; prototype: Segmenter;

View file

@ -1158,6 +1158,7 @@ interface JSON {
* @param text A valid JSON string. * @param text A valid JSON string.
* @param reviver A function that transforms the results. This function is called for each member of the object. * @param reviver A function that transforms the results. This function is called for each member of the object.
* If a member contains nested objects, the nested objects are transformed before the parent object is. * If a member contains nested objects, the nested objects are transformed before the parent object is.
* @throws {SyntaxError} If `text` is not valid JSON.
*/ */
parse(text: string, reviver?: (this: any, key: string, value: any) => any): any; parse(text: string, reviver?: (this: any, key: string, value: any) => any): any;
/** /**
@ -1165,6 +1166,7 @@ interface JSON {
* @param value A JavaScript value, usually an object or array, to be converted. * @param value A JavaScript value, usually an object or array, to be converted.
* @param replacer A function that transforms the results. * @param replacer A function that transforms the results.
* @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read. * @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read.
* @throws {TypeError} If a circular reference or a BigInt value is found.
*/ */
stringify(value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string; stringify(value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
/** /**
@ -1172,6 +1174,7 @@ interface JSON {
* @param value A JavaScript value, usually an object or array, to be converted. * @param value A JavaScript value, usually an object or array, to be converted.
* @param replacer An array of strings and numbers that acts as an approved list for selecting the object properties that will be stringified. * @param replacer An array of strings and numbers that acts as an approved list for selecting the object properties that will be stringified.
* @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read. * @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read.
* @throws {TypeError} If a circular reference or a BigInt value is found.
*/ */
stringify(value: any, replacer?: (number | string)[] | null, space?: string | number): string; stringify(value: any, replacer?: (number | string)[] | null, space?: string | number): string;
} }
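The `@throws` annotations above describe existing runtime behavior, for example:

```ts
try {
  JSON.parse("{ not valid json }");
} catch (e) {
  console.assert(e instanceof SyntaxError);
}

try {
  JSON.stringify({ big: 10n }); // BigInt values cannot be serialized
} catch (e) {
  console.assert(e instanceof TypeError);
}
```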
@ -1393,14 +1396,18 @@ interface Array<T> {
/** /**
* Removes elements from an array and, if necessary, inserts new elements in their place, returning the deleted elements. * Removes elements from an array and, if necessary, inserts new elements in their place, returning the deleted elements.
* @param start The zero-based location in the array from which to start removing elements. * @param start The zero-based location in the array from which to start removing elements.
* @param deleteCount The number of elements to remove. * @param deleteCount The number of elements to remove. Omitting this argument will remove all elements from the start
* parameter location to the end of the array. If the value of this argument is either a negative number, zero, undefined, or a type
* that cannot be converted to an integer, the function will evaluate the argument as zero and not remove any elements.
* @returns An array containing the elements that were deleted. * @returns An array containing the elements that were deleted.
*/ */
splice(start: number, deleteCount?: number): T[]; splice(start: number, deleteCount?: number): T[];
/** /**
* Removes elements from an array and, if necessary, inserts new elements in their place, returning the deleted elements. * Removes elements from an array and, if necessary, inserts new elements in their place, returning the deleted elements.
* @param start The zero-based location in the array from which to start removing elements. * @param start The zero-based location in the array from which to start removing elements.
* @param deleteCount The number of elements to remove. * @param deleteCount The number of elements to remove. If the value of this argument is either a negative number, zero,
* undefined, or a type that cannot be converted to an integer, the function will evaluate the argument as zero and
* not remove any elements.
* @param items Elements to insert into the array in place of the deleted elements. * @param items Elements to insert into the array in place of the deleted elements.
* @returns An array containing the elements that were deleted. * @returns An array containing the elements that were deleted.
*/ */
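The clarified `deleteCount` wording corresponds to behavior like the following:

```ts
const a = [1, 2, 3, 4];
a.splice(1);     // returns [2, 3, 4]; a is now [1]

const b = [1, 2, 3, 4];
b.splice(1, -1); // negative deleteCount is treated as 0; b is unchanged
```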

View file

@ -25,3 +25,5 @@ and limitations under the License.
/// <reference lib="esnext.iterator" /> /// <reference lib="esnext.iterator" />
/// <reference lib="esnext.promise" /> /// <reference lib="esnext.promise" />
/// <reference lib="esnext.float16" /> /// <reference lib="esnext.float16" />
/// <reference lib="esnext.error" />
/// <reference lib="esnext.sharedmemory" />

24
cli/tsc/dts/lib.esnext.error.d.ts vendored Normal file
View file

@ -0,0 +1,24 @@
/*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */
/// <reference no-default-lib="true"/>
interface ErrorConstructor {
/**
* Indicates whether the argument provided is a built-in Error instance or not.
*/
isError(error: unknown): error is Error;
}
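A brief illustration of the predicate declared above:

```ts
console.log(Error.isError(new TypeError("boom")));          // true: built-in Error instance
console.log(Error.isError({ name: "Error", message: "" })); // false: Error-shaped plain object
```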

View file

@ -0,0 +1,25 @@
/*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */
/// <reference no-default-lib="true"/>
interface Atomics {
/**
* Performs a finite-time microwait by signaling to the operating system or
* CPU that the current executing code is in a spin-wait loop.
*/
pause(n?: number): void;
}
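A sketch of the spin-wait hint declared above; the shared flag setup is illustrative and would be flipped by another thread:

```ts
const sab = new SharedArrayBuffer(4);
const flag = new Int32Array(sab); // another worker sets flag[0] to 1

while (Atomics.load(flag, 0) === 0) {
  Atomics.pause(); // hint to the CPU that this is a spin-wait loop
}
```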

File diff suppressed because it is too large Load diff

View file

@ -42,7 +42,11 @@ interface CSSUnparsedValue {
} }
interface Cache { interface Cache {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Cache/addAll) */ /**
* The **`addAll()`** method of the Cache interface takes an array of URLs, retrieves them, and adds the resulting response objects to the given cache.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Cache/addAll)
*/
addAll(requests: Iterable<RequestInfo>): Promise<void>; addAll(requests: Iterable<RequestInfo>): Promise<void>;
} }
@ -56,6 +60,21 @@ interface CanvasPathDrawingStyles {
setLineDash(segments: Iterable<number>): void; setLineDash(segments: Iterable<number>): void;
} }
interface CookieStoreManager {
/**
* The **`subscribe()`** method of the CookieStoreManager interface subscribes a ServiceWorkerRegistration to cookie change events.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CookieStoreManager/subscribe)
*/
subscribe(subscriptions: Iterable<CookieStoreGetOptions>): Promise<void>;
/**
* The **`unsubscribe()`** method of the CookieStoreManager interface stops the ServiceWorkerRegistration from receiving previously subscribed events.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/CookieStoreManager/unsubscribe)
*/
unsubscribe(subscriptions: Iterable<CookieStoreGetOptions>): Promise<void>;
}
interface DOMStringList { interface DOMStringList {
[Symbol.iterator](): ArrayIterator<string>; [Symbol.iterator](): ArrayIterator<string>;
} }
@ -97,7 +116,7 @@ interface Headers {
interface IDBDatabase { interface IDBDatabase {
/** /**
* Returns a new transaction with the given mode ("readonly" or "readwrite") and scope which can be a single object store name or an array of names. * The **`transaction`** method of the IDBDatabase interface immediately returns a transaction object (IDBTransaction) containing the IDBTransaction.objectStore method, which you can use to access your object store.
* *
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBDatabase/transaction) * [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBDatabase/transaction)
*/ */
@ -106,9 +125,7 @@ interface IDBDatabase {
interface IDBObjectStore { interface IDBObjectStore {
/** /**
* Creates a new index in store with the given name, keyPath and options and returns a new IDBIndex. If the keyPath and options define constraints that cannot be satisfied with the data already in store the upgrade transaction will abort with a "ConstraintError" DOMException. * The **`createIndex()`** method of the field/column defining a new data point for each database record to contain.
*
* Throws an "InvalidStateError" DOMException if not called within an upgrade transaction.
* *
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBObjectStore/createIndex) * [MDN Reference](https://developer.mozilla.org/docs/Web/API/IDBObjectStore/createIndex)
*/ */
@ -136,17 +153,33 @@ interface StylePropertyMapReadOnly {
} }
interface SubtleCrypto { interface SubtleCrypto {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey) */ /**
* The **`deriveKey()`** method of the SubtleCrypto interface can be used to derive a secret key from a master key.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/deriveKey)
*/
deriveKey(algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, baseKey: CryptoKey, derivedKeyType: AlgorithmIdentifier | AesDerivedKeyParams | HmacImportParams | HkdfParams | Pbkdf2Params, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>; deriveKey(algorithm: AlgorithmIdentifier | EcdhKeyDeriveParams | HkdfParams | Pbkdf2Params, baseKey: CryptoKey, derivedKeyType: AlgorithmIdentifier | AesDerivedKeyParams | HmacImportParams | HkdfParams | Pbkdf2Params, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey) */ /**
* The **`generateKey()`** method of the SubtleCrypto interface is used to generate a new key (for symmetric algorithms) or key pair (for public-key algorithms).
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/generateKey)
*/
generateKey(algorithm: "Ed25519" | { name: "Ed25519" }, extractable: boolean, keyUsages: ReadonlyArray<"sign" | "verify">): Promise<CryptoKeyPair>; generateKey(algorithm: "Ed25519" | { name: "Ed25519" }, extractable: boolean, keyUsages: ReadonlyArray<"sign" | "verify">): Promise<CryptoKeyPair>;
generateKey(algorithm: RsaHashedKeyGenParams | EcKeyGenParams, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKeyPair>; generateKey(algorithm: RsaHashedKeyGenParams | EcKeyGenParams, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKeyPair>;
generateKey(algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>; generateKey(algorithm: AesKeyGenParams | HmacKeyGenParams | Pbkdf2Params, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>;
generateKey(algorithm: AlgorithmIdentifier, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKeyPair | CryptoKey>; generateKey(algorithm: AlgorithmIdentifier, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKeyPair | CryptoKey>;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey) */ /**
* The **`importKey()`** method of the SubtleCrypto interface imports a key: that is, it takes as input a key in an external, portable format and gives you a CryptoKey object that you can use in the Web Crypto API.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/importKey)
*/
importKey(format: "jwk", keyData: JsonWebKey, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>; importKey(format: "jwk", keyData: JsonWebKey, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: ReadonlyArray<KeyUsage>): Promise<CryptoKey>;
importKey(format: Exclude<KeyFormat, "jwk">, keyData: BufferSource, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>; importKey(format: Exclude<KeyFormat, "jwk">, keyData: BufferSource, algorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey) */ /**
* The **`unwrapKey()`** method of the SubtleCrypto interface 'unwraps' a key.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/SubtleCrypto/unwrapKey)
*/
unwrapKey(format: KeyFormat, wrappedKey: BufferSource, unwrappingKey: CryptoKey, unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, unwrappedKeyAlgorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>; unwrapKey(format: KeyFormat, wrappedKey: BufferSource, unwrappingKey: CryptoKey, unwrapAlgorithm: AlgorithmIdentifier | RsaOaepParams | AesCtrParams | AesCbcParams | AesGcmParams, unwrappedKeyAlgorithm: AlgorithmIdentifier | RsaHashedImportParams | EcKeyImportParams | HmacImportParams | AesKeyAlgorithm, extractable: boolean, keyUsages: Iterable<KeyUsage>): Promise<CryptoKey>;
} }
@ -165,19 +198,39 @@ interface URLSearchParams {
} }
interface WEBGL_draw_buffers { interface WEBGL_draw_buffers {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_draw_buffers/drawBuffersWEBGL) */ /**
* The **`WEBGL_draw_buffers.drawBuffersWEBGL()`** method is part of the WebGL API and allows you to define the draw buffers to which all fragment colors are written.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_draw_buffers/drawBuffersWEBGL)
*/
drawBuffersWEBGL(buffers: Iterable<GLenum>): void; drawBuffersWEBGL(buffers: Iterable<GLenum>): void;
} }
interface WEBGL_multi_draw { interface WEBGL_multi_draw {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysInstancedWEBGL) */ /**
multiDrawArraysInstancedWEBGL(mode: GLenum, firstsList: Int32Array | Iterable<GLint>, firstsOffset: number, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, instanceCountsList: Int32Array | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void; * The **`WEBGL_multi_draw.multiDrawArraysInstancedWEBGL()`** method of the WebGL API renders multiple primitives from array data.
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysWEBGL) */ *
multiDrawArraysWEBGL(mode: GLenum, firstsList: Int32Array | Iterable<GLint>, firstsOffset: number, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, drawcount: GLsizei): void; * [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysInstancedWEBGL)
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsInstancedWEBGL) */ */
multiDrawElementsInstancedWEBGL(mode: GLenum, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array | Iterable<GLsizei>, offsetsOffset: number, instanceCountsList: Int32Array | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void; multiDrawArraysInstancedWEBGL(mode: GLenum, firstsList: Int32Array<ArrayBufferLike> | Iterable<GLint>, firstsOffset: number, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, instanceCountsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsWEBGL) */ /**
multiDrawElementsWEBGL(mode: GLenum, countsList: Int32Array | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array | Iterable<GLsizei>, offsetsOffset: number, drawcount: GLsizei): void; * The **`WEBGL_multi_draw.multiDrawArraysWEBGL()`** method of the WebGL API renders multiple primitives from array data.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawArraysWEBGL)
*/
multiDrawArraysWEBGL(mode: GLenum, firstsList: Int32Array<ArrayBufferLike> | Iterable<GLint>, firstsOffset: number, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, drawcount: GLsizei): void;
/**
* The **`WEBGL_multi_draw.multiDrawElementsInstancedWEBGL()`** method of the WebGL API renders multiple primitives from array data.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsInstancedWEBGL)
*/
multiDrawElementsInstancedWEBGL(mode: GLenum, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, offsetsOffset: number, instanceCountsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, instanceCountsOffset: number, drawcount: GLsizei): void;
/**
* The **`WEBGL_multi_draw.multiDrawElementsWEBGL()`** method of the WebGL API renders multiple primitives from array data.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/WEBGL_multi_draw/multiDrawElementsWEBGL)
*/
multiDrawElementsWEBGL(mode: GLenum, countsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, countsOffset: number, type: GLenum, offsetsList: Int32Array<ArrayBufferLike> | Iterable<GLsizei>, offsetsOffset: number, drawcount: GLsizei): void;
} }
interface WebGL2RenderingContextBase { interface WebGL2RenderingContextBase {
@ -192,7 +245,7 @@ interface WebGL2RenderingContextBase {
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getActiveUniforms) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getActiveUniforms) */
getActiveUniforms(program: WebGLProgram, uniformIndices: Iterable<GLuint>, pname: GLenum): any; getActiveUniforms(program: WebGLProgram, uniformIndices: Iterable<GLuint>, pname: GLenum): any;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getUniformIndices) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/getUniformIndices) */
getUniformIndices(program: WebGLProgram, uniformNames: Iterable<string>): Iterable<GLuint> | null; getUniformIndices(program: WebGLProgram, uniformNames: Iterable<string>): GLuint[] | null;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateFramebuffer) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateFramebuffer) */
invalidateFramebuffer(target: GLenum, attachments: Iterable<GLenum>): void; invalidateFramebuffer(target: GLenum, attachments: Iterable<GLenum>): void;
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateSubFramebuffer) */ /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/WebGL2RenderingContext/invalidateSubFramebuffer) */
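The expanded SubtleCrypto comments in this file describe `deriveKey()`. A brief, hedged sketch matching the declared signature (PBKDF2 password material derived into an AES-GCM key; the salt, iteration count, and password are illustrative):

// Derive an AES-GCM key from a password with PBKDF2.
const baseKey = await crypto.subtle.importKey(
  "raw",
  new TextEncoder().encode("correct horse battery staple"),
  "PBKDF2",
  false,
  ["deriveKey"],
);
const aesKey = await crypto.subtle.deriveKey(
  {
    name: "PBKDF2",
    salt: crypto.getRandomValues(new Uint8Array(16)),
    iterations: 100_000,
    hash: "SHA-256",
  },
  baseKey,
  { name: "AES-GCM", length: 256 },
  true,
  ["encrypt", "decrypt"],
);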

View file

@ -139,7 +139,7 @@ declare namespace ts {
readonly kind: ActionWatchTypingLocations; readonly kind: ActionWatchTypingLocations;
} }
} }
const versionMajorMinor = "5.8"; const versionMajorMinor = "5.9";
/** The version of the TypeScript compiler release */ /** The version of the TypeScript compiler release */
const version: string; const version: string;
/** /**
@ -335,203 +335,204 @@ declare namespace ts {
BigIntKeyword = 163, BigIntKeyword = 163,
OverrideKeyword = 164, OverrideKeyword = 164,
OfKeyword = 165, OfKeyword = 165,
QualifiedName = 166, DeferKeyword = 166,
ComputedPropertyName = 167, QualifiedName = 167,
TypeParameter = 168, ComputedPropertyName = 168,
Parameter = 169, TypeParameter = 169,
Decorator = 170, Parameter = 170,
PropertySignature = 171, Decorator = 171,
PropertyDeclaration = 172, PropertySignature = 172,
MethodSignature = 173, PropertyDeclaration = 173,
MethodDeclaration = 174, MethodSignature = 174,
ClassStaticBlockDeclaration = 175, MethodDeclaration = 175,
Constructor = 176, ClassStaticBlockDeclaration = 176,
GetAccessor = 177, Constructor = 177,
SetAccessor = 178, GetAccessor = 178,
CallSignature = 179, SetAccessor = 179,
ConstructSignature = 180, CallSignature = 180,
IndexSignature = 181, ConstructSignature = 181,
TypePredicate = 182, IndexSignature = 182,
TypeReference = 183, TypePredicate = 183,
FunctionType = 184, TypeReference = 184,
ConstructorType = 185, FunctionType = 185,
TypeQuery = 186, ConstructorType = 186,
TypeLiteral = 187, TypeQuery = 187,
ArrayType = 188, TypeLiteral = 188,
TupleType = 189, ArrayType = 189,
OptionalType = 190, TupleType = 190,
RestType = 191, OptionalType = 191,
UnionType = 192, RestType = 192,
IntersectionType = 193, UnionType = 193,
ConditionalType = 194, IntersectionType = 194,
InferType = 195, ConditionalType = 195,
ParenthesizedType = 196, InferType = 196,
ThisType = 197, ParenthesizedType = 197,
TypeOperator = 198, ThisType = 198,
IndexedAccessType = 199, TypeOperator = 199,
MappedType = 200, IndexedAccessType = 200,
LiteralType = 201, MappedType = 201,
NamedTupleMember = 202, LiteralType = 202,
TemplateLiteralType = 203, NamedTupleMember = 203,
TemplateLiteralTypeSpan = 204, TemplateLiteralType = 204,
ImportType = 205, TemplateLiteralTypeSpan = 205,
ObjectBindingPattern = 206, ImportType = 206,
ArrayBindingPattern = 207, ObjectBindingPattern = 207,
BindingElement = 208, ArrayBindingPattern = 208,
ArrayLiteralExpression = 209, BindingElement = 209,
ObjectLiteralExpression = 210, ArrayLiteralExpression = 210,
PropertyAccessExpression = 211, ObjectLiteralExpression = 211,
ElementAccessExpression = 212, PropertyAccessExpression = 212,
CallExpression = 213, ElementAccessExpression = 213,
NewExpression = 214, CallExpression = 214,
TaggedTemplateExpression = 215, NewExpression = 215,
TypeAssertionExpression = 216, TaggedTemplateExpression = 216,
ParenthesizedExpression = 217, TypeAssertionExpression = 217,
FunctionExpression = 218, ParenthesizedExpression = 218,
ArrowFunction = 219, FunctionExpression = 219,
DeleteExpression = 220, ArrowFunction = 220,
TypeOfExpression = 221, DeleteExpression = 221,
VoidExpression = 222, TypeOfExpression = 222,
AwaitExpression = 223, VoidExpression = 223,
PrefixUnaryExpression = 224, AwaitExpression = 224,
PostfixUnaryExpression = 225, PrefixUnaryExpression = 225,
BinaryExpression = 226, PostfixUnaryExpression = 226,
ConditionalExpression = 227, BinaryExpression = 227,
TemplateExpression = 228, ConditionalExpression = 228,
YieldExpression = 229, TemplateExpression = 229,
SpreadElement = 230, YieldExpression = 230,
ClassExpression = 231, SpreadElement = 231,
OmittedExpression = 232, ClassExpression = 232,
ExpressionWithTypeArguments = 233, OmittedExpression = 233,
AsExpression = 234, ExpressionWithTypeArguments = 234,
NonNullExpression = 235, AsExpression = 235,
MetaProperty = 236, NonNullExpression = 236,
SyntheticExpression = 237, MetaProperty = 237,
SatisfiesExpression = 238, SyntheticExpression = 238,
TemplateSpan = 239, SatisfiesExpression = 239,
SemicolonClassElement = 240, TemplateSpan = 240,
Block = 241, SemicolonClassElement = 241,
EmptyStatement = 242, Block = 242,
VariableStatement = 243, EmptyStatement = 243,
ExpressionStatement = 244, VariableStatement = 244,
IfStatement = 245, ExpressionStatement = 245,
DoStatement = 246, IfStatement = 246,
WhileStatement = 247, DoStatement = 247,
ForStatement = 248, WhileStatement = 248,
ForInStatement = 249, ForStatement = 249,
ForOfStatement = 250, ForInStatement = 250,
ContinueStatement = 251, ForOfStatement = 251,
BreakStatement = 252, ContinueStatement = 252,
ReturnStatement = 253, BreakStatement = 253,
WithStatement = 254, ReturnStatement = 254,
SwitchStatement = 255, WithStatement = 255,
LabeledStatement = 256, SwitchStatement = 256,
ThrowStatement = 257, LabeledStatement = 257,
TryStatement = 258, ThrowStatement = 258,
DebuggerStatement = 259, TryStatement = 259,
VariableDeclaration = 260, DebuggerStatement = 260,
VariableDeclarationList = 261, VariableDeclaration = 261,
FunctionDeclaration = 262, VariableDeclarationList = 262,
ClassDeclaration = 263, FunctionDeclaration = 263,
InterfaceDeclaration = 264, ClassDeclaration = 264,
TypeAliasDeclaration = 265, InterfaceDeclaration = 265,
EnumDeclaration = 266, TypeAliasDeclaration = 266,
ModuleDeclaration = 267, EnumDeclaration = 267,
ModuleBlock = 268, ModuleDeclaration = 268,
CaseBlock = 269, ModuleBlock = 269,
NamespaceExportDeclaration = 270, CaseBlock = 270,
ImportEqualsDeclaration = 271, NamespaceExportDeclaration = 271,
ImportDeclaration = 272, ImportEqualsDeclaration = 272,
ImportClause = 273, ImportDeclaration = 273,
NamespaceImport = 274, ImportClause = 274,
NamedImports = 275, NamespaceImport = 275,
ImportSpecifier = 276, NamedImports = 276,
ExportAssignment = 277, ImportSpecifier = 277,
ExportDeclaration = 278, ExportAssignment = 278,
NamedExports = 279, ExportDeclaration = 279,
NamespaceExport = 280, NamedExports = 280,
ExportSpecifier = 281, NamespaceExport = 281,
MissingDeclaration = 282, ExportSpecifier = 282,
ExternalModuleReference = 283, MissingDeclaration = 283,
JsxElement = 284, ExternalModuleReference = 284,
JsxSelfClosingElement = 285, JsxElement = 285,
JsxOpeningElement = 286, JsxSelfClosingElement = 286,
JsxClosingElement = 287, JsxOpeningElement = 287,
JsxFragment = 288, JsxClosingElement = 288,
JsxOpeningFragment = 289, JsxFragment = 289,
JsxClosingFragment = 290, JsxOpeningFragment = 290,
JsxAttribute = 291, JsxClosingFragment = 291,
JsxAttributes = 292, JsxAttribute = 292,
JsxSpreadAttribute = 293, JsxAttributes = 293,
JsxExpression = 294, JsxSpreadAttribute = 294,
JsxNamespacedName = 295, JsxExpression = 295,
CaseClause = 296, JsxNamespacedName = 296,
DefaultClause = 297, CaseClause = 297,
HeritageClause = 298, DefaultClause = 298,
CatchClause = 299, HeritageClause = 299,
ImportAttributes = 300, CatchClause = 300,
ImportAttribute = 301, ImportAttributes = 301,
/** @deprecated */ AssertClause = 300, ImportAttribute = 302,
/** @deprecated */ AssertEntry = 301, /** @deprecated */ AssertClause = 301,
/** @deprecated */ ImportTypeAssertionContainer = 302, /** @deprecated */ AssertEntry = 302,
PropertyAssignment = 303, /** @deprecated */ ImportTypeAssertionContainer = 303,
ShorthandPropertyAssignment = 304, PropertyAssignment = 304,
SpreadAssignment = 305, ShorthandPropertyAssignment = 305,
EnumMember = 306, SpreadAssignment = 306,
SourceFile = 307, EnumMember = 307,
Bundle = 308, SourceFile = 308,
JSDocTypeExpression = 309, Bundle = 309,
JSDocNameReference = 310, JSDocTypeExpression = 310,
JSDocMemberName = 311, JSDocNameReference = 311,
JSDocAllType = 312, JSDocMemberName = 312,
JSDocUnknownType = 313, JSDocAllType = 313,
JSDocNullableType = 314, JSDocUnknownType = 314,
JSDocNonNullableType = 315, JSDocNullableType = 315,
JSDocOptionalType = 316, JSDocNonNullableType = 316,
JSDocFunctionType = 317, JSDocOptionalType = 317,
JSDocVariadicType = 318, JSDocFunctionType = 318,
JSDocNamepathType = 319, JSDocVariadicType = 319,
JSDoc = 320, JSDocNamepathType = 320,
JSDoc = 321,
/** @deprecated Use SyntaxKind.JSDoc */ /** @deprecated Use SyntaxKind.JSDoc */
JSDocComment = 320, JSDocComment = 321,
JSDocText = 321, JSDocText = 322,
JSDocTypeLiteral = 322, JSDocTypeLiteral = 323,
JSDocSignature = 323, JSDocSignature = 324,
JSDocLink = 324, JSDocLink = 325,
JSDocLinkCode = 325, JSDocLinkCode = 326,
JSDocLinkPlain = 326, JSDocLinkPlain = 327,
JSDocTag = 327, JSDocTag = 328,
JSDocAugmentsTag = 328, JSDocAugmentsTag = 329,
JSDocImplementsTag = 329, JSDocImplementsTag = 330,
JSDocAuthorTag = 330, JSDocAuthorTag = 331,
JSDocDeprecatedTag = 331, JSDocDeprecatedTag = 332,
JSDocClassTag = 332, JSDocClassTag = 333,
JSDocPublicTag = 333, JSDocPublicTag = 334,
JSDocPrivateTag = 334, JSDocPrivateTag = 335,
JSDocProtectedTag = 335, JSDocProtectedTag = 336,
JSDocReadonlyTag = 336, JSDocReadonlyTag = 337,
JSDocOverrideTag = 337, JSDocOverrideTag = 338,
JSDocCallbackTag = 338, JSDocCallbackTag = 339,
JSDocOverloadTag = 339, JSDocOverloadTag = 340,
JSDocEnumTag = 340, JSDocEnumTag = 341,
JSDocParameterTag = 341, JSDocParameterTag = 342,
JSDocReturnTag = 342, JSDocReturnTag = 343,
JSDocThisTag = 343, JSDocThisTag = 344,
JSDocTypeTag = 344, JSDocTypeTag = 345,
JSDocTemplateTag = 345, JSDocTemplateTag = 346,
JSDocTypedefTag = 346, JSDocTypedefTag = 347,
JSDocSeeTag = 347, JSDocSeeTag = 348,
JSDocPropertyTag = 348, JSDocPropertyTag = 349,
JSDocThrowsTag = 349, JSDocThrowsTag = 350,
JSDocSatisfiesTag = 350, JSDocSatisfiesTag = 351,
JSDocImportTag = 351, JSDocImportTag = 352,
SyntaxList = 352, SyntaxList = 353,
NotEmittedStatement = 353, NotEmittedStatement = 354,
NotEmittedTypeElement = 354, NotEmittedTypeElement = 355,
PartiallyEmittedExpression = 355, PartiallyEmittedExpression = 356,
CommaListExpression = 356, CommaListExpression = 357,
SyntheticReferenceExpression = 357, SyntheticReferenceExpression = 358,
Count = 358, Count = 359,
FirstAssignment = 64, FirstAssignment = 64,
LastAssignment = 79, LastAssignment = 79,
FirstCompoundAssignment = 65, FirstCompoundAssignment = 65,
@ -539,15 +540,15 @@ declare namespace ts {
FirstReservedWord = 83, FirstReservedWord = 83,
LastReservedWord = 118, LastReservedWord = 118,
FirstKeyword = 83, FirstKeyword = 83,
LastKeyword = 165, LastKeyword = 166,
FirstFutureReservedWord = 119, FirstFutureReservedWord = 119,
LastFutureReservedWord = 127, LastFutureReservedWord = 127,
FirstTypeNode = 182, FirstTypeNode = 183,
LastTypeNode = 205, LastTypeNode = 206,
FirstPunctuation = 19, FirstPunctuation = 19,
LastPunctuation = 79, LastPunctuation = 79,
FirstToken = 0, FirstToken = 0,
LastToken = 165, LastToken = 166,
FirstTriviaToken = 2, FirstTriviaToken = 2,
LastTriviaToken = 7, LastTriviaToken = 7,
FirstLiteralToken = 9, FirstLiteralToken = 9,
@ -556,13 +557,13 @@ declare namespace ts {
LastTemplateToken = 18, LastTemplateToken = 18,
FirstBinaryOperator = 30, FirstBinaryOperator = 30,
LastBinaryOperator = 79, LastBinaryOperator = 79,
FirstStatement = 243, FirstStatement = 244,
LastStatement = 259, LastStatement = 260,
FirstNode = 166, FirstNode = 167,
FirstJSDocNode = 309, FirstJSDocNode = 310,
LastJSDocNode = 351, LastJSDocNode = 352,
FirstJSDocTagNode = 327, FirstJSDocTagNode = 328,
LastJSDocTagNode = 351, LastJSDocTagNode = 352,
} }
type TriviaSyntaxKind = SyntaxKind.SingleLineCommentTrivia | SyntaxKind.MultiLineCommentTrivia | SyntaxKind.NewLineTrivia | SyntaxKind.WhitespaceTrivia | SyntaxKind.ShebangTrivia | SyntaxKind.ConflictMarkerTrivia; type TriviaSyntaxKind = SyntaxKind.SingleLineCommentTrivia | SyntaxKind.MultiLineCommentTrivia | SyntaxKind.NewLineTrivia | SyntaxKind.WhitespaceTrivia | SyntaxKind.ShebangTrivia | SyntaxKind.ConflictMarkerTrivia;
type LiteralSyntaxKind = SyntaxKind.NumericLiteral | SyntaxKind.BigIntLiteral | SyntaxKind.StringLiteral | SyntaxKind.JsxText | SyntaxKind.JsxTextAllWhiteSpaces | SyntaxKind.RegularExpressionLiteral | SyntaxKind.NoSubstitutionTemplateLiteral; type LiteralSyntaxKind = SyntaxKind.NumericLiteral | SyntaxKind.BigIntLiteral | SyntaxKind.StringLiteral | SyntaxKind.JsxText | SyntaxKind.JsxTextAllWhiteSpaces | SyntaxKind.RegularExpressionLiteral | SyntaxKind.NoSubstitutionTemplateLiteral;
@ -650,6 +651,7 @@ declare namespace ts {
| SyntaxKind.DebuggerKeyword | SyntaxKind.DebuggerKeyword
| SyntaxKind.DeclareKeyword | SyntaxKind.DeclareKeyword
| SyntaxKind.DefaultKeyword | SyntaxKind.DefaultKeyword
| SyntaxKind.DeferKeyword
| SyntaxKind.DeleteKeyword | SyntaxKind.DeleteKeyword
| SyntaxKind.DoKeyword | SyntaxKind.DoKeyword
| SyntaxKind.ElseKeyword | SyntaxKind.ElseKeyword
@ -1622,7 +1624,7 @@ declare namespace ts {
readonly expression: SuperExpression; readonly expression: SuperExpression;
} }
interface ImportCall extends CallExpression { interface ImportCall extends CallExpression {
readonly expression: ImportExpression; readonly expression: ImportExpression | ImportDeferProperty;
} }
interface ExpressionWithTypeArguments extends MemberExpression, NodeWithTypeArguments { interface ExpressionWithTypeArguments extends MemberExpression, NodeWithTypeArguments {
readonly kind: SyntaxKind.ExpressionWithTypeArguments; readonly kind: SyntaxKind.ExpressionWithTypeArguments;
@ -1672,6 +1674,12 @@ declare namespace ts {
readonly keywordToken: SyntaxKind.NewKeyword | SyntaxKind.ImportKeyword; readonly keywordToken: SyntaxKind.NewKeyword | SyntaxKind.ImportKeyword;
readonly name: Identifier; readonly name: Identifier;
} }
interface ImportDeferProperty extends MetaProperty {
readonly keywordToken: SyntaxKind.ImportKeyword;
readonly name: Identifier & {
readonly escapedText: __String & "defer";
};
}
interface JsxElement extends PrimaryExpression { interface JsxElement extends PrimaryExpression {
readonly kind: SyntaxKind.JsxElement; readonly kind: SyntaxKind.JsxElement;
readonly openingElement: JsxOpeningElement; readonly openingElement: JsxOpeningElement;
@ -2011,10 +2019,13 @@ declare namespace ts {
interface ImportClause extends NamedDeclaration { interface ImportClause extends NamedDeclaration {
readonly kind: SyntaxKind.ImportClause; readonly kind: SyntaxKind.ImportClause;
readonly parent: ImportDeclaration | JSDocImportTag; readonly parent: ImportDeclaration | JSDocImportTag;
/** @deprecated Use `phaseModifier` instead */
readonly isTypeOnly: boolean; readonly isTypeOnly: boolean;
readonly phaseModifier: undefined | ImportPhaseModifierSyntaxKind;
readonly name?: Identifier; readonly name?: Identifier;
readonly namedBindings?: NamedImportBindings; readonly namedBindings?: NamedImportBindings;
} }
type ImportPhaseModifierSyntaxKind = SyntaxKind.TypeKeyword | SyntaxKind.DeferKeyword;
/** @deprecated */ /** @deprecated */
type AssertionKey = ImportAttributeName; type AssertionKey = ImportAttributeName;
/** @deprecated */ /** @deprecated */
@ -2389,6 +2400,7 @@ declare namespace ts {
*/ */
interface SourceFileLike { interface SourceFileLike {
readonly text: string; readonly text: string;
languageVariant?: LanguageVariant;
} }
interface SourceFileLike { interface SourceFileLike {
getLineAndCharacterOfPosition(pos: number): LineAndCharacter; getLineAndCharacterOfPosition(pos: number): LineAndCharacter;
@ -2808,6 +2820,10 @@ declare namespace ts {
* is `never`. Instead, use `type.flags & TypeFlags.Never`. * is `never`. Instead, use `type.flags & TypeFlags.Never`.
*/ */
getNeverType(): Type; getNeverType(): Type;
/**
* Gets the intrinsic `object` type.
*/
getNonPrimitiveType(): Type;
/** /**
* Returns true if the "source" type is assignable to the "target" type. * Returns true if the "source" type is assignable to the "target" type.
* *
@ -3195,11 +3211,11 @@ declare namespace ts {
JSLiteral = 4096, JSLiteral = 4096,
FreshLiteral = 8192, FreshLiteral = 8192,
ArrayLiteral = 16384, ArrayLiteral = 16384,
SingleSignatureType = 134217728,
ClassOrInterface = 3, ClassOrInterface = 3,
ContainsSpread = 2097152, ContainsSpread = 2097152,
ObjectRestType = 4194304, ObjectRestType = 4194304,
InstantiationExpressionType = 8388608, InstantiationExpressionType = 8388608,
SingleSignatureType = 134217728,
} }
interface ObjectType extends Type { interface ObjectType extends Type {
objectFlags: ObjectFlags; objectFlags: ObjectFlags;
@ -3638,6 +3654,7 @@ declare namespace ts {
ESNext = 99, ESNext = 99,
Node16 = 100, Node16 = 100,
Node18 = 101, Node18 = 101,
Node20 = 102,
NodeNext = 199, NodeNext = 199,
Preserve = 200, Preserve = 200,
} }
@ -4218,8 +4235,10 @@ declare namespace ts {
updateImportEqualsDeclaration(node: ImportEqualsDeclaration, modifiers: readonly ModifierLike[] | undefined, isTypeOnly: boolean, name: Identifier, moduleReference: ModuleReference): ImportEqualsDeclaration; updateImportEqualsDeclaration(node: ImportEqualsDeclaration, modifiers: readonly ModifierLike[] | undefined, isTypeOnly: boolean, name: Identifier, moduleReference: ModuleReference): ImportEqualsDeclaration;
createImportDeclaration(modifiers: readonly ModifierLike[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression, attributes?: ImportAttributes): ImportDeclaration; createImportDeclaration(modifiers: readonly ModifierLike[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression, attributes?: ImportAttributes): ImportDeclaration;
updateImportDeclaration(node: ImportDeclaration, modifiers: readonly ModifierLike[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression, attributes: ImportAttributes | undefined): ImportDeclaration; updateImportDeclaration(node: ImportDeclaration, modifiers: readonly ModifierLike[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression, attributes: ImportAttributes | undefined): ImportDeclaration;
createImportClause(isTypeOnly: boolean, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause; createImportClause(phaseModifier: ImportPhaseModifierSyntaxKind | undefined, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause;
updateImportClause(node: ImportClause, isTypeOnly: boolean, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause; /** @deprecated */ createImportClause(isTypeOnly: boolean, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause;
updateImportClause(node: ImportClause, phaseModifier: ImportPhaseModifierSyntaxKind | undefined, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause;
/** @deprecated */ updateImportClause(node: ImportClause, isTypeOnly: boolean, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause;
/** @deprecated */ createAssertClause(elements: NodeArray<AssertEntry>, multiLine?: boolean): AssertClause; /** @deprecated */ createAssertClause(elements: NodeArray<AssertEntry>, multiLine?: boolean): AssertClause;
/** @deprecated */ updateAssertClause(node: AssertClause, elements: NodeArray<AssertEntry>, multiLine?: boolean): AssertClause; /** @deprecated */ updateAssertClause(node: AssertClause, elements: NodeArray<AssertEntry>, multiLine?: boolean): AssertClause;
/** @deprecated */ createAssertEntry(name: AssertionKey, value: Expression): AssertEntry; /** @deprecated */ createAssertEntry(name: AssertionKey, value: Expression): AssertEntry;
@ -4896,6 +4915,12 @@ declare namespace ts {
readonly displayPartsForJSDoc?: boolean; readonly displayPartsForJSDoc?: boolean;
readonly generateReturnInDocTemplate?: boolean; readonly generateReturnInDocTemplate?: boolean;
readonly disableLineTextInReferences?: boolean; readonly disableLineTextInReferences?: boolean;
/**
* A positive integer indicating the maximum length of a hover text before it is truncated.
*
* Default: `500`
*/
readonly maximumHoverLength?: number;
} }
type OrganizeImportsTypeOrder = "last" | "inline" | "first"; type OrganizeImportsTypeOrder = "last" | "inline" | "first";
/** Represents a bigint literal value without requiring bigint support */ /** Represents a bigint literal value without requiring bigint support */
@ -6673,8 +6698,9 @@ declare namespace ts {
* *
* @param fileName The path to the file * @param fileName The path to the file
* @param position A zero-based index of the character where you want the quick info * @param position A zero-based index of the character where you want the quick info
* @param maximumLength Maximum length of a quickinfo text before it is truncated.
*/ */
getQuickInfoAtPosition(fileName: string, position: number): QuickInfo | undefined; getQuickInfoAtPosition(fileName: string, position: number, maximumLength?: number): QuickInfo | undefined;
getNameOrDottedNameSpan(fileName: string, startPos: number, endPos: number): TextSpan | undefined; getNameOrDottedNameSpan(fileName: string, startPos: number, endPos: number): TextSpan | undefined;
getBreakpointStatementAtPosition(fileName: string, position: number): TextSpan | undefined; getBreakpointStatementAtPosition(fileName: string, position: number): TextSpan | undefined;
getSignatureHelpItems(fileName: string, position: number, options: SignatureHelpItemsOptions | undefined): SignatureHelpItems | undefined; getSignatureHelpItems(fileName: string, position: number, options: SignatureHelpItemsOptions | undefined): SignatureHelpItems | undefined;
@ -7262,6 +7288,7 @@ declare namespace ts {
displayParts?: SymbolDisplayPart[]; displayParts?: SymbolDisplayPart[];
documentation?: SymbolDisplayPart[]; documentation?: SymbolDisplayPart[];
tags?: JSDocTagInfo[]; tags?: JSDocTagInfo[];
canIncreaseVerbosityLevel?: boolean;
} }
type RenameInfo = RenameInfoSuccess | RenameInfoFailure; type RenameInfo = RenameInfoSuccess | RenameInfoFailure;
interface RenameInfoSuccess { interface RenameInfoSuccess {
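The TypeScript 5.9 surface above deprecates `ImportClause.isTypeOnly` in favor of `phaseModifier` and adds `DeferKeyword`. A hedged sketch of building an `import defer * as heavy from "./heavy.js"` declaration with the updated factory signature shown in the diff (the module specifier and binding name are hypothetical; assumes typescript@5.9):

import ts from "typescript";

// import defer * as heavy from "./heavy.js";
// (import defer only permits a namespace binding.)
const clause = ts.factory.createImportClause(
  ts.SyntaxKind.DeferKeyword, // phaseModifier (replaces the old isTypeOnly boolean)
  undefined,                  // no default binding
  ts.factory.createNamespaceImport(ts.factory.createIdentifier("heavy")),
);
const decl = ts.factory.createImportDeclaration(
  undefined,
  clause,
  ts.factory.createStringLiteral("./heavy.js"),
);

const printer = ts.createPrinter();
console.log(printer.printNode(
  ts.EmitHint.Unspecified,
  decl,
  ts.createSourceFile("x.ts", "", ts.ScriptTarget.ESNext),
));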

View file

@ -738,23 +738,7 @@ fn op_load_inner(
} else { } else {
&specifier &specifier
}; };
let maybe_module = match graph.try_get(specifier) { let maybe_module = graph.try_get(specifier).ok().flatten();
Ok(maybe_module) => maybe_module,
Err(err) => match err.as_kind() {
deno_graph::ModuleErrorKind::UnsupportedMediaType {
media_type,
..
} => {
return Ok(Some(LoadResponse {
data: FastString::from_static(""),
version: Some("1".to_string()),
script_kind: as_ts_script_kind(*media_type),
is_cjs: false,
}));
}
_ => None,
},
};
let maybe_source = if let Some(module) = maybe_module { let maybe_source = if let Some(module) = maybe_module {
match module { match module {
Module::Js(module) => { Module::Js(module) => {
@ -1296,7 +1280,6 @@ fn op_respond_inner(state: &mut OpState, args: RespondArgs) {
state.maybe_response = Some(args); state.maybe_response = Some(args);
} }
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Error, deno_error::JsError)] #[derive(Debug, Error, deno_error::JsError)]
pub enum ExecError { pub enum ExecError {
#[class(generic)] #[class(generic)]
@ -1304,7 +1287,7 @@ pub enum ExecError {
ResponseNotSet, ResponseNotSet,
#[class(inherit)] #[class(inherit)]
#[error(transparent)] #[error(transparent)]
Js(deno_core::error::JsError), Js(Box<deno_core::error::JsError>),
} }
#[derive(Clone)] #[derive(Clone)]

View file

@ -18,7 +18,6 @@ use deno_resolver::deno_json::CompilerOptionsData;
use deno_resolver::deno_json::CompilerOptionsParseError; use deno_resolver::deno_json::CompilerOptionsParseError;
use deno_resolver::deno_json::CompilerOptionsResolver; use deno_resolver::deno_json::CompilerOptionsResolver;
use deno_resolver::deno_json::ToMaybeJsxImportSourceConfigError; use deno_resolver::deno_json::ToMaybeJsxImportSourceConfigError;
use deno_resolver::factory::WorkspaceDirectoryProvider;
use deno_resolver::graph::maybe_additional_sloppy_imports_message; use deno_resolver::graph::maybe_additional_sloppy_imports_message;
use deno_semver::npm::NpmPackageNvReference; use deno_semver::npm::NpmPackageNvReference;
use deno_terminal::colors; use deno_terminal::colors;
@ -106,7 +105,6 @@ pub struct TypeChecker {
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_resolver: CliNpmResolver, npm_resolver: CliNpmResolver,
sys: CliSys, sys: CliSys,
workspace_directory_provider: Arc<WorkspaceDirectoryProvider>,
compiler_options_resolver: Arc<CompilerOptionsResolver>, compiler_options_resolver: Arc<CompilerOptionsResolver>,
code_cache: Option<Arc<crate::cache::CodeCache>>, code_cache: Option<Arc<crate::cache::CodeCache>>,
} }
@ -121,7 +119,6 @@ impl TypeChecker {
node_resolver: Arc<CliNodeResolver>, node_resolver: Arc<CliNodeResolver>,
npm_resolver: CliNpmResolver, npm_resolver: CliNpmResolver,
sys: CliSys, sys: CliSys,
workspace_directory_provider: Arc<WorkspaceDirectoryProvider>,
compiler_options_resolver: Arc<CompilerOptionsResolver>, compiler_options_resolver: Arc<CompilerOptionsResolver>,
code_cache: Option<Arc<crate::cache::CodeCache>>, code_cache: Option<Arc<crate::cache::CodeCache>>,
) -> Self { ) -> Self {
@ -133,7 +130,6 @@ impl TypeChecker {
node_resolver, node_resolver,
npm_resolver, npm_resolver,
sys, sys,
workspace_directory_provider,
compiler_options_resolver, compiler_options_resolver,
code_cache, code_cache,
} }
@ -280,7 +276,7 @@ impl TypeChecker {
.clone(); .clone();
let group_key = (compiler_options, imports.clone()); let group_key = (compiler_options, imports.clone());
let group = groups_by_key.entry(group_key).or_insert_with(|| { let group = groups_by_key.entry(group_key).or_insert_with(|| {
let dir = self.workspace_directory_provider.for_specifier(root); let dir = self.cli_options.workspace().resolve_member_dir(root);
CheckGroup { CheckGroup {
roots: Default::default(), roots: Default::default(),
compiler_options, compiler_options,
@ -288,8 +284,9 @@ impl TypeChecker {
// this is slightly hacky. It's used as the referrer for resolving // this is slightly hacky. It's used as the referrer for resolving
// npm imports in the key // npm imports in the key
referrer: self referrer: self
.workspace_directory_provider .cli_options
.for_specifier(root) .workspace()
.resolve_member_dir(root)
.maybe_deno_json() .maybe_deno_json()
.map(|d| d.specifier.clone()) .map(|d| d.specifier.clone())
.unwrap_or_else(|| dir.dir_url().as_ref().clone()), .unwrap_or_else(|| dir.dir_url().as_ref().clone()),

View file

@ -11,9 +11,11 @@ use deno_config::glob::PathOrPattern;
use deno_config::glob::PathOrPatternSet; use deno_config::glob::PathOrPatternSet;
use deno_config::glob::WalkEntry; use deno_config::glob::WalkEntry;
use deno_core::ModuleSpecifier; use deno_core::ModuleSpecifier;
use deno_core::anyhow::Context;
use deno_core::anyhow::anyhow; use deno_core::anyhow::anyhow;
use deno_core::error::AnyError; use deno_core::error::AnyError;
use super::progress_bar::UpdateGuard;
use crate::sys::CliSys; use crate::sys::CliSys;
/// Creates a std::fs::File handling if the parent does not exist. /// Creates a std::fs::File handling if the parent does not exist.
@ -132,7 +134,7 @@ pub fn collect_specifiers(
.ignore_git_folder() .ignore_git_folder()
.ignore_node_modules() .ignore_node_modules()
.set_vendor_folder(vendor_folder) .set_vendor_folder(vendor_folder)
.collect_file_patterns(&CliSys::default(), file_patterns); .collect_file_patterns(&CliSys::default(), &file_patterns);
let mut collected_files_as_urls = collected_files let mut collected_files_as_urls = collected_files
.iter() .iter()
.map(|f| specifier_from_file_path(f).unwrap()) .map(|f| specifier_from_file_path(f).unwrap())
@ -175,6 +177,76 @@ pub fn specifier_from_file_path(
.map_err(|_| anyhow!("Invalid file path '{}'", path.display())) .map_err(|_| anyhow!("Invalid file path '{}'", path.display()))
} }
#[derive(Default)]
pub struct FsCleaner {
pub files_removed: u64,
pub dirs_removed: u64,
pub bytes_removed: u64,
pub progress_guard: Option<UpdateGuard>,
}
impl FsCleaner {
pub fn new(progress_guard: Option<UpdateGuard>) -> Self {
Self {
files_removed: 0,
dirs_removed: 0,
bytes_removed: 0,
progress_guard,
}
}
pub fn rm_rf(&mut self, path: &Path) -> Result<(), AnyError> {
for entry in walkdir::WalkDir::new(path).contents_first(true) {
let entry = entry?;
if entry.file_type().is_dir() {
self.dirs_removed += 1;
self.update_progress();
std::fs::remove_dir_all(entry.path())?;
} else {
self.remove_file(entry.path(), entry.metadata().ok())?;
}
}
Ok(())
}
pub fn remove_file(
&mut self,
path: &Path,
meta: Option<std::fs::Metadata>,
) -> Result<(), AnyError> {
if let Some(meta) = meta {
self.bytes_removed += meta.len();
}
self.files_removed += 1;
self.update_progress();
match std::fs::remove_file(path)
.with_context(|| format!("Failed to remove file: {}", path.display()))
{
Err(e) => {
if cfg!(windows)
&& let Ok(meta) = path.symlink_metadata()
&& meta.is_symlink()
{
std::fs::remove_dir(path).with_context(|| {
format!("Failed to remove symlink: {}", path.display())
})?;
return Ok(());
}
Err(e)
}
_ => Ok(()),
}
}
fn update_progress(&self) {
if let Some(pg) = &self.progress_guard {
pg.set_position(self.files_removed + self.dirs_removed);
}
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;

View file

@ -15,4 +15,5 @@ pub mod sync;
pub mod text_encoding; pub mod text_encoding;
pub mod unix; pub mod unix;
pub mod v8; pub mod v8;
pub mod watch_env_tracker;
pub mod windows; pub mod windows;

View file

@ -0,0 +1,258 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::ffi::OsString;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::OnceLock;
use deno_terminal::colors;
#[derive(Debug, Clone)]
struct WatchEnvTrackerInner {
// Track all loaded variables and their values
loaded_variables: HashSet<OsString>,
// Track variables that are no longer present in any loaded file
unused_variables: HashSet<OsString>,
// Track original env vars that existed before we started
original_env: HashMap<OsString, OsString>,
}
impl WatchEnvTrackerInner {
fn new() -> Self {
// Capture the original environment state
let original_env: HashMap<OsString, OsString> = env::vars_os().collect();
Self {
loaded_variables: HashSet::new(),
unused_variables: HashSet::new(),
original_env,
}
}
}
#[derive(Debug, Clone)]
pub struct WatchEnvTracker {
inner: Arc<Mutex<WatchEnvTrackerInner>>,
}
// Global singleton instance
static WATCH_ENV_TRACKER: OnceLock<WatchEnvTracker> = OnceLock::new();
impl WatchEnvTracker {
/// Get the global singleton instance
pub fn snapshot() -> &'static WatchEnvTracker {
WATCH_ENV_TRACKER.get_or_init(|| WatchEnvTracker {
inner: Arc::new(Mutex::new(WatchEnvTrackerInner::new())),
})
}
// Consolidated error handling function
fn handle_dotenvy_error(
error: dotenvy::Error,
file_path: &Path,
log_level: Option<log::Level>,
) {
#[allow(clippy::print_stderr)]
if log_level.map(|l| l >= log::Level::Info).unwrap_or(true) {
match error {
dotenvy::Error::LineParse(line, index) => eprintln!(
"{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",
colors::yellow("Warning"),
file_path.display(),
index,
line
),
dotenvy::Error::Io(_) => eprintln!(
"{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",
colors::yellow("Warning"),
file_path.display()
),
dotenvy::Error::EnvVar(_) => eprintln!(
"{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",
colors::yellow("Warning"),
file_path.display()
),
_ => eprintln!(
"{} Unknown failure occurred with the specified environment file: {}",
colors::yellow("Warning"),
file_path.display()
),
}
}
}
// Internal method that accepts an already-acquired lock to avoid deadlocks
fn load_env_file_inner(
&self,
file_path: PathBuf,
log_level: Option<log::Level>,
inner: &mut WatchEnvTrackerInner,
) {
// Check if file exists
if !file_path.exists() {
// Only show warning if logging is enabled
#[allow(clippy::print_stderr)]
if log_level.map(|l| l >= log::Level::Info).unwrap_or(true) {
eprintln!(
"{} The environment file specified '{}' was not found.",
colors::yellow("Warning"),
file_path.display()
);
}
return;
}
match dotenvy::from_path_iter(&file_path) {
Ok(iter) => {
for item in iter {
match item {
Ok((key, value)) => {
// Convert to OsString for consistency
let key_os = OsString::from(key);
let value_os = OsString::from(value);
// Check if this variable is already loaded from a previous file
if inner.loaded_variables.contains(&key_os) {
// Variable already exists from a previous file, skip it
#[allow(clippy::print_stderr)]
if log_level.map(|l| l >= log::Level::Debug).unwrap_or(false) {
eprintln!(
"{} Variable '{}' already loaded from '{}', skipping value from '{}'",
colors::yellow("Debug"),
key_os.to_string_lossy(),
inner
.loaded_variables
.get(&key_os)
.map(|k| k.to_string_lossy().to_string())
.unwrap_or_else(|| "unknown".to_string()),
file_path.display()
);
}
continue;
}
// Set the environment variable
// SAFETY: We're setting environment variables with valid UTF-8 strings
// from the .env file. Both key and value are guaranteed to be valid strings.
unsafe {
env::set_var(&key_os, &value_os);
}
// Track this variable
inner.loaded_variables.insert(key_os.clone());
inner.unused_variables.remove(&key_os);
}
Err(e) => {
Self::handle_dotenvy_error(e, &file_path, log_level);
}
}
}
}
Err(e) =>
{
#[allow(clippy::print_stderr)]
if log_level.map(|l| l >= log::Level::Info).unwrap_or(true) {
eprintln!(
"{} Failed to read {}: {}",
colors::yellow("Warning"),
file_path.display(),
e
);
}
}
}
}
/// Clean up variables that are no longer present in any loaded file
fn _cleanup_removed_variables(
&self,
inner: &mut WatchEnvTrackerInner,
log_level: Option<log::Level>,
) {
for var_name in inner.unused_variables.iter() {
if !inner.original_env.contains_key(var_name) {
// SAFETY: We're removing an environment variable that we previously set
unsafe {
env::remove_var(var_name);
}
#[allow(clippy::print_stderr)]
if log_level.map(|l| l >= log::Level::Debug).unwrap_or(false) {
eprintln!(
"{} Variable '{}' removed from environment as it's no longer present in any loaded file",
colors::yellow("Debug"),
var_name.to_string_lossy()
);
}
} else {
let original_value = inner.original_env.get(var_name).unwrap();
// SAFETY: We're setting an environment variable to a value we control
unsafe {
env::set_var(var_name, original_value);
}
#[allow(clippy::print_stderr)]
if log_level.map(|l| l >= log::Level::Debug).unwrap_or(false) {
eprintln!(
"{} Variable '{}' restored to original value as it's no longer present in any loaded file",
colors::yellow("Debug"),
var_name.to_string_lossy()
);
}
}
}
}
// Load multiple env files in reverse order (later files take precedence over earlier ones)
pub fn load_env_variables_from_env_files(
&self,
file_paths: Option<&Vec<PathBuf>>,
log_level: Option<log::Level>,
) {
let Some(env_file_names) = file_paths else {
return;
};
let mut inner = self.inner.lock().unwrap();
inner.unused_variables = std::mem::take(&mut inner.loaded_variables);
inner.loaded_variables = HashSet::new();
for env_file_name in env_file_names.iter().rev() {
self.load_env_file_inner(
env_file_name.to_path_buf(),
log_level,
&mut inner,
);
}
self._cleanup_removed_variables(&mut inner, log_level);
}
}
pub fn load_env_variables_from_env_files(
filename: Option<&Vec<PathBuf>>,
flags_log_level: Option<log::Level>,
) {
let Some(env_file_names) = filename else {
return;
};
for env_file_name in env_file_names.iter().rev() {
match dotenvy::from_filename(env_file_name) {
Ok(_) => (),
Err(error) => {
WatchEnvTracker::handle_dotenvy_error(
error,
env_file_name,
flags_log_level,
);
}
}
}
}
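The tracker above is internal to the CLI; its user-visible effect is that `--env-file` values are reloaded on each watcher restart, with variables that disappear from the file unset or restored to their pre-run values. A small hedged sketch of a script that observes this behavior (the variable name is illustrative):

// main.ts — run with: deno run --watch --env-file=.env --allow-env main.ts
// Edit or remove API_URL in .env and save; the next watcher restart reflects it.
console.log("API_URL =", Deno.env.get("API_URL") ?? "(not set)");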

View file

@ -33,6 +33,7 @@ use crate::tools::coverage::CoverageCollector;
use crate::tools::run::hmr::HmrRunner; use crate::tools::run::hmr::HmrRunner;
use crate::util::file_watcher::WatcherCommunicator; use crate::util::file_watcher::WatcherCommunicator;
use crate::util::file_watcher::WatcherRestartMode; use crate::util::file_watcher::WatcherRestartMode;
use crate::util::progress_bar::ProgressBar;
pub type CreateHmrRunnerCb = pub type CreateHmrRunnerCb =
Box<dyn Fn(deno_core::LocalInspectorSession) -> HmrRunner + Send + Sync>; Box<dyn Fn(deno_core::LocalInspectorSession) -> HmrRunner + Send + Sync>;
@ -276,12 +277,11 @@ impl CliMainWorker {
Ok(Some(coverage_collector)) Ok(Some(coverage_collector))
} }
#[allow(clippy::result_large_err)]
pub fn execute_script_static( pub fn execute_script_static(
&mut self, &mut self,
name: &'static str, name: &'static str,
source_code: &'static str, source_code: &'static str,
) -> Result<v8::Global<v8::Value>, JsError> { ) -> Result<v8::Global<v8::Value>, Box<JsError>> {
self.worker.js_runtime().execute_script(name, source_code) self.worker.js_runtime().execute_script(name, source_code)
} }
} }
@ -318,6 +318,7 @@ pub struct CliMainWorkerFactory {
maybe_lockfile: Option<Arc<CliLockfile>>, maybe_lockfile: Option<Arc<CliLockfile>>,
npm_installer: Option<Arc<CliNpmInstaller>>, npm_installer: Option<Arc<CliNpmInstaller>>,
npm_resolver: CliNpmResolver, npm_resolver: CliNpmResolver,
progress_bar: ProgressBar,
root_permissions: PermissionsContainer, root_permissions: PermissionsContainer,
shared: Arc<SharedState>, shared: Arc<SharedState>,
sys: CliSys, sys: CliSys,
@ -333,6 +334,7 @@ impl CliMainWorkerFactory {
maybe_lockfile: Option<Arc<CliLockfile>>, maybe_lockfile: Option<Arc<CliLockfile>>,
npm_installer: Option<Arc<CliNpmInstaller>>, npm_installer: Option<Arc<CliNpmInstaller>>,
npm_resolver: CliNpmResolver, npm_resolver: CliNpmResolver,
progress_bar: ProgressBar,
sys: CliSys, sys: CliSys,
options: CliMainWorkerOptions, options: CliMainWorkerOptions,
root_permissions: PermissionsContainer, root_permissions: PermissionsContainer,
@ -342,6 +344,7 @@ impl CliMainWorkerFactory {
maybe_lockfile, maybe_lockfile,
npm_installer, npm_installer,
npm_resolver, npm_resolver,
progress_bar,
root_permissions, root_permissions,
sys, sys,
shared: Arc::new(SharedState { shared: Arc::new(SharedState {
@ -408,6 +411,7 @@ impl CliMainWorkerFactory {
{ {
Ok(package_ref) => { Ok(package_ref) => {
if let Some(npm_installer) = &self.npm_installer { if let Some(npm_installer) = &self.npm_installer {
let _clear_guard = self.progress_bar.deferred_keep_initialize_alive();
let reqs = &[package_ref.req().clone()]; let reqs = &[package_ref.req().clone()];
npm_installer npm_installer
.add_package_reqs( .add_package_reqs(
@ -543,6 +547,7 @@ mod tests {
compiled_wasm_module_store: Default::default(), compiled_wasm_module_store: Default::default(),
v8_code_cache: Default::default(), v8_code_cache: Default::default(),
fs, fs,
bundle_provider: None,
}, },
options, options,
) )

17
ext/bundle/Cargo.toml Normal file
View file

@ -0,0 +1,17 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "deno_bundle_runtime"
version = "0.1.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
repository.workspace = true
[dependencies]
async-trait.workspace = true
deno_core.workspace = true
deno_error.workspace = true
deno_web.workspace = true
regex.workspace = true
serde.workspace = true

45
ext/bundle/bundle.ts Normal file
View file

@ -0,0 +1,45 @@
// Copyright 2018-2025 the Deno authors. MIT license.
/// <reference path="../../cli/tsc/dts/lib.deno.unstable.d.ts" />
import { op_bundle } from "ext:core/ops";
import { primordials } from "ext:core/mod.js";
import { TextDecoder } from "ext:deno_web/08_text_encoding.js";
const { SafeArrayIterator, Uint8Array, ObjectPrototypeIsPrototypeOf } =
primordials;
const decoder = new TextDecoder();
export async function bundle(
options: Deno.bundle.Options,
): Promise<Deno.bundle.Result> {
const result = {
success: false,
...await op_bundle(
options,
),
};
result.success = result.errors.length === 0;
for (
const f of new SafeArrayIterator(
// deno-lint-ignore no-explicit-any
result.outputFiles as any ?? [],
)
) {
// deno-lint-ignore no-explicit-any
const file = f as any;
if (file.contents?.length === 0) {
delete file.contents;
file.text = () => "";
} else {
if (!ObjectPrototypeIsPrototypeOf(Uint8Array, file.contents)) {
file.contents = new Uint8Array(file.contents);
}
file.text = () => decoder.decode(file.contents ?? "");
}
}
if (result.outputFiles?.length === 0) {
delete result.outputFiles;
}
return result;
}
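`bundle.ts` above wraps `op_bundle` and normalizes its result (adds `success`, materializes `text()` on output files). A hedged sketch of calling the unstable API it backs; option names mirror `BundleOptions` in the accompanying Rust module, and the assumption that `write: false` keeps outputs in memory is mine, not stated by the diff:

// Requires the unstable bundle API.
const result = await Deno.bundle({
  entrypoints: ["./src/main.ts"],
  outputDir: "dist",
  platform: "browser",
  format: "esm",
  minify: true,
  write: false, // assumed: keep outputs in memory so outputFiles is populated
});

if (!result.success) {
  for (const err of result.errors) console.error(err.text);
} else {
  for (const file of result.outputFiles ?? []) {
    console.log(file.path, file.text().length, "chars");
  }
}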

241
ext/bundle/src/lib.rs Normal file
View file

@ -0,0 +1,241 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::OpState;
use deno_core::error::AnyError;
use deno_core::op2;
use deno_error::JsErrorBox;
deno_core::extension!(
deno_bundle_runtime,
deps = [
deno_web
],
ops = [
op_bundle,
],
esm = [
"bundle.ts"
],
options = {
bundle_provider: Option<Arc<dyn BundleProvider>>,
},
state = |state, options| {
if let Some(bundle_provider) = options.bundle_provider {
state.put(bundle_provider);
} else {
state.put::<Arc<dyn BundleProvider>>(Arc::new(()));
}
},
);
#[async_trait]
impl BundleProvider for () {
async fn bundle(
&self,
_options: BundleOptions,
) -> Result<BuildResponse, AnyError> {
Err(deno_core::anyhow::anyhow!(
"default BundleProvider does not do anything"
))
}
}
#[async_trait]
pub trait BundleProvider: Send + Sync {
async fn bundle(
&self,
options: BundleOptions,
) -> Result<BuildResponse, AnyError>;
}
#[derive(Clone, Debug, Eq, PartialEq, Default, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BundleOptions {
pub entrypoints: Vec<String>,
#[serde(default)]
pub output_path: Option<String>,
#[serde(default)]
pub output_dir: Option<String>,
#[serde(default)]
pub external: Vec<String>,
#[serde(default)]
pub format: BundleFormat,
#[serde(default)]
pub minify: bool,
#[serde(default)]
pub code_splitting: bool,
#[serde(default = "tru")]
pub inline_imports: bool,
#[serde(default)]
pub packages: PackageHandling,
#[serde(default)]
pub sourcemap: Option<SourceMapType>,
#[serde(default)]
pub platform: BundlePlatform,
#[serde(default = "tru")]
pub write: bool,
}
fn tru() -> bool {
true
}
#[derive(Clone, Debug, Eq, PartialEq, Copy, Default, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum BundlePlatform {
Browser,
#[default]
Deno,
}
#[derive(Clone, Debug, Eq, PartialEq, Copy, Default, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum BundleFormat {
#[default]
Esm,
Cjs,
Iife,
}
#[derive(Clone, Debug, Eq, PartialEq, Copy, Default, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum SourceMapType {
#[default]
Linked,
Inline,
External,
}
impl std::fmt::Display for BundleFormat {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
BundleFormat::Esm => write!(f, "esm"),
BundleFormat::Cjs => write!(f, "cjs"),
BundleFormat::Iife => write!(f, "iife"),
}
}
}
impl std::fmt::Display for SourceMapType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
SourceMapType::Linked => write!(f, "linked"),
SourceMapType::Inline => write!(f, "inline"),
SourceMapType::External => write!(f, "external"),
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Copy, Default, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum PackageHandling {
#[default]
Bundle,
External,
}
impl std::fmt::Display for PackageHandling {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
PackageHandling::Bundle => write!(f, "bundle"),
PackageHandling::External => write!(f, "external"),
}
}
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Message {
pub text: String,
pub location: Option<Location>,
pub notes: Vec<Note>,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PartialMessage {
pub id: Option<String>,
pub plugin_name: Option<String>,
pub text: Option<String>,
pub location: Option<Location>,
pub notes: Option<Vec<Note>>,
pub detail: Option<u32>,
}
#[derive(Debug, Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BuildOutputFile {
pub path: String,
pub contents: Option<Vec<u8>>,
pub hash: String,
}
#[derive(Debug, Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BuildResponse {
pub errors: Vec<Message>,
pub warnings: Vec<Message>,
pub output_files: Option<Vec<BuildOutputFile>>,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Note {
pub text: String,
pub location: Option<Location>,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Location {
pub file: String,
pub namespace: Option<String>,
pub line: u32,
pub column: u32,
pub length: Option<u32>,
pub suggestion: Option<String>,
}
fn deserialize_regex<'de, D>(deserializer: D) -> Result<regex::Regex, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde::Deserialize;
let s = String::deserialize(deserializer)?;
regex::Regex::new(&s).map_err(serde::de::Error::custom)
}
#[derive(Debug, Clone, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OnResolveOptions {
#[serde(deserialize_with = "deserialize_regex")]
pub filter: regex::Regex,
pub namespace: Option<String>,
}
#[derive(Debug, Clone, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OnLoadOptions {
#[serde(deserialize_with = "deserialize_regex")]
pub filter: regex::Regex,
pub namespace: Option<String>,
}
#[op2(async)]
#[serde]
pub async fn op_bundle(
state: Rc<RefCell<OpState>>,
#[serde] options: BundleOptions,
) -> Result<BuildResponse, JsErrorBox> {
// eprintln!("op_bundle: {:?}", options);
let provider = {
let state = state.borrow();
state.borrow::<Arc<dyn BundleProvider>>().clone()
};
provider
.bundle(options)
.await
.map_err(|e| JsErrorBox::generic(e.to_string()))
}
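
The serde attributes above decide how the bundler options arrive from the JS side. A minimal, self-contained sketch of that behaviour, using hypothetical `Format`/`Opts` mirrors of the types above rather than the real ones (requires `serde` with the `derive` feature and `serde_json`):

// Illustration only: `Format` mirrors `BundleFormat`, `Opts.write` mirrors
// the `#[serde(default = "tru")]` field above. Not part of the diff.
use serde::Deserialize;

#[derive(Debug, PartialEq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
enum Format {
  #[default]
  Esm,
  Cjs,
  Iife,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Opts {
  #[serde(default)]
  format: Format,
  #[serde(default = "yes")]
  write: bool,
}

fn yes() -> bool {
  true
}

fn main() {
  // Variants match their camelCase names: "iife" deserializes to Format::Iife.
  let opts: Opts = serde_json::from_str(r#"{ "format": "iife" }"#).unwrap();
  assert_eq!(opts.format, Format::Iife);
  // A missing `write` falls back to the `default = "..."` helper, i.e. true.
  assert!(opts.write);
  println!("{opts:?}");
}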

@@ -543,4 +543,4 @@ webidl.converters["BodyInit_DOMString?"] = webidl.createNullableConverter(
   webidl.converters["BodyInit_DOMString"],
 );
-export { extractBody, InnerBody, mixinBody };
+export { extractBody, InnerBody, mixinBody, packageData };

@@ -18,6 +18,7 @@ const {
   ObjectPrototypeIsPrototypeOf,
   RegExpPrototypeExec,
   StringPrototypeStartsWith,
+  StringPrototypeToUpperCase,
   Symbol,
   SymbolFor,
   TypeError,
@@ -25,10 +26,7 @@
 import * as webidl from "ext:deno_webidl/00_webidl.js";
 import { createFilteredInspectProxy } from "ext:deno_console/01_console.js";
-import {
-  byteUpperCase,
-  HTTP_TOKEN_CODE_POINT_RE,
-} from "ext:deno_web/00_infra.js";
+import { HTTP_TOKEN_CODE_POINT_RE } from "ext:deno_web/00_infra.js";
 import { URL } from "ext:deno_url/00_url.js";
 import { extractBody, mixinBody } from "ext:deno_fetch/22_body.js";
 import { getLocationHref } from "ext:deno_web/12_location.js";
@@ -213,6 +211,7 @@ function cloneInnerRequest(request, skipBody = false) {
 // method => normalized method
 const KNOWN_METHODS = {
+  __proto__: null,
   "DELETE": "DELETE",
   "delete": "DELETE",
   "GET": "GET",
@@ -222,7 +221,6 @@ const KNOWN_METHODS = {
   "OPTIONS": "OPTIONS",
   "options": "OPTIONS",
   "PATCH": "PATCH",
-  "patch": "PATCH",
   "POST": "POST",
   "post": "POST",
   "PUT": "PUT",
@@ -237,13 +235,21 @@ function validateAndNormalizeMethod(m) {
   if (RegExpPrototypeExec(HTTP_TOKEN_CODE_POINT_RE, m) === null) {
     throw new TypeError("Method is not valid");
   }
-  const upperCase = byteUpperCase(m);
-  if (
-    upperCase === "CONNECT" || upperCase === "TRACE" || upperCase === "TRACK"
-  ) {
-    throw new TypeError("Method is forbidden");
+  const upperCase = StringPrototypeToUpperCase(m);
+  switch (upperCase) {
+    case "DELETE":
+    case "GET":
+    case "HEAD":
+    case "OPTIONS":
+    case "POST":
+    case "PUT":
+      return upperCase;
+    case "CONNECT":
+    case "TRACE":
+    case "TRACK":
+      throw new TypeError("Method is forbidden");
   }
-  return upperCase;
+  return m;
 }

 class Request {

@@ -6,8 +6,6 @@ import { primordials } from "ext:core/mod.js";
 import { op_utf8_to_byte_string } from "ext:core/ops";
 const {
   ArrayPrototypeFind,
-  ArrayPrototypeSlice,
-  ArrayPrototypeSplice,
   Number,
   NumberIsFinite,
   NumberIsNaN,
@@ -200,16 +198,7 @@ class EventSource extends EventTarget {
     );
     if (this.#headers) {
-      const headerList = headerListFromHeaders(initialHeaders);
-      const headers = this.#headers ?? ArrayPrototypeSlice(
-        headerList,
-        0,
-        headerList.length,
-      );
-      if (headerList.length !== 0) {
-        ArrayPrototypeSplice(headerList, 0, headerList.length);
-      }
-      fillHeaders(initialHeaders, headers);
+      fillHeaders(initialHeaders, this.#headers);
     }
     const req = newInnerRequest(

@@ -1055,14 +1055,17 @@ pub fn create_http_client(
   user_agent: &str,
   options: CreateHttpClientOptions,
 ) -> Result<Client, HttpClientCreateError> {
-  let mut tls_config = deno_tls::create_client_config(
-    options.root_cert_store,
-    options.ca_certs,
-    options.unsafely_ignore_certificate_errors,
-    options.client_cert_chain_and_key.into(),
-    deno_tls::SocketUse::Http,
-  )
-  .map_err(HttpClientCreateError::Tls)?;
+  let mut tls_config =
+    deno_tls::create_client_config(deno_tls::TlsClientConfigOptions {
+      root_cert_store: options.root_cert_store,
+      ca_certs: options.ca_certs,
+      unsafely_ignore_certificate_errors: options
+        .unsafely_ignore_certificate_errors,
+      unsafely_disable_hostname_verification: false,
+      cert_chain_and_key: options.client_cert_chain_and_key.into(),
+      socket_use: deno_tls::SocketUse::Http,
+    })
+    .map_err(HttpClientCreateError::Tls)?;

 // Proxy TLS should not send ALPN
 tls_config.alpn_protocols.clear();
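
The hunk above (and the later ones in ext/net and ext/websocket) replaces a long positional argument list with an options struct. A self-contained sketch of that refactor pattern, with made-up names rather than the real deno_tls API:

// Illustration of the options-struct pattern: callers name every field, and
// a new option (here the hostname-verification flag) can be added without
// disturbing the argument order at every call site.
struct ClientConfigOptions {
  ca_certs: Vec<String>,
  unsafely_ignore_certificate_errors: Option<Vec<String>>,
  unsafely_disable_hostname_verification: bool,
}

fn create_client_config(options: ClientConfigOptions) -> String {
  format!(
    "certs={} ignore_errors={} no_hostname_check={}",
    options.ca_certs.len(),
    options.unsafely_ignore_certificate_errors.is_some(),
    options.unsafely_disable_hostname_verification
  )
}

fn main() {
  let summary = create_client_config(ClientConfigOptions {
    ca_certs: vec![],
    unsafely_ignore_certificate_errors: None,
    unsafely_disable_hostname_verification: false,
  });
  println!("{summary}");
}

For boolean-heavy security options like these, the named-field form also makes call sites self-documenting, which matters when one of the fields is "disable hostname verification".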

@@ -211,6 +211,26 @@ impl std::fmt::Debug for Intercept {
 impl Target {
   pub(crate) fn parse(val: &str) -> Option<Self> {
+    // unix:<path> is valid RFC3986 but not as an http::Uri
+    #[cfg(not(windows))]
+    if let Some(encoded_path) = val.strip_prefix("unix:") {
+      use std::os::unix::ffi::OsStringExt;
+      let decoded = std::ffi::OsString::from_vec(
+        percent_decode_str(encoded_path).collect::<Vec<u8>>(),
+      );
+      return Some(Target::Unix {
+        path: decoded.into(),
+      });
+    }
+
+    // vsock:<cid>:<port> is valid RFC3986 but not as an http::Uri
+    #[cfg(any(target_os = "linux", target_os = "macos"))]
+    if let Some(cid_port) = val.strip_prefix("vsock:") {
+      let (left, right) = cid_port.split_once(":")?;
+      let cid = left.parse::<u32>().ok()?;
+      let port = right.parse::<u32>().ok()?;
+      return Some(Target::Vsock { cid, port });
+    }
+
     let uri = val.parse::<Uri>().ok()?;

     let mut builder = Uri::builder();
@@ -939,6 +959,25 @@ fn test_proxy_parse_from_env() {
     }
     _ => panic!("bad target"),
   }
+
+  // unix
+  #[cfg(not(windows))]
+  match parse("unix:foo%20bar/baz") {
+    Target::Unix { path } => {
+      assert_eq!(path.to_str(), Some("foo bar/baz"));
+    }
+    _ => panic!("bad target"),
+  }
+
+  // vsock
+  #[cfg(any(target_os = "linux", target_os = "macos"))]
+  match parse("vsock:1234:5678") {
+    Target::Vsock { cid, port } => {
+      assert_eq!(cid, 1234);
+      assert_eq!(port, 5678);
+    }
+    _ => panic!("bad target"),
+  }
 }

 #[test]
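
The test name suggests these targets come from proxy environment variables. A self-contained sketch of just the `unix:` percent-decoding step shown above, assuming the percent-encoding crate and a Unix host (the surrounding Target enum and env lookup are not reproduced):

// Illustration only; mirrors the strip_prefix + percent-decode logic above.
#[cfg(unix)]
fn parse_unix_target(val: &str) -> Option<std::path::PathBuf> {
  use percent_encoding::percent_decode_str;
  use std::os::unix::ffi::OsStringExt;

  let encoded_path = val.strip_prefix("unix:")?;
  // Decode %XX escapes into raw bytes, then treat them as an OS path.
  let decoded = std::ffi::OsString::from_vec(
    percent_decode_str(encoded_path).collect::<Vec<u8>>(),
  );
  Some(decoded.into())
}

#[cfg(unix)]
fn main() {
  // "%20" decodes to a space, matching the test case in the diff.
  assert_eq!(
    parse_unix_target("unix:/run/proxy%20dir/proxy.sock").unwrap(),
    std::path::PathBuf::from("/run/proxy dir/proxy.sock"),
  );
  println!("ok");
}

#[cfg(not(unix))]
fn main() {}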

@@ -330,12 +330,8 @@ pub trait FileSystem: std::fmt::Debug + MaybeSend + MaybeSync {
       .unwrap_or(false)
   }

-  fn exists_sync(&self, path: &CheckedPath) -> bool {
-    self.stat_sync(path).is_ok()
-  }
-  async fn exists_async(&self, path: CheckedPathBuf) -> FsResult<bool> {
-    Ok(self.stat_async(path).await.is_ok())
-  }
+  fn exists_sync(&self, path: &CheckedPath) -> bool;
+  async fn exists_async(&self, path: CheckedPathBuf) -> FsResult<bool>;

   fn read_text_file_lossy_sync(
     &self,
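
With the stat-based defaults removed, every `FileSystem` backend must now supply its own existence check. A self-contained sketch of what an implementor might do, using a made-up `MiniFileSystem` trait rather than the real deno_fs types:

use std::path::Path;

// Illustration only: a trimmed stand-in for the trait change above.
trait MiniFileSystem {
  fn exists_sync(&self, path: &Path) -> bool;
}

struct RealFs;

impl MiniFileSystem for RealFs {
  fn exists_sync(&self, path: &Path) -> bool {
    // std::fs::exists returns Ok(true/false) when the answer is known and
    // Err(_) when it cannot be determined; collapsing errors to `false`
    // roughly mirrors what the removed stat-based default did.
    std::fs::exists(path).unwrap_or(false)
  }
}

fn main() {
  println!("{}", RealFs.exists_sync(Path::new("Cargo.toml")));
}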

@@ -52,10 +52,6 @@ pub trait FsPermissions {
   ) -> Result<CheckedPath<'a>, PermissionCheckError>;
   fn check_write_all(&self, api_name: &str)
   -> Result<(), PermissionCheckError>;
-
-  fn allows_all(&self) -> bool {
-    false
-  }
 }

 impl FsPermissions for deno_permissions::PermissionsContainer {
@@ -109,10 +105,6 @@ impl FsPermissions for deno_permissions::PermissionsContainer {
   ) -> Result<(), PermissionCheckError> {
     deno_permissions::PermissionsContainer::check_write_all(self, api_name)
   }
-
-  fn allows_all(&self) -> bool {
-    self.allows_all()
-  }
 }

 pub const UNSTABLE_FEATURE_NAME: &str = "fs";

@@ -1021,20 +1021,7 @@ fn exists(path: &Path) -> bool {
   #[cfg(windows)]
   {
-    use std::os::windows::ffi::OsStrExt;
-    use winapi::um::fileapi::GetFileAttributesW;
-    use winapi::um::fileapi::INVALID_FILE_ATTRIBUTES;
-
-    let path = path
-      .as_os_str()
-      .encode_wide()
-      .chain(std::iter::once(0))
-      .collect::<Vec<_>>();
-    // Safety: `path` is a null-terminated string
-    let attrs = unsafe { GetFileAttributesW(path.as_ptr()) };
-
-    attrs != INVALID_FILE_ATTRIBUTES
+    fs::exists(path).unwrap_or(false)
   }
 }

@@ -936,6 +936,7 @@ function serveInner(options, handler) {
     port: options.port ?? 8000,
     reusePort: options.reusePort ?? false,
     loadBalanced: options[kLoadBalanced] ?? false,
+    backlog: options.backlog,
   };

   if (options.certFile || options.keyFile) {

@@ -2938,7 +2938,7 @@ fn napi_create_external_arraybuffer<'s>(
   env: &'s mut Env,
   data: *mut c_void,
   byte_length: usize,
-  finalize_cb: napi_finalize,
+  finalize_cb: Option<napi_finalize>,
   finalize_hint: *mut c_void,
   result: *mut napi_value<'s>,
 ) -> napi_status {

@@ -319,7 +319,7 @@ fn napi_create_external_buffer<'s>(
   env: &'s mut Env,
   length: usize,
   data: *mut c_void,
-  finalize_cb: napi_finalize,
+  finalize_cb: Option<napi_finalize>,
   finalize_hint: *mut c_void,
   result: *mut napi_value<'s>,
 ) -> napi_status {

@@ -25,15 +25,17 @@ pub fn get_array_buffer_ptr(ab: v8::Local<v8::ArrayBuffer>) -> *mut c_void {
 struct BufferFinalizer {
   env: *mut Env,
-  finalize_cb: napi_finalize,
+  finalize_cb: Option<napi_finalize>,
   finalize_data: *mut c_void,
   finalize_hint: *mut c_void,
 }

 impl Drop for BufferFinalizer {
   fn drop(&mut self) {
-    unsafe {
-      (self.finalize_cb)(self.env as _, self.finalize_data, self.finalize_hint);
+    if let Some(finalize_cb) = self.finalize_cb {
+      unsafe {
+        finalize_cb(self.env as _, self.finalize_data, self.finalize_hint);
+      }
     }
   }
 }
@@ -56,7 +58,7 @@ pub(crate) fn make_external_backing_store(
   data: *mut c_void,
   byte_length: usize,
   finalize_data: *mut c_void,
-  finalize_cb: napi_finalize,
+  finalize_cb: Option<napi_finalize>,
   finalize_hint: *mut c_void,
 ) -> v8::UniqueRef<v8::BackingStore> {
   let finalizer = Box::new(BufferFinalizer {
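
The switch to `Option<napi_finalize>` means a null finalizer passed through the N-API boundary becomes `None` instead of being called blindly. A self-contained sketch of that "optional C callback fired on Drop" pattern, with stand-in types rather than the real napi ones:

use std::ffi::c_void;

// Illustration only: a fn-pointer type standing in for napi_finalize.
type Finalize =
  unsafe extern "C" fn(env: *mut c_void, data: *mut c_void, hint: *mut c_void);

struct BufferFinalizer {
  env: *mut c_void,
  finalize_cb: Option<Finalize>,
  finalize_data: *mut c_void,
  finalize_hint: *mut c_void,
}

impl Drop for BufferFinalizer {
  fn drop(&mut self) {
    if let Some(finalize_cb) = self.finalize_cb {
      // Safety: only reached when the embedder actually registered a callback.
      unsafe { finalize_cb(self.env, self.finalize_data, self.finalize_hint) };
    }
  }
}

unsafe extern "C" fn log_finalize(
  _env: *mut c_void,
  _data: *mut c_void,
  _hint: *mut c_void,
) {
  println!("finalizer ran");
}

fn main() {
  // With a callback the drop fires it; with None the drop is a no-op.
  let _with = BufferFinalizer {
    env: std::ptr::null_mut(),
    finalize_cb: Some(log_finalize),
    finalize_data: std::ptr::null_mut(),
    finalize_hint: std::ptr::null_mut(),
  };
  let _without = BufferFinalizer {
    env: std::ptr::null_mut(),
    finalize_cb: None,
    finalize_data: std::ptr::null_mut(),
    finalize_hint: std::ptr::null_mut(),
  };
}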

@@ -601,6 +601,7 @@ function listen(args) {
     },
     args.reusePort,
     args.loadBalanced ?? false,
+    args.tcpBacklog ?? 511,
   );
   addr.transport = "tcp";
   return new Listener(rid, addr, "tcp");

@@ -58,6 +58,7 @@ async function connectTls({
   keyFormat = undefined,
   cert = undefined,
   key = undefined,
+  unsafelyDisableHostnameVerification = false,
 }) {
   if (transport !== "tcp") {
     throw new TypeError(`Unsupported transport: '${transport}'`);
@@ -73,7 +74,7 @@ async function connectTls({
   const serverName = arguments[0][serverNameSymbol] ?? null;
   const { 0: rid, 1: localAddr, 2: remoteAddr } = await op_net_connect_tls(
     { hostname, port },
-    { caCerts, alpnProtocols, serverName },
+    { caCerts, alpnProtocols, serverName, unsafelyDisableHostnameVerification },
     keyPair,
   );
   localAddr.transport = "tcp";
@@ -161,6 +162,7 @@ function listenTls({
   transport = "tcp",
   alpnProtocols = undefined,
   reusePort = false,
+  tcpBacklog = 511,
 }) {
   if (transport !== "tcp") {
     throw new TypeError(`Unsupported transport: '${transport}'`);
@@ -175,7 +177,7 @@ function listenTls({
   const keyPair = loadTlsKeyPair("Deno.listenTls", arguments[0]);
   const { 0: rid, 1: localAddr } = op_net_listen_tls(
     { hostname, port },
-    { alpnProtocols, reusePort },
+    { alpnProtocols, reusePort, tcpBacklog },
     keyPair,
   );
   return new TlsListener(rid, localAddr);
@@ -188,12 +190,14 @@ async function startTls(
     hostname = "127.0.0.1",
     caCerts = [],
     alpnProtocols = undefined,
+    unsafelyDisableHostnameVerification = false,
   } = { __proto__: null },
 ) {
   return startTlsInternal(conn, {
     hostname,
     caCerts,
     alpnProtocols,
+    unsafelyDisableHostnameVerification,
   });
 }
@@ -205,6 +209,7 @@ function startTlsInternal(
     alpnProtocols = undefined,
     keyPair = null,
     rejectUnauthorized,
+    unsafelyDisableHostnameVerification,
   },
 ) {
   const { 0: rid, 1: localAddr, 2: remoteAddr } = op_tls_start({
@@ -213,6 +218,7 @@ function startTlsInternal(
     caCerts,
     alpnProtocols,
     rejectUnauthorized,
+    unsafelyDisableHostnameVerification,
   }, keyPair);
   return new TlsConn(rid, remoteAddr, localAddr);
 }

@@ -574,6 +574,7 @@ pub fn op_net_listen_tcp<NP>(
   #[serde] addr: IpAddr,
   reuse_port: bool,
   load_balanced: bool,
+  tcp_backlog: i32,
 ) -> Result<(ResourceId, IpAddr), NetError>
 where
   NP: NetPermissions + 'static,
@@ -589,9 +590,9 @@ where
     .ok_or_else(|| NetError::NoResolvedAddress)?;
   let listener = if load_balanced {
-    TcpListener::bind_load_balanced(addr)
+    TcpListener::bind_load_balanced(addr, tcp_backlog)
   } else {
-    TcpListener::bind_direct(addr, reuse_port)
+    TcpListener::bind_direct(addr, reuse_port, tcp_backlog)
   }?;
   let local_addr = listener.local_addr()?;
   let listener_resource = NetworkListenerResource::new(listener);
@@ -1483,7 +1484,7 @@ mod tests {
     let sockets = Arc::new(Mutex::new(vec![]));
     let clone_addr = addr.clone();
     let addr = addr.to_socket_addrs().unwrap().next().unwrap();
-    let listener = TcpListener::bind_direct(addr, false).unwrap();
+    let listener = TcpListener::bind_direct(addr, false, 511).unwrap();
     let accept_fut = listener.accept().boxed_local();
     let store_fut = async move {
       let socket = accept_fut.await.unwrap();
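
The bodies of `bind_direct`/`bind_load_balanced` are not part of this diff, but a backlog parameter like this typically ends up as the argument to listen(2). A self-contained sketch of that plumbing using the socket2 crate (assumed here; not the actual deno_net listener), with 511 as the conventional default the JS layer uses above, the same value Node.js defaults to:

use std::net::SocketAddr;

use socket2::{Domain, Protocol, Socket, Type};

// Hypothetical helper: bind a TCP listener with an explicit accept backlog.
fn bind_direct(
  addr: SocketAddr,
  reuse_address: bool,
  backlog: i32,
) -> std::io::Result<std::net::TcpListener> {
  let socket =
    Socket::new(Domain::for_address(addr), Type::STREAM, Some(Protocol::TCP))?;
  if reuse_address {
    socket.set_reuse_address(true)?;
  }
  socket.bind(&addr.into())?;
  // This is where the new parameter matters: the kernel caps the queue of
  // pending, not-yet-accepted connections at (roughly) this value.
  socket.listen(backlog)?;
  Ok(socket.into())
}

fn main() -> std::io::Result<()> {
  let listener = bind_direct("127.0.0.1:0".parse().unwrap(), false, 511)?;
  println!("listening on {}", listener.local_addr()?);
  Ok(())
}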

@@ -28,6 +28,7 @@ use deno_error::JsErrorBox;
 use deno_permissions::OpenAccessKind;
 use deno_tls::ServerConfigProvider;
 use deno_tls::SocketUse;
+use deno_tls::TlsClientConfigOptions;
 use deno_tls::TlsKey;
 use deno_tls::TlsKeyLookup;
 use deno_tls::TlsKeys;
@@ -232,6 +233,7 @@ pub struct ConnectTlsArgs {
   ca_certs: Vec<String>,
   alpn_protocols: Option<Vec<String>>,
   server_name: Option<String>,
+  unsafely_disable_hostname_verification: Option<bool>,
 }

 #[derive(Deserialize)]
@@ -242,6 +244,7 @@ pub struct StartTlsArgs {
   hostname: String,
   alpn_protocols: Option<Vec<String>>,
   reject_unauthorized: Option<bool>,
+  unsafely_disable_hostname_verification: Option<bool>,
 }

 #[op2]
@@ -343,6 +346,9 @@ where
     Some(Vec::new())
   };

+  let unsafely_disable_hostname_verification =
+    args.unsafely_disable_hostname_verification.unwrap_or(false);
+
   let root_cert_store = state
     .borrow()
     .borrow::<DefaultTlsOptions>()
@@ -367,13 +373,14 @@ where
   let tls_null = TlsKeysHolder::from(TlsKeys::Null);
   let key_pair = key_pair.unwrap_or(&tls_null);

-  let mut tls_config = create_client_config(
+  let mut tls_config = create_client_config(TlsClientConfigOptions {
     root_cert_store,
     ca_certs,
     unsafely_ignore_certificate_errors,
-    key_pair.take(),
-    SocketUse::GeneralSsl,
-  )?;
+    unsafely_disable_hostname_verification,
+    cert_chain_and_key: key_pair.take(),
+    socket_use: SocketUse::GeneralSsl,
+  })?;

   if let Some(alpn_protocols) = args.alpn_protocols {
     tls_config.alpn_protocols =
@@ -413,6 +420,8 @@ where
     .borrow()
     .try_borrow::<UnsafelyIgnoreCertificateErrors>()
     .and_then(|it| it.0.clone());
+  let unsafely_disable_hostname_verification =
+    args.unsafely_disable_hostname_verification.unwrap_or(false);

   let cert_file = {
     let mut s = state.borrow_mut();
@@ -466,13 +475,14 @@ where
   let local_addr = tcp_stream.local_addr()?;
   let remote_addr = tcp_stream.peer_addr()?;

-  let mut tls_config = create_client_config(
+  let mut tls_config = create_client_config(TlsClientConfigOptions {
     root_cert_store,
     ca_certs,
     unsafely_ignore_certificate_errors,
-    key_pair.take(),
-    SocketUse::GeneralSsl,
-  )?;
+    unsafely_disable_hostname_verification,
+    cert_chain_and_key: key_pair.take(),
+    socket_use: SocketUse::GeneralSsl,
+  })?;

   if let Some(alpn_protocols) = args.alpn_protocols {
     tls_config.alpn_protocols =
@@ -504,6 +514,7 @@ pub struct ListenTlsArgs {
   reuse_port: bool,
   #[serde(default)]
   load_balanced: bool,
+  tcp_backlog: i32,
 }

 #[op2(stack_trace)]
@@ -533,9 +544,9 @@ where
     .ok_or(NetError::NoResolvedAddress)?;
   let tcp_listener = if args.load_balanced {
-    TcpListener::bind_load_balanced(bind_addr)
+    TcpListener::bind_load_balanced(bind_addr, args.tcp_backlog)
   } else {
-    TcpListener::bind_direct(bind_addr, args.reuse_port)
+    TcpListener::bind_direct(bind_addr, args.reuse_port, args.tcp_backlog)
   }?;
   let local_addr = tcp_listener.local_addr()?;
   let alpn = args

@@ -35,6 +35,7 @@ use deno_error::JsError;
 use deno_error::JsErrorBox;
 use deno_permissions::PermissionCheckError;
 use deno_tls::SocketUse;
+use deno_tls::TlsClientConfigOptions;
 use deno_tls::TlsError;
 use deno_tls::TlsKeys;
 use deno_tls::TlsKeysHolder;
@@ -574,13 +575,14 @@ where
       ))
       .with_no_client_auth()
   } else {
-    create_client_config(
+    create_client_config(TlsClientConfigOptions {
       root_cert_store,
       ca_certs,
       unsafely_ignore_certificate_errors,
-      key_pair.take(),
-      SocketUse::GeneralSsl,
-    )?
+      unsafely_disable_hostname_verification: false,
+      cert_chain_and_key: key_pair.take(),
+      socket_use: SocketUse::GeneralSsl,
+    })?
   };

   if let Some(alpn_protocols) = args.alpn_protocols {

Some files were not shown because too many files have changed in this diff.