From d15a302d41b643b4be001920a297c08bdcf5ad77 Mon Sep 17 00:00:00 2001 From: David Sherret Date: Fri, 13 Jun 2025 13:01:26 -0400 Subject: [PATCH] refactor: add deno_config crate back to cli (#29740) Co-authored-by: nayeemrmn --- .dprint.json | 1 + .github/workflows/ci.generate.ts | 14 +- .github/workflows/ci.yml | 12 +- Cargo.lock | 4 +- Cargo.toml | 4 +- ext/webgpu/LICENSE.md | 2 +- libs/config/Cargo.toml | 45 + libs/config/README.md | 5 + libs/config/clippy.toml | 43 + libs/config/deno_json/mod.rs | 2952 ++++++++ libs/config/deno_json/ts.rs | 229 + libs/config/glob/collector.rs | 374 + libs/config/glob/gitignore.rs | 180 + libs/config/glob/mod.rs | 1626 +++++ libs/config/lib.rs | 20 + libs/config/sync.rs | 20 + .../config/testdata/additional_files/jsr.json | 3 + libs/config/testdata/deno.json | 1 + .../fmt/with_config/deno.deprecated.jsonc | 20 + .../testdata/fmt/with_config/deno.jsonc | 16 + .../testdata/fmt/with_config/subdir/a.ts | 46 + .../testdata/fmt/with_config/subdir/b.ts | 15 + .../testdata/fmt/with_config/subdir/c.md | 17 + .../testdata/module_graph/tsconfig.json | 6 + libs/config/util.rs | 32 + libs/config/workspace/discovery.rs | 1094 +++ libs/config/workspace/mod.rs | 6363 +++++++++++++++++ tests/specs/run/webtransport/main.ts | 2 +- tools/copyright_checker.js | 1 + 29 files changed, 13138 insertions(+), 9 deletions(-) create mode 100644 libs/config/Cargo.toml create mode 100644 libs/config/README.md create mode 100644 libs/config/clippy.toml create mode 100644 libs/config/deno_json/mod.rs create mode 100644 libs/config/deno_json/ts.rs create mode 100644 libs/config/glob/collector.rs create mode 100644 libs/config/glob/gitignore.rs create mode 100644 libs/config/glob/mod.rs create mode 100644 libs/config/lib.rs create mode 100644 libs/config/sync.rs create mode 100644 libs/config/testdata/additional_files/jsr.json create mode 100644 libs/config/testdata/deno.json create mode 100644 libs/config/testdata/fmt/with_config/deno.deprecated.jsonc create mode 100644 libs/config/testdata/fmt/with_config/deno.jsonc create mode 100644 libs/config/testdata/fmt/with_config/subdir/a.ts create mode 100644 libs/config/testdata/fmt/with_config/subdir/b.ts create mode 100644 libs/config/testdata/fmt/with_config/subdir/c.md create mode 100644 libs/config/testdata/module_graph/tsconfig.json create mode 100644 libs/config/util.rs create mode 100644 libs/config/workspace/discovery.rs create mode 100644 libs/config/workspace/mod.rs diff --git a/.dprint.json b/.dprint.json index 26fb5f877b..458c6784ca 100644 --- a/.dprint.json +++ b/.dprint.json @@ -35,6 +35,7 @@ "cli/tools/doc/prism.js", "ext/websocket/autobahn/reports", "gh-pages", + "libs/config/testdata", "target", "tests/ffi/tests/test.js", "tests/node_compat/runner/suite", diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index b227d961ce..e7da354b4d 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -1230,8 +1230,8 @@ const ci = { }, ]), }, - wasm: { - name: "build wasm32", + libs: { + name: "build libs", needs: ["pre_build"], if: "${{ needs.pre_build.outputs.skip_build != 'true' }}", "runs-on": ubuntuX86Runner, @@ -1254,6 +1254,16 @@ const ci = { run: "cargo check --target wasm32-unknown-unknown -p deno_npm_installer", }, + { + name: "Cargo check (deno_config)", + run: [ + "cargo check --no-default-features -p deno_config", + "cargo check --no-default-features --features workspace -p deno_config", + "cargo check --no-default-features --features package_json -p deno_config", 
+ "cargo check --no-default-features --features workspace --features sync -p deno_config", + "cargo check --target wasm32-unknown-unknown --all-features -p deno_config", + ].join("\n"), + }, ]), }, "publish-canary": { diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7f0d40ab9e..3ab7285a0a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -781,8 +781,8 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: '60-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' - wasm: - name: build wasm32 + libs: + name: build libs needs: - pre_build if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}' @@ -811,6 +811,14 @@ jobs: - name: Cargo check (deno_npm_installer) run: cargo check --target wasm32-unknown-unknown -p deno_npm_installer if: '!(matrix.skip)' + - name: Cargo check (deno_config) + run: |- + cargo check --no-default-features -p deno_config + cargo check --no-default-features --features workspace -p deno_config + cargo check --no-default-features --features package_json -p deno_config + cargo check --no-default-features --features workspace --features sync -p deno_config + cargo check --target wasm32-unknown-unknown --all-features -p deno_config + if: '!(matrix.skip)' publish-canary: name: publish canary runs-on: ubuntu-24.04 diff --git a/Cargo.lock b/Cargo.lock index b664af9eac..ec73fdb825 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1722,8 +1722,6 @@ dependencies = [ [[package]] name = "deno_config" version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cede8eaf636037d9f3d9206eeecb0bdcea042810da5eb00be74ecb0d17b95acc" dependencies = [ "boxed_error", "capacity_builder", @@ -1739,9 +1737,11 @@ dependencies = [ "log", "percent-encoding", "phf", + "pretty_assertions", "serde", "serde_json", "sys_traits", + "tempfile", "thiserror 2.0.12", "url", ] diff --git a/Cargo.toml b/Cargo.toml index 4af89fb466..5640bea4e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,6 +32,7 @@ members = [ "ext/webidl", "ext/websocket", "ext/webstorage", + "libs/config", "resolvers/deno", "resolvers/node", "resolvers/npm_cache", @@ -59,7 +60,7 @@ deno_ast = { version = "=0.48.0", features = ["transpiling"] } deno_core = { version = "0.350.0" } deno_cache_dir = "=0.22.2" -deno_config = { version = "=0.57.0", features = ["workspace"] } +deno_config = { version = "=0.57.0", features = ["workspace"], path = "./libs/config" } deno_doc = "=0.178.0" deno_error = "=0.6.1" deno_graph = { version = "=0.95.1", default-features = false } @@ -179,6 +180,7 @@ hyper-rustls = { version = "0.27.2", default-features = false, features = ["http hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] } hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] } idna = "1.0.3" +ignore = "0.4" import_map = { version = "0.22.0", features = ["ext"] } indexmap = { version = "2", features = ["serde"] } ipnet = "2.3" diff --git a/ext/webgpu/LICENSE.md b/ext/webgpu/LICENSE.md index 56753af367..406ae09364 100644 --- a/ext/webgpu/LICENSE.md +++ b/ext/webgpu/LICENSE.md @@ -1,6 +1,6 @@ MIT License -Copyright 2018-2024 the Deno authors +Copyright 2018-2025 the Deno authors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/libs/config/Cargo.toml b/libs/config/Cargo.toml new file mode 100644 index 
0000000000..ef6b57fd91 --- /dev/null +++ b/libs/config/Cargo.toml @@ -0,0 +1,45 @@ +# Copyright 2018-2025 the Deno authors. MIT license. +[package] +name = "deno_config" +version = "0.57.0" +authors = ["the Deno authors"] +edition = "2021" +license = "MIT" +repository = "https://github.com/denoland/deno_config" +description = "Config file implementation for the Deno CLI" + +[lib] +path = "lib.rs" + +[features] +default = ["workspace"] +deno_json = ["jsonc-parser", "glob", "ignore", "import_map", "phf"] +package_json = ["deno_package_json"] +sync = ["deno_package_json/sync"] +workspace = ["deno_json", "deno_semver", "package_json"] + +[dependencies] +boxed_error.workspace = true +capacity_builder = { workspace = true } +deno_error = { workspace = true, features = ["url"] } +deno_package_json = { workspace = true, optional = true } +deno_path_util.workspace = true +deno_semver = { workspace = true, optional = true } +glob = { workspace = true, optional = true } +ignore = { workspace = true, optional = true } +import_map = { workspace = true, features = ["ext"], optional = true } +indexmap = { workspace = true, features = ["serde"] } +jsonc-parser = { workspace = true, features = ["serde"], optional = true } +log.workspace = true +percent-encoding.workspace = true +phf = { workspace = true, features = ["macros"], optional = true } +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +sys_traits.workspace = true +thiserror.workspace = true +url = { workspace = true } + +[dev-dependencies] +pretty_assertions.workspace = true +sys_traits = { workspace = true, features = ["memory", "real", "serde_json"] } +tempfile.workspace = true diff --git a/libs/config/README.md b/libs/config/README.md new file mode 100644 index 0000000000..ca562f875c --- /dev/null +++ b/libs/config/README.md @@ -0,0 +1,5 @@ +# `deno_config` + +An implementation of the +[Deno configuration file](https://docs.deno.com/runtime/manual/getting_started/configuration_file/) +in Rust. 
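As a rough, hedged sketch of how the crate described in the README above can be consumed (the `deno_config::deno_json` import path, the sample JSON, and the concrete field types are assumptions inferred from the code added later in this patch; the CLI itself goes through workspace discovery rather than constructing `ConfigFile` directly):

```rust
// Illustrative sketch only; not part of the patch.
use deno_config::deno_json::ConfigFile;
use url::Url;

fn example() {
  let text = r#"{ "fmt": { "useTabs": true }, "lock": false }"#;
  let specifier = Url::parse("file:///project/deno.json").unwrap();
  let config = ConfigFile::new(text, specifier).unwrap();

  // Resolved fmt options plus the include/exclude file patterns.
  let fmt = config.to_fmt_config().unwrap();
  assert_eq!(fmt.options.use_tabs, Some(true));

  // `"lock": false` disables the lockfile, so no lockfile path is resolved.
  assert_eq!(config.resolve_lockfile_path().unwrap(), None);
}
```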
diff --git a/libs/config/clippy.toml b/libs/config/clippy.toml new file mode 100644 index 0000000000..db939c6f11 --- /dev/null +++ b/libs/config/clippy.toml @@ -0,0 +1,43 @@ +disallowed-methods = [ + { path = "std::env::current_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::exists", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::is_file", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::read_link", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::metadata", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::canonicalize", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::copy", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::create_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::create_dir_all", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::hard_link", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::metadata", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::read", reason = "File system operations should be done using the sys_traits crate" }, + { path = 
"std::fs::read_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::read_link", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::read_to_string", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::remove_dir", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::remove_dir_all", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::remove_file", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::rename", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::set_permissions", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::symlink_metadata", reason = "File system operations should be done using the sys_traits crate" }, + { path = "std::fs::write", reason = "File system operations should be done using the sys_traits crate" }, +] +disallowed-types = [ + { path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" }, +] diff --git a/libs/config/deno_json/mod.rs b/libs/config/deno_json/mod.rs new file mode 100644 index 0000000000..03d9b1590b --- /dev/null +++ b/libs/config/deno_json/mod.rs @@ -0,0 +1,2952 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::collections::BTreeMap; +use std::path::Path; +use std::path::PathBuf; + +use boxed_error::Boxed; +use deno_error::JsError; +use deno_path_util::url_from_file_path; +use deno_path_util::url_parent; +use deno_path_util::url_to_file_path; +use import_map::ImportMapWithDiagnostics; +use indexmap::IndexMap; +use jsonc_parser::ParseResult; +use serde::de; +use serde::de::Unexpected; +use serde::de::Visitor; +use serde::Deserialize; +use serde::Deserializer; +use serde::Serialize; +use serde::Serializer; +use serde_json::json; +use serde_json::Value; +use sys_traits::FsRead; +use thiserror::Error; +use ts::parse_compiler_options; +use url::Url; + +use crate::glob::FilePatterns; +use crate::glob::PathOrPatternSet; +use crate::util::is_skippable_io_error; +use crate::UrlToFilePathError; + +mod ts; + +pub use ts::EmitConfigOptions; +pub use ts::IgnoredCompilerOptions; +pub use ts::ParsedTsConfigOptions; +pub use ts::RawJsxCompilerOptions; +pub use ts::TsConfig; + +#[derive(Clone, Debug, Default, Deserialize, Hash, PartialEq)] +#[serde(default, deny_unknown_fields)] +pub struct LintRulesConfig { + pub tags: Option>, + pub include: Option>, + pub exclude: Option>, +} + +#[derive(Debug, JsError, Boxed)] +pub struct IntoResolvedError(pub Box); + +#[derive(Debug, Error, JsError)] +pub enum IntoResolvedErrorKind { + #[class(uri)] + #[error(transparent)] + UrlParse(#[from] url::ParseError), + #[class(inherit)] + #[error(transparent)] + UrlToFilePath(#[from] UrlToFilePathError), + #[class(inherit)] + #[error("Invalid include: {0}")] + InvalidInclude(crate::glob::PathOrPatternParseError), + #[class(inherit)] + #[error("Invalid exclude: {0}")] + InvalidExclude(crate::glob::FromExcludeRelativePathOrPatternsError), +} + +#[derive(Debug, Error, JsError)] +#[class(generic)] +#[error("Failed deserilaizing \"compilerOptions\".\"types\" in {}", self.specifier)] +pub struct CompilerOptionTypesDeserializeError { + specifier: Url, + #[source] + source: serde_json::Error, +} + +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] 
+#[serde(default, deny_unknown_fields)] +struct SerializedFilesConfig { + pub include: Option>, + pub exclude: Vec, +} + +impl SerializedFilesConfig { + pub fn into_resolved( + self, + config_file_specifier: &Url, + ) -> Result { + let config_dir = url_to_file_path(&url_parent(config_file_specifier))?; + Ok(FilePatterns { + base: config_dir.clone(), + include: match self.include { + Some(i) => Some( + PathOrPatternSet::from_include_relative_path_or_patterns( + &config_dir, + &i, + ) + .map_err(IntoResolvedErrorKind::InvalidInclude)?, + ), + None => None, + }, + exclude: PathOrPatternSet::from_exclude_relative_path_or_patterns( + &config_dir, + &self.exclude, + ) + .map_err(IntoResolvedErrorKind::InvalidExclude)?, + }) + } +} + +/// `lint` config representation for serde +/// +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] +#[serde(default, deny_unknown_fields)] +struct SerializedLintConfig { + pub rules: LintRulesConfig, + pub include: Option>, + pub exclude: Vec, + + #[serde(rename = "files")] + pub deprecated_files: serde_json::Value, + pub report: Option, + pub plugins: Vec, +} + +impl SerializedLintConfig { + pub fn into_resolved( + self, + config_file_specifier: &Url, + ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + if !self.deprecated_files.is_null() { + log::warn!( "Warning: \"files\" configuration in \"lint\" was removed in Deno 2, use \"include\" and \"exclude\" instead."); + } + Ok(LintConfig { + options: LintOptionsConfig { + rules: self.rules, + plugins: self + .plugins + .into_iter() + .map(|specifier| LintPluginConfig { + specifier, + base: config_file_specifier.clone(), + }) + .collect(), + }, + files: files.into_resolved(config_file_specifier)?, + }) + } +} + +#[derive(Clone, Debug, Hash, PartialEq)] +pub struct LintPluginConfig { + pub specifier: String, + pub base: Url, +} + +#[derive(Clone, Debug, Default, Hash, PartialEq)] +pub struct LintOptionsConfig { + pub rules: LintRulesConfig, + pub plugins: Vec, +} + +#[derive(Clone, Debug, Hash, PartialEq)] +pub struct LintConfig { + pub options: LintOptionsConfig, + pub files: FilePatterns, +} + +impl LintConfig { + pub fn new_with_base(base: PathBuf) -> Self { + // note: don't create Default implementations of these + // config structs because the base of FilePatterns matters + Self { + options: Default::default(), + files: FilePatterns::new_with_base(base), + } + } +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum ProseWrap { + Always, + Never, + Preserve, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum QuoteProps { + AsNeeded, + Consistent, + Preserve, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum NewLineKind { + Auto, + #[serde(rename = "lf")] + LineFeed, + #[serde(rename = "crlf")] + CarriageReturnLineFeed, + System, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum UseBraces { + Maintain, + WhenNotSingleLine, + Always, + PreferNone, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum 
BracePosition { + Maintain, + SameLine, + NextLine, + SameLineUnlessHanging, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum SingleBodyPosition { + Maintain, + SameLine, + NextLine, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum NextControlFlowPosition { + Maintain, + SameLine, + NextLine, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum TrailingCommas { + Always, + Never, + OnlyMultiLine, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum OperatorPosition { + Maintain, + SameLine, + NextLine, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum BracketPosition { + Maintain, + SameLine, + NextLine, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum MultiLineParens { + Never, + Prefer, + Always, +} + +#[derive(Clone, Copy, Debug, Serialize, Deserialize, Hash, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub enum SeparatorKind { + SemiColon, + Comma, +} + +#[derive(Clone, Debug, Default, Serialize, Deserialize, Hash, PartialEq)] +#[serde(default, deny_unknown_fields, rename_all = "camelCase")] +pub struct FmtOptionsConfig { + pub use_tabs: Option, + pub line_width: Option, + pub indent_width: Option, + pub single_quote: Option, + pub prose_wrap: Option, + pub semi_colons: Option, + pub quote_props: Option, + pub new_line_kind: Option, + pub use_braces: Option, + pub brace_position: Option, + pub single_body_position: Option, + pub next_control_flow_position: Option, + pub trailing_commas: Option, + pub operator_position: Option, + pub jsx_bracket_position: Option, + pub jsx_force_new_lines_surrounding_content: Option, + pub jsx_multi_line_parens: Option, + pub type_literal_separator_kind: Option, + pub space_around: Option, + pub space_surrounding_properties: Option, +} + +impl FmtOptionsConfig { + pub fn is_empty(&self) -> bool { + self.use_tabs.is_none() + && self.line_width.is_none() + && self.indent_width.is_none() + && self.single_quote.is_none() + && self.prose_wrap.is_none() + && self.semi_colons.is_none() + && self.quote_props.is_none() + && self.new_line_kind.is_none() + && self.use_braces.is_none() + && self.brace_position.is_none() + && self.single_body_position.is_none() + && self.next_control_flow_position.is_none() + && self.trailing_commas.is_none() + && self.operator_position.is_none() + && self.jsx_bracket_position.is_none() + && self.jsx_force_new_lines_surrounding_content.is_none() + && self.jsx_multi_line_parens.is_none() + && self.type_literal_separator_kind.is_none() + && self.space_around.is_none() + && self.space_surrounding_properties.is_none() + } +} + +/// Choose between flat and nested fmt options. +/// +/// `options` has precedence over `deprecated_options`. +/// when `deprecated_options` is present, a warning is logged. +/// +/// caveat: due to default values, it's not possible to distinguish between +/// an empty configuration and a configuration with default values. 
+/// `{ "fmt": {} } is equivalent to `{ "fmt": { "options": {} } }` +/// and it wouldn't be able to emit warning for `{ "fmt": { "options": {}, "semiColons": "false" } }`. +/// +/// # Arguments +/// +/// * `options` - Flat options. +/// * `deprecated_options` - Nested files configuration ("option"). +fn choose_fmt_options( + options: FmtOptionsConfig, + deprecated_options: FmtOptionsConfig, +) -> FmtOptionsConfig { + const DEPRECATED_OPTIONS: &str = + "Warning: \"options\" configuration is deprecated"; + const FLAT_OPTION: &str = "\"flat\" options"; + + let (options_nonempty, deprecated_options_nonempty) = + (!options.is_empty(), !deprecated_options.is_empty()); + + match (options_nonempty, deprecated_options_nonempty) { + (true, true) => { + log::warn!("{DEPRECATED_OPTIONS} and ignored by {FLAT_OPTION}."); + options + } + (true, false) => options, + (false, true) => { + log::warn!("{DEPRECATED_OPTIONS}. Please use {FLAT_OPTION} instead."); + deprecated_options + } + (false, false) => FmtOptionsConfig::default(), + } +} + +/// `fmt` config representation for serde +/// +/// fields from `use_tabs`..`semi_colons` are expanded from [FmtOptionsConfig]. +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] +#[serde(default, deny_unknown_fields, rename_all = "camelCase")] +struct SerializedFmtConfig { + pub use_tabs: Option, + pub line_width: Option, + pub indent_width: Option, + pub single_quote: Option, + pub prose_wrap: Option, + pub semi_colons: Option, + pub quote_props: Option, + pub new_line_kind: Option, + pub use_braces: Option, + pub brace_position: Option, + pub single_body_position: Option, + pub next_control_flow_position: Option, + pub trailing_commas: Option, + pub operator_position: Option, + #[serde(rename = "jsx.bracketPosition")] + pub jsx_bracket_position: Option, + #[serde(rename = "jsx.forceNewLinesSurroundingContent")] + pub jsx_force_new_lines_surrounding_content: Option, + #[serde(rename = "jsx.multiLineParens")] + pub jsx_multi_line_parens: Option, + #[serde(rename = "typeLiteral.separatorKind")] + pub type_literal_separator_kind: Option, + pub space_around: Option, + pub space_surrounding_properties: Option, + #[serde(rename = "options")] + pub deprecated_options: FmtOptionsConfig, + pub include: Option>, + pub exclude: Vec, + #[serde(rename = "files")] + pub deprecated_files: serde_json::Value, +} + +impl SerializedFmtConfig { + pub fn into_resolved( + self, + config_file_specifier: &Url, + ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + let options = FmtOptionsConfig { + use_tabs: self.use_tabs, + line_width: self.line_width, + indent_width: self.indent_width, + single_quote: self.single_quote, + prose_wrap: self.prose_wrap, + semi_colons: self.semi_colons, + quote_props: self.quote_props, + new_line_kind: self.new_line_kind, + use_braces: self.use_braces, + brace_position: self.brace_position, + single_body_position: self.single_body_position, + next_control_flow_position: self.next_control_flow_position, + trailing_commas: self.trailing_commas, + operator_position: self.operator_position, + jsx_bracket_position: self.jsx_bracket_position, + jsx_force_new_lines_surrounding_content: self + .jsx_force_new_lines_surrounding_content, + jsx_multi_line_parens: self.jsx_multi_line_parens, + type_literal_separator_kind: self.type_literal_separator_kind, + space_around: self.space_around, + 
space_surrounding_properties: self.space_surrounding_properties, + }; + if !self.deprecated_files.is_null() { + log::warn!( "Warning: \"files\" configuration in \"fmt\" was removed in Deno 2, use \"include\" and \"exclude\" instead."); + } + Ok(FmtConfig { + options: choose_fmt_options(options, self.deprecated_options), + files: files.into_resolved(config_file_specifier)?, + }) + } +} + +#[derive(Clone, Debug, Hash, PartialEq)] +pub struct FmtConfig { + pub options: FmtOptionsConfig, + pub files: FilePatterns, +} + +impl FmtConfig { + pub fn new_with_base(base: PathBuf) -> Self { + Self { + options: Default::default(), + files: FilePatterns::new_with_base(base), + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct ExportsConfig { + base: Url, + map: IndexMap, +} + +impl ExportsConfig { + pub fn into_map(self) -> IndexMap { + self.map + } + + pub fn get(&self, export_name: &str) -> Option<&String> { + self.map.get(export_name) + } + + pub fn get_resolved( + &self, + export_name: &str, + ) -> Result, url::ParseError> { + match self.get(export_name) { + Some(name) => self.base.join(name).map(Some), + None => Ok(None), + } + } +} + +/// `test` config representation for serde +/// +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] +#[serde(default, deny_unknown_fields)] +struct SerializedTestConfig { + pub include: Option>, + pub exclude: Vec, + #[serde(rename = "files")] + pub deprecated_files: serde_json::Value, +} + +impl SerializedTestConfig { + pub fn into_resolved( + self, + config_file_specifier: &Url, + ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + if !self.deprecated_files.is_null() { + log::warn!( "Warning: \"files\" configuration in \"test\" was removed in Deno 2, use \"include\" and \"exclude\" instead."); + } + Ok(TestConfig { + files: files.into_resolved(config_file_specifier)?, + }) + } +} + +#[derive(Clone, Debug, Hash, PartialEq)] +pub struct TestConfig { + pub files: FilePatterns, +} + +impl TestConfig { + pub fn new_with_base(base: PathBuf) -> Self { + Self { + files: FilePatterns::new_with_base(base), + } + } +} + +/// `publish` config representation for serde +/// +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] +#[serde(default, deny_unknown_fields)] +struct SerializedPublishConfig { + pub include: Option>, + pub exclude: Vec, +} + +impl SerializedPublishConfig { + pub fn into_resolved( + self, + config_file_specifier: &Url, + ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + + Ok(PublishConfig { + files: files.into_resolved(config_file_specifier)?, + }) + } +} + +#[derive(Clone, Debug, Hash, PartialEq)] +pub struct PublishConfig { + pub files: FilePatterns, +} + +impl PublishConfig { + pub fn new_with_base(base: PathBuf) -> Self { + Self { + files: FilePatterns::new_with_base(base), + } + } +} + +/// `bench` config representation for serde +/// +/// fields `include` and `exclude` are expanded from [SerializedFilesConfig]. 
+#[derive(Clone, Debug, Default, Deserialize, PartialEq)] +#[serde(default, deny_unknown_fields)] +struct SerializedBenchConfig { + pub include: Option>, + pub exclude: Vec, + #[serde(rename = "files")] + pub deprecated_files: serde_json::Value, +} + +impl SerializedBenchConfig { + pub fn into_resolved( + self, + config_file_specifier: &Url, + ) -> Result { + let (include, exclude) = (self.include, self.exclude); + let files = SerializedFilesConfig { include, exclude }; + if !self.deprecated_files.is_null() { + log::warn!( "Warning: \"files\" configuration in \"bench\" was removed in Deno 2, use \"include\" and \"exclude\" instead."); + } + Ok(BenchConfig { + files: files.into_resolved(config_file_specifier)?, + }) + } +} + +#[derive(Clone, Debug, PartialEq)] +pub struct BenchConfig { + pub files: FilePatterns, +} + +impl BenchConfig { + pub fn new_with_base(base: PathBuf) -> Self { + Self { + files: FilePatterns::new_with_base(base), + } + } +} + +#[derive(Clone, Debug, Deserialize, PartialEq)] +#[serde(untagged)] +pub enum LockConfig { + Bool(bool), + PathBuf(PathBuf), + Object { + path: Option, + frozen: Option, + }, +} + +impl LockConfig { + pub fn frozen(&self) -> bool { + matches!( + self, + LockConfig::Object { + frozen: Some(true), + .. + } + ) + } +} + +#[derive(Debug, Error, JsError)] +#[class(inherit)] +#[error("Failed to parse \"workspace\" configuration.")] +pub struct WorkspaceConfigParseError(#[source] serde_json::Error); + +#[derive(Clone, Debug, Deserialize, PartialEq)] +#[serde(deny_unknown_fields)] +pub struct WorkspaceConfig { + pub members: Vec, +} + +#[derive(Debug, Error, JsError)] +#[class(inherit)] +#[error("Failed to parse \"link\" configuration.")] +pub struct LinkConfigParseError(#[source] serde_json::Error); + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub struct TaskDefinition { + pub command: Option, + #[serde(default)] + pub dependencies: Vec, + #[serde(default)] + pub description: Option, +} + +#[cfg(test)] +impl From<&str> for TaskDefinition { + fn from(value: &str) -> Self { + Self { + command: Some(value.to_string()), + dependencies: vec![], + description: None, + } + } +} + +impl TaskDefinition { + pub fn deserialize_tasks<'de, D>( + deserializer: D, + ) -> Result, D::Error> + where + D: Deserializer<'de>, + { + use std::fmt; + + use serde::de::MapAccess; + use serde::de::Visitor; + + struct TasksVisitor; + + impl<'de> Visitor<'de> for TasksVisitor { + type Value = IndexMap; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a map of task definitions") + } + + fn visit_map(self, mut access: M) -> Result + where + M: MapAccess<'de>, + { + let mut map = IndexMap::with_capacity(access.size_hint().unwrap_or(4)); + + while let Some((key, value)) = + access.next_entry::()? + { + let task_def = match value { + serde_json::Value::String(command) => TaskDefinition { + command: Some(command), + dependencies: Vec::new(), + description: None, + }, + serde_json::Value::Object(_) => { + serde_json::from_value(value).map_err(serde::de::Error::custom)? + } + _ => { + return Err(serde::de::Error::custom("invalid task definition")) + } + }; + map.insert(key, task_def); + } + + Ok(map) + } + } + + deserializer.deserialize_map(TasksVisitor) + } +} + +#[derive(Debug, JsError, Boxed)] +pub struct ConfigFileReadError(pub Box); + +impl ConfigFileReadError { + pub fn is_not_found(&self) -> bool { + if let ConfigFileReadErrorKind::FailedReading { source: ioerr, .. 
} = + self.as_kind() + { + matches!(ioerr.kind(), std::io::ErrorKind::NotFound) + } else { + false + } + } +} + +#[derive(Debug, Error, JsError)] +pub enum ConfigFileReadErrorKind { + #[class(type)] + #[error("Could not convert config file path to specifier. Path: {0}")] + PathToUrl(PathBuf), + #[class(inherit)] + #[error(transparent)] + UrlToFilePathError(#[from] UrlToFilePathError), + #[class(inherit)] + #[error("Error reading config file '{specifier}'.")] + FailedReading { + specifier: Url, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(type)] + #[error("Unable to parse config file JSON {specifier}.")] + Parse { + specifier: Url, + #[source] + source: Box, + }, + #[class(inherit)] + #[error("Failed deserializing config file '{specifier}'.")] + Deserialize { + specifier: Url, + #[source] + #[inherit] + source: serde_json::Error, + }, + #[class(type)] + #[error("Config file JSON should be an object '{specifier}'.")] + NotObject { specifier: Url }, +} + +#[derive(Debug, Error, JsError)] +#[class(type)] +#[error("Unsupported \"nodeModulesDir\" value.")] +pub struct NodeModulesDirParseError { + #[source] + pub source: serde_json::Error, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)] +#[serde(rename_all = "kebab-case")] +pub enum NodeModulesDirMode { + Auto, + Manual, + None, +} + +impl<'de> Deserialize<'de> for NodeModulesDirMode { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct NodeModulesDirModeVisitor; + + impl Visitor<'_> for NodeModulesDirModeVisitor { + type Value = NodeModulesDirMode; + + fn expecting( + &self, + formatter: &mut std::fmt::Formatter, + ) -> std::fmt::Result { + formatter.write_str(r#""auto", "manual", or "none""#) + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + match value { + "auto" => Ok(NodeModulesDirMode::Auto), + "manual" => Ok(NodeModulesDirMode::Manual), + "none" => Ok(NodeModulesDirMode::None), + _ => Err(de::Error::invalid_value(Unexpected::Str(value), &self)), + } + } + + fn visit_bool(self, value: bool) -> Result + where + E: de::Error, + { + if value { + Ok(NodeModulesDirMode::Auto) + } else { + Ok(NodeModulesDirMode::None) + } + } + } + + deserializer.deserialize_any(NodeModulesDirModeVisitor) + } +} + +impl std::fmt::Display for NodeModulesDirMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +impl NodeModulesDirMode { + pub fn as_str(self) -> &'static str { + match self { + NodeModulesDirMode::Auto => "auto", + NodeModulesDirMode::Manual => "manual", + NodeModulesDirMode::None => "none", + } + } + + pub fn uses_node_modules_dir(self) -> bool { + matches!(self, Self::Manual | Self::Auto) + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ConfigFileJson { + pub compiler_options: Option, + pub import_map: Option, + pub imports: Option, + pub scopes: Option, + pub lint: Option, + pub fmt: Option, + pub tasks: Option, + pub test: Option, + pub bench: Option, + pub lock: Option, + pub exclude: Option, + pub node_modules_dir: Option, + pub vendor: Option, + pub license: Option, + pub publish: Option, + + pub name: Option, + pub version: Option, + pub workspace: Option, + pub links: Option, + #[serde(rename = "patch")] + pub(crate) deprecated_patch: Option, + #[serde(rename = "workspaces")] + pub(crate) deprecated_workspaces: Option>, + pub exports: Option, + #[serde(default)] + pub unstable: Vec, +} + +pub trait 
DenoJsonCache { + fn get(&self, path: &Path) -> Option; + fn set(&self, path: PathBuf, deno_json: ConfigFileRc); +} + +#[derive(Debug, Error, JsError)] +#[class(type)] +#[error("compilerOptions should be an object at '{specifier}'")] +pub struct CompilerOptionsParseError { + pub specifier: Url, + #[source] + pub source: serde_json::Error, +} + +#[derive(Debug, Error, JsError)] +pub enum ConfigFileError { + #[class(inherit)] + #[error(transparent)] + CompilerOptionsParseError(CompilerOptionsParseError), + #[class(type)] + #[error("Only file: specifiers are supported for security reasons in import maps stored in a deno.json. To use a remote import map, use the --import-map flag and \"deno.importMap\" in the language server config")] + OnlyFileSpecifiersSupported, + #[class(inherit)] + #[error(transparent)] + UrlToFilePath(#[from] UrlToFilePathError), + #[class(inherit)] + #[error(transparent)] + UrlParse(#[from] url::ParseError), + #[class(inherit)] + #[error(transparent)] + SerdeJson(#[from] serde_json::Error), + #[class(inherit)] + #[error(transparent)] + ImportMap(#[from] import_map::ImportMapError), + #[class(inherit)] + #[error(transparent)] + Io(std::io::Error), +} + +#[derive(Debug, Error, JsError)] +pub enum ConfigFileExportsError { + #[class(type)] + #[error("The {0} must not be empty. Use '.' if you meant the root export.")] + KeyMustNotBeEmpty(Cow<'static, str>), + #[class(type)] + #[error("The {key} must start with './'. Did you mean '{suggestion}'?")] + KeyMustStartWithDotSlash { + key: Cow<'static, str>, + suggestion: String, + }, + #[class(type)] + #[error("The {key} must not end with '/'. Did you mean '{suggestion}'?")] + KeyMustNotEndWithSlash { + key: Cow<'static, str>, + suggestion: String, + }, + #[class(type)] + #[error("The {0} must only contain alphanumeric characters, underscores (_), dashes (-), dots (.), and slashes (/).")] + KeyInvalidCharacter(Cow<'static, str>), + #[class(type)] + #[error("The {0} must not contain double slashes (//), or parts consisting entirely of dots (.).")] + KeyTooManySlashesOrDots(Cow<'static, str>), + #[class(type)] + #[error("The path for the {0} must not be empty.")] + ValueMustNotBeEmpty(Cow<'static, str>), + #[class(type)] + #[error("The path '{value}' at the {key} could not be resolved as a relative path from the config file. Did you mean '{suggestion}'?")] + ValueCouldNotBeResolved { + value: String, + key: Cow<'static, str>, + suggestion: String, + }, + #[class(type)] + #[error("The path '{value}' at the {key} must not end with '/'. Did you mean '{suggestion}'?")] + ValueMustNotEndWithSlash { + value: String, + key: Cow<'static, str>, + suggestion: String, + }, + #[class(type)] + #[error("The path '{value}' at the {key} is missing a file extension. Add a file extension such as '.js' or '.ts'.")] + ValueMissingFileExtension { + value: String, + key: Cow<'static, str>, + }, + #[class(type)] + #[error("The path of the {key} must be a string, found invalid value '{value}'. Exports in deno.json do not support conditional exports.")] + InvalidValueConditionalExports { + key: Cow<'static, str>, + value: Value, + }, + #[class(type)] + #[error( + "The path of the {key} must be a string, found invalid value '{value}'." + )] + InvalidValue { + key: Cow<'static, str>, + value: Value, + }, + #[class(type)] + #[error( + "The 'exports' key must be a string or object, found invalid value '{0}'." 
+ )] + ExportsKeyInvalidValue(Value), +} + +#[derive(Debug, Error, JsError)] +pub enum ToInvalidConfigError { + #[class(inherit)] + #[error("Invalid {config} config")] + InvalidConfig { + config: &'static str, + #[source] + #[inherit] + source: IntoResolvedError, + }, + #[class(inherit)] + #[error("Failed to parse \"{config}\" configuration")] + Parse { + config: &'static str, + #[source] + #[inherit] + source: serde_json::Error, + }, +} + +#[derive(Debug, Error, JsError)] +#[class(type)] +pub enum ResolveTaskConfigError { + #[error("Configuration file task names cannot be empty")] + TaskNameEmpty, + #[error("Configuration file task names must only contain alpha-numeric characters, colons (:), underscores (_), or dashes (-). Task: {0}")] + TaskNameInvalidCharacter(String), + #[error("Configuration file task names must start with an alphabetic character. Task: {0}")] + TaskNameInvalidStartingCharacter(String), + #[class(inherit)] + #[error(transparent)] + ToInvalidConfig(#[from] ToInvalidConfigError), +} + +#[derive(Debug, Error, JsError)] +pub enum ResolveExportValueUrlsError { + #[class(inherit)] + #[error("Failed to parse exports at {specifier}")] + ExportsConfig { + specifier: Url, + #[source] + #[inherit] + error: Box, + }, + #[class(inherit)] + #[error("Failed to join {specifier} with {value}")] + JoinError { + specifier: Url, + value: String, + #[source] + #[inherit] + error: url::ParseError, + }, +} + +#[derive(Debug, Error, JsError)] +pub enum ToLockConfigError { + #[class(inherit)] + #[error(transparent)] + ToInvalidConfigError(#[from] ToInvalidConfigError), + #[class(inherit)] + #[error(transparent)] + UrlToFilePath(#[from] UrlToFilePathError), +} + +#[allow(clippy::disallowed_types)] +pub type ConfigFileRc = crate::sync::MaybeArc; + +#[derive(Clone, Debug)] +pub struct ConfigFile { + pub specifier: Url, + pub json: ConfigFileJson, +} + +impl ConfigFile { + /// Filenames that Deno will recognize when discovering config. + pub(crate) fn resolve_config_file_names<'a>( + additional_config_file_names: &[&'a str], + ) -> Cow<'a, [&'a str]> { + const CONFIG_FILE_NAMES: [&str; 2] = ["deno.json", "deno.jsonc"]; + if additional_config_file_names.is_empty() { + Cow::Borrowed(&CONFIG_FILE_NAMES) + } else { + Cow::Owned( + CONFIG_FILE_NAMES + .iter() + .copied() + .chain(additional_config_file_names.iter().copied()) + .collect::>(), + ) + } + } + + pub(crate) fn maybe_find_in_folder( + sys: &impl FsRead, + maybe_cache: Option<&dyn DenoJsonCache>, + folder: &Path, + config_file_names: &[&str], + ) -> Result, ConfigFileReadError> { + fn is_skippable_err(e: &ConfigFileReadError) -> bool { + if let ConfigFileReadErrorKind::FailedReading { source: ioerr, .. 
} = + e.as_kind() + { + is_skippable_io_error(ioerr) + } else { + false + } + } + + for config_filename in config_file_names { + let file_path = folder.join(config_filename); + if let Some(item) = maybe_cache.and_then(|c| c.get(&file_path)) { + return Ok(Some(item)); + } + match ConfigFile::read(sys, &file_path) { + Ok(cf) => { + let cf = crate::sync::new_rc(cf); + log::debug!("Config file found at '{}'", file_path.display()); + if let Some(cache) = maybe_cache { + cache.set(file_path, cf.clone()); + } + return Ok(Some(cf)); + } + Err(e) if is_skippable_err(&e) => { + // ok, keep going + } + Err(e) => { + return Err(e); + } + } + } + Ok(None) + } + + pub fn read( + sys: &impl FsRead, + config_path: &Path, + ) -> Result { + debug_assert!(config_path.is_absolute()); + let specifier = url_from_file_path(config_path).map_err(|_| { + ConfigFileReadErrorKind::PathToUrl(config_path.to_path_buf()).into_box() + })?; + Self::from_specifier_and_path(sys, specifier, config_path) + } + + pub fn from_specifier( + sys: &impl FsRead, + specifier: Url, + ) -> Result { + let config_path = url_to_file_path(&specifier)?; + Self::from_specifier_and_path(sys, specifier, &config_path) + } + + fn from_specifier_and_path( + sys: &impl FsRead, + specifier: Url, + config_path: &Path, + ) -> Result { + let text = sys.fs_read_to_string_lossy(config_path).map_err(|err| { + ConfigFileReadErrorKind::FailedReading { + specifier: specifier.clone(), + source: err, + } + .into_box() + })?; + Self::new(&text, specifier) + } + + pub fn new(text: &str, specifier: Url) -> Result { + let jsonc = match jsonc_parser::parse_to_ast( + text, + &Default::default(), + &Default::default(), + ) { + Ok(ParseResult { + value: Some(value @ jsonc_parser::ast::Value::Object(_)), + .. + }) => Value::from(value), + Ok(ParseResult { value: None, .. }) => { + json!({}) + } + Err(e) => { + return Err( + ConfigFileReadErrorKind::Parse { + specifier, + source: Box::new(e), + } + .into_box(), + ); + } + _ => { + return Err( + ConfigFileReadErrorKind::NotObject { specifier }.into_box(), + ); + } + }; + let json: ConfigFileJson = + serde_json::from_value(jsonc).map_err(|err| { + ConfigFileReadErrorKind::Deserialize { + specifier: specifier.clone(), + source: err, + } + .into_box() + })?; + + Ok(Self { specifier, json }) + } + + pub fn dir_path(&self) -> PathBuf { + url_to_file_path(&self.specifier) + .unwrap() + .parent() + .unwrap() + .to_path_buf() + } + + /// Returns if the configuration indicates that JavaScript should be + /// type checked, otherwise None if not set. + pub fn check_js(&self) -> Option { + self + .json + .compiler_options + .as_ref() + .and_then(|co| co.get("checkJs").and_then(|v| v.as_bool())) + } + + /// Parse `compilerOptions` and return a serde `Value`. + /// The result also contains any options that were ignored. 
+ pub fn to_compiler_options( + &self, + ) -> Result, CompilerOptionsParseError> { + let Some(compiler_options) = self.json.compiler_options.clone() else { + return Ok(None); + }; + let options: serde_json::Map = + serde_json::from_value(compiler_options).map_err(|source| { + CompilerOptionsParseError { + specifier: self.specifier.clone(), + source, + } + })?; + Ok(Some(parse_compiler_options(options, Some(&self.specifier)))) + } + + pub fn to_import_map_specifier( + &self, + ) -> Result, ConfigFileError> { + let Some(value) = self.json.import_map.as_ref() else { + return Ok(None); + }; + // try to resolve as a url + if let Ok(specifier) = Url::parse(value) { + if specifier.scheme() != "file" { + return Err(ConfigFileError::OnlyFileSpecifiersSupported); + } + return Ok(Some(specifier)); + } + // now as a relative file path + Ok(Some(url_parent(&self.specifier).join(value)?)) + } + + pub fn to_import_map_path(&self) -> Result, ConfigFileError> { + let maybe_specifier = self.to_import_map_specifier()?; + match maybe_specifier { + Some(specifier) => Ok(Some(url_to_file_path(&specifier)?)), + None => Ok(None), + } + } + + pub fn vendor(&self) -> Option { + self.json.vendor + } + + /// Resolves the import map potentially resolving the file specified + /// at the "importMap" entry. + pub fn to_import_map( + &self, + sys: &impl FsRead, + ) -> Result, ConfigFileError> { + let maybe_result = self.to_import_map_value(sys)?; + match maybe_result { + Some((specifier, value)) => { + let import_map = + import_map::parse_from_value(specifier.into_owned(), value)?; + Ok(Some(import_map)) + } + None => Ok(None), + } + } + + /// Resolves the import map `serde_json::Value` potentially resolving the + /// file specified at the "importMap" entry. + pub fn to_import_map_value( + &self, + sys: &impl FsRead, + ) -> Result, serde_json::Value)>, ConfigFileError> { + // has higher precedence over the path + if self.json.imports.is_some() || self.json.scopes.is_some() { + Ok(Some(( + Cow::Borrowed(&self.specifier), + self.to_import_map_value_from_imports(), + ))) + } else { + let Some(specifier) = self.to_import_map_specifier()? else { + return Ok(None); + }; + let Ok(import_map_path) = url_to_file_path(&specifier) else { + return Ok(None); + }; + let text = sys + .fs_read_to_string_lossy(&import_map_path) + .map_err(ConfigFileError::Io)?; + let value = serde_json::from_str(&text)?; + // does not expand the imports because this one will use the import map standard + Ok(Some((Cow::Owned(specifier), value))) + } + } + + /// Creates the import map from the imports entry. + /// + /// Warning: This does not take into account the 'importMap' entry. Use `to_import_map` instead. 
+ pub fn to_import_map_from_imports( + &self, + ) -> Result { + let value = self.to_import_map_value_from_imports(); + let result = import_map::parse_from_value(self.specifier.clone(), value)?; + Ok(result) + } + + pub fn to_import_map_value_from_imports(&self) -> Value { + let mut value = serde_json::Map::with_capacity(2); + if let Some(imports) = &self.json.imports { + value.insert("imports".to_string(), imports.clone()); + } + if let Some(scopes) = &self.json.scopes { + value.insert("scopes".to_string(), scopes.clone()); + } + import_map::ext::expand_import_map_value(Value::Object(value)) + } + + pub fn is_an_import_map(&self) -> bool { + self.json.imports.is_some() || self.json.scopes.is_some() + } + + pub fn is_package(&self) -> bool { + self.json.name.is_some() && self.json.exports.is_some() + } + + pub fn is_workspace(&self) -> bool { + self.json.workspace.is_some() + } + + pub fn has_unstable(&self, name: &str) -> bool { + self.json.unstable.iter().any(|v| v == name) + } + + /// Resolve the export values in a config file to their URLs. + pub fn resolve_export_value_urls( + &self, + ) -> Result, ResolveExportValueUrlsError> { + let exports_config = self + .to_exports_config() + .map_err(|error| ResolveExportValueUrlsError::ExportsConfig { + specifier: self.specifier.clone(), + error: Box::new(error), + })? + .into_map(); + let mut exports = Vec::with_capacity(exports_config.len()); + for (_, value) in exports_config { + let entry_point = self.specifier.join(&value).map_err(|error| { + ResolveExportValueUrlsError::JoinError { + specifier: self.specifier.clone(), + value: value.to_string(), + error, + } + })?; + exports.push(entry_point); + } + Ok(exports) + } + + pub fn to_exports_config( + &self, + ) -> Result { + fn has_extension(value: &str) -> bool { + let search_text = &value[value.rfind('/').unwrap_or(0)..]; + search_text.contains('.') + } + + fn validate_key( + key_display: &dyn Fn() -> Cow<'static, str>, + key: &str, + ) -> Result<(), ConfigFileExportsError> { + if key == "." { + return Ok(()); + } + if key.is_empty() { + return Err(ConfigFileExportsError::KeyMustNotBeEmpty(key_display())); + } + if !key.starts_with("./") { + let suggestion = if key.starts_with('/') { + format!(".{}", key) + } else { + format!("./{}", key) + }; + return Err(ConfigFileExportsError::KeyMustStartWithDotSlash { + key: key_display(), + suggestion, + }); + } + if key.ends_with('/') { + let suggestion = key.trim_end_matches('/'); + return Err(ConfigFileExportsError::KeyMustNotEndWithSlash { + key: key_display(), + suggestion: suggestion.to_string(), + }); + } + // ban anything that is not [a-zA-Z0-9_-./] + if key.chars().any(|c| { + !matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '.' | '/') + }) { + return Err(ConfigFileExportsError::KeyInvalidCharacter(key_display())); + } + // ban parts consisting of only dots, and empty parts (e.g. 
`./foo//bar`) + for part in key.split('/').skip(1) { + if part.is_empty() || part.chars().all(|c| c == '.') { + return Err(ConfigFileExportsError::KeyTooManySlashesOrDots( + key_display(), + )); + } + } + Ok(()) + } + + fn validate_value( + key_display: &dyn Fn() -> Cow<'static, str>, + value: &str, + ) -> Result<(), ConfigFileExportsError> { + if value.is_empty() { + return Err(ConfigFileExportsError::ValueMustNotBeEmpty(key_display())); + } + if !value.starts_with("./") { + let suggestion = if value.starts_with('/') { + format!(".{}", value) + } else { + format!("./{}", value) + }; + return Err(ConfigFileExportsError::ValueCouldNotBeResolved { + value: value.to_string(), + key: key_display(), + suggestion, + }); + } + if value.ends_with('/') { + let suggestion = value.trim_end_matches('/'); + return Err(ConfigFileExportsError::ValueMustNotEndWithSlash { + value: value.to_string(), + key: key_display(), + suggestion: suggestion.to_string(), + }); + } + if !has_extension(value) { + return Err(ConfigFileExportsError::ValueMissingFileExtension { + value: value.to_string(), + key: key_display(), + }); + } + Ok(()) + } + + let map = match &self.json.exports { + Some(Value::Object(map)) => { + let mut result = IndexMap::with_capacity(map.len()); + for (k, v) in map { + let key_display = || Cow::Owned(format!("'{}' export", k)); + validate_key(&key_display, k)?; + match v { + Value::String(value) => { + validate_value(&key_display, value)?; + result.insert(k.clone(), value.clone()); + } + Value::Object(_) => { + return Err( + ConfigFileExportsError::InvalidValueConditionalExports { + key: key_display(), + value: v.clone(), + }, + ); + } + Value::Bool(_) + | Value::Number(_) + | Value::Array(_) + | Value::Null => { + return Err(ConfigFileExportsError::InvalidValue { + key: key_display(), + value: v.clone(), + }); + } + } + } + result + } + Some(Value::String(value)) => { + validate_value(&|| "root export".into(), value)?; + IndexMap::from([(".".to_string(), value.clone())]) + } + Some( + v @ Value::Bool(_) + | v @ Value::Array(_) + | v @ Value::Number(_) + | v @ Value::Null, + ) => { + return Err(ConfigFileExportsError::ExportsKeyInvalidValue(v.clone())); + } + None => IndexMap::new(), + }; + + Ok(ExportsConfig { + base: self.specifier.clone(), + map, + }) + } + + pub fn to_exclude_files_config( + &self, + ) -> Result { + let exclude = self.resolve_exclude_patterns()?; + let raw_files_config = SerializedFilesConfig { + exclude, + ..Default::default() + }; + raw_files_config + .into_resolved(&self.specifier) + .map_err(|error| ToInvalidConfigError::InvalidConfig { + config: "exclude", + source: error, + }) + } + + fn resolve_exclude_patterns( + &self, + ) -> Result, ToInvalidConfigError> { + let mut exclude: Vec = + if let Some(exclude) = self.json.exclude.clone() { + serde_json::from_value(exclude).map_err(|error| { + ToInvalidConfigError::Parse { + config: "exclude", + source: error, + } + })? 
+ } else { + Vec::new() + }; + + if self.json.vendor == Some(true) { + exclude.push("vendor".to_string()); + } + Ok(exclude) + } + + pub fn to_bench_config(&self) -> Result { + match self.json.bench.clone() { + Some(config) => { + let mut exclude_patterns = self.resolve_exclude_patterns()?; + let mut serialized: SerializedBenchConfig = + serde_json::from_value(config).map_err(|error| { + ToInvalidConfigError::Parse { + config: "bench", + source: error, + } + })?; + // top level excludes at the start because they're lower priority + exclude_patterns.extend(std::mem::take(&mut serialized.exclude)); + serialized.exclude = exclude_patterns; + serialized.into_resolved(&self.specifier).map_err(|error| { + ToInvalidConfigError::InvalidConfig { + config: "bench", + source: error, + } + }) + } + None => Ok(BenchConfig { + files: self.to_exclude_files_config()?, + }), + } + } + + pub fn to_fmt_config(&self) -> Result { + match self.json.fmt.clone() { + Some(config) => { + let mut exclude_patterns = self.resolve_exclude_patterns()?; + let mut serialized: SerializedFmtConfig = + serde_json::from_value(config).map_err(|error| { + ToInvalidConfigError::Parse { + config: "fmt", + source: error, + } + })?; + // top level excludes at the start because they're lower priority + exclude_patterns.extend(std::mem::take(&mut serialized.exclude)); + serialized.exclude = exclude_patterns; + serialized.into_resolved(&self.specifier).map_err(|error| { + ToInvalidConfigError::InvalidConfig { + config: "fmt", + source: error, + } + }) + } + None => Ok(FmtConfig { + options: Default::default(), + files: self.to_exclude_files_config()?, + }), + } + } + + pub fn to_lint_config(&self) -> Result { + match self.json.lint.clone() { + Some(config) => { + let mut exclude_patterns = self.resolve_exclude_patterns()?; + let mut serialized: SerializedLintConfig = + serde_json::from_value(config).map_err(|error| { + ToInvalidConfigError::Parse { + config: "lint", + source: error, + } + })?; + // top level excludes at the start because they're lower priority + exclude_patterns.extend(std::mem::take(&mut serialized.exclude)); + serialized.exclude = exclude_patterns; + serialized.into_resolved(&self.specifier).map_err(|error| { + ToInvalidConfigError::InvalidConfig { + config: "lint", + source: error, + } + }) + } + None => Ok(LintConfig { + options: Default::default(), + files: self.to_exclude_files_config()?, + }), + } + } + + pub fn to_test_config(&self) -> Result { + match self.json.test.clone() { + Some(config) => { + let mut exclude_patterns = self.resolve_exclude_patterns()?; + let mut serialized: SerializedTestConfig = + serde_json::from_value(config).map_err(|error| { + ToInvalidConfigError::Parse { + config: "test", + source: error, + } + })?; + // top level excludes at the start because they're lower priority + exclude_patterns.extend(std::mem::take(&mut serialized.exclude)); + serialized.exclude = exclude_patterns; + serialized.into_resolved(&self.specifier).map_err(|error| { + ToInvalidConfigError::InvalidConfig { + config: "test", + source: error, + } + }) + } + None => Ok(TestConfig { + files: self.to_exclude_files_config()?, + }), + } + } + + pub(crate) fn to_publish_config( + &self, + ) -> Result { + match self.json.publish.clone() { + Some(config) => { + let mut exclude_patterns = self.resolve_exclude_patterns()?; + let mut serialized: SerializedPublishConfig = + serde_json::from_value(config).map_err(|error| { + ToInvalidConfigError::Parse { + config: "publish", + source: error, + } + })?; + // top level excludes at 
the start because they're lower priority + exclude_patterns.extend(std::mem::take(&mut serialized.exclude)); + serialized.exclude = exclude_patterns; + serialized.into_resolved(&self.specifier).map_err(|error| { + ToInvalidConfigError::InvalidConfig { + config: "public", + source: error, + } + }) + } + None => Ok(PublishConfig { + files: self.to_exclude_files_config()?, + }), + } + } + + pub fn to_link_config( + &self, + ) -> Result>, LinkConfigParseError> { + match self + .json + .links + .clone() + .or(self.json.deprecated_patch.clone()) + { + Some(config) => match config { + Value::Null => Ok(None), + config => { + let members: Vec = + serde_json::from_value(config).map_err(LinkConfigParseError)?; + Ok(Some(members)) + } + }, + None => Ok(None), + } + } + + pub fn to_workspace_config( + &self, + ) -> Result, WorkspaceConfigParseError> { + match self.json.workspace.clone() { + Some(config) => match config { + Value::Null => Ok(None), + Value::Array(_) => { + let members: Vec = serde_json::from_value(config) + .map_err(WorkspaceConfigParseError)?; + Ok(Some(WorkspaceConfig { members })) + } + _ => { + let config: WorkspaceConfig = serde_json::from_value(config) + .map_err(WorkspaceConfigParseError)?; + Ok(Some(config)) + } + }, + None => Ok(None), + } + } + + pub fn to_license(&self) -> Option { + self.json.license.as_ref().and_then(|value| match value { + Value::String(license) if !license.trim().is_empty() => { + Some(license.trim().to_string()) + } + _ => None, + }) + } + + /// Return any tasks that are defined in the configuration file as a sequence + /// of JSON objects providing the name of the task and the arguments of the + /// task in a detail field. + pub fn to_lsp_tasks(&self) -> Option { + let value = self.json.tasks.clone()?; + let tasks: BTreeMap = serde_json::from_value(value).ok()?; + Some( + tasks + .into_iter() + .map(|(key, value)| { + json!({ + "name": key, + "detail": value, + }) + }) + .collect(), + ) + } + + pub fn to_tasks_config( + &self, + ) -> Result>, ToInvalidConfigError> + { + if let Some(config) = self.json.tasks.clone() { + let tasks_config: IndexMap = + TaskDefinition::deserialize_tasks(config).map_err(|error| { + ToInvalidConfigError::Parse { + config: "tasks", + source: error, + } + })?; + Ok(Some(tasks_config)) + } else { + Ok(None) + } + } + + pub fn to_compiler_option_types( + &self, + ) -> Result)>, CompilerOptionTypesDeserializeError> + { + let Some(compiler_options_value) = self.json.compiler_options.as_ref() + else { + return Ok(None); + }; + let Some(types) = compiler_options_value.get("types") else { + return Ok(None); + }; + let imports: Vec = + serde_json::from_value(types.clone()).map_err(|source| { + CompilerOptionTypesDeserializeError { + specifier: self.specifier.clone(), + source, + } + })?; + if !imports.is_empty() { + let referrer = self.specifier.clone(); + Ok(Some((referrer, imports))) + } else { + Ok(None) + } + } + + /// Based on the compiler options in the configuration file, return the + /// JSX import source configuration. 
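+  ///
+  /// A minimal sketch of reading the raw values (the `config_file` variable is
+  /// hypothetical; the field names come from `RawJsxCompilerOptions` in
+  /// `deno_json/ts.rs`):
+  ///
+  /// ```ignore
+  /// let raw = config_file.to_raw_jsx_compiler_options();
+  /// // each field is `None` when not present in "compilerOptions"
+  /// let _jsx = raw.jsx; // e.g. Some("react".to_string())
+  /// let _import_source = raw.jsx_import_source;
+  /// let _import_source_types = raw.jsx_import_source_types;
+  /// ```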
+ pub fn to_raw_jsx_compiler_options(&self) -> RawJsxCompilerOptions { + self + .json + .compiler_options + .as_ref() + .and_then(|compiler_options_value| { + serde_json::from_value::( + compiler_options_value.clone(), + ) + .ok() + }) + .unwrap_or_default() + } + + pub fn resolve_tasks_config( + &self, + ) -> Result, ResolveTaskConfigError> { + let maybe_tasks_config = self.to_tasks_config()?; + let tasks_config = maybe_tasks_config.unwrap_or_default(); + for key in tasks_config.keys() { + if key.is_empty() { + return Err(ResolveTaskConfigError::TaskNameEmpty); + } else if !key + .chars() + .all(|c| c.is_ascii_alphanumeric() || matches!(c, '_' | '-' | ':')) + { + return Err(ResolveTaskConfigError::TaskNameInvalidCharacter( + key.to_string(), + )); + } else if !key.chars().next().unwrap().is_ascii_alphabetic() { + return Err(ResolveTaskConfigError::TaskNameInvalidStartingCharacter( + key.to_string(), + )); + } + } + Ok(tasks_config) + } + + pub fn to_lock_config( + &self, + ) -> Result, ToLockConfigError> { + if let Some(config) = self.json.lock.clone() { + let mut lock_config: LockConfig = serde_json::from_value(config) + .map_err(|error| ToInvalidConfigError::Parse { + config: "lock", + source: error, + })?; + if let LockConfig::PathBuf(path) + | LockConfig::Object { + path: Some(path), .. + } = &mut lock_config + { + *path = url_to_file_path(&self.specifier)? + .parent() + .unwrap() + .join(&path); + } + Ok(Some(lock_config)) + } else { + Ok(None) + } + } + + pub fn resolve_lockfile_path( + &self, + ) -> Result, ToLockConfigError> { + match self.to_lock_config()? { + Some(LockConfig::Bool(lock)) if !lock => Ok(None), + Some(LockConfig::PathBuf(lock)) => Ok(Some(lock)), + Some(LockConfig::Object { path, .. }) if path.is_some() => Ok(path), + _ => { + let mut path = url_to_file_path(&self.specifier)?; + path.set_file_name("deno.lock"); + Ok(Some(path)) + } + } + } +} + +/// Represents the "default" type library that should be used when type +/// checking the code in the module graph. Note that a user provided config +/// of `"lib"` would override this value. +#[derive(Debug, Clone, Copy, Eq, Hash, PartialEq)] +pub enum TsTypeLib { + DenoWindow, + DenoWorker, +} + +impl Default for TsTypeLib { + fn default() -> Self { + Self::DenoWindow + } +} + +impl Serialize for TsTypeLib { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let value = match self { + Self::DenoWindow => { + vec!["deno.window".to_string(), "deno.unstable".to_string()] + } + Self::DenoWorker => { + vec!["deno.worker".to_string(), "deno.unstable".to_string()] + } + }; + Serialize::serialize(&value, serializer) + } +} + +/// An enum that represents the base tsc configuration to return. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TsConfigType { + /// Return a configuration for bundling, using swc to emit the bundle. This is + /// independent of type checking. + Bundle, + /// Return a configuration to use tsc to type check. This + /// is independent of either bundling or emitting via swc. + Check { lib: TsTypeLib }, + /// Return a configuration to use swc to emit single module files. + Emit, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TsConfigWithIgnoredOptions { + pub ts_config: TsConfig, + pub ignored_options: Vec, +} + +/// For a given configuration type get the starting point TsConfig +/// used that can then be merged with user specified tsconfigs. 
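+///
+/// A minimal sketch of the intended merge flow (assuming `parsed` is a
+/// `ParsedTsConfigOptions` produced by `parse_compiler_options` in
+/// `deno_json/ts.rs`):
+///
+/// ```ignore
+/// let mut ts_config = get_base_ts_config_for_emit(TsConfigType::Check {
+///   lib: TsTypeLib::DenoWindow,
+/// });
+/// // user-specified compiler options win over the base values
+/// ts_config.merge_object_mut(parsed.options);
+/// ```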
+pub fn get_base_ts_config_for_emit(config_type: TsConfigType) -> TsConfig { + match config_type { + TsConfigType::Bundle => TsConfig::new(json!({ + "allowImportingTsExtensions": true, + "checkJs": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": true, + "importsNotUsedAsValues": "remove", + "inlineSourceMap": false, + "inlineSources": false, + "sourceMap": false, + "jsx": "react", + "jsxFactory": "React.createElement", + "jsxFragmentFactory": "React.Fragment", + "module": "NodeNext", + "moduleResolution": "NodeNext", + })), + TsConfigType::Check { lib } => TsConfig::new(json!({ + "allowJs": true, + "allowImportingTsExtensions": true, + "allowSyntheticDefaultImports": true, + "checkJs": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": false, + "incremental": true, + "jsx": "react", + "importsNotUsedAsValues": "remove", + "inlineSourceMap": true, + "inlineSources": true, + "isolatedModules": true, + "lib": lib, + "module": "NodeNext", + "moduleResolution": "NodeNext", + "moduleDetection": "force", + "noEmit": true, + "noImplicitOverride": true, + "resolveJsonModule": true, + "sourceMap": false, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "internal:///.tsbuildinfo", + "useDefineForClassFields": true, + })), + TsConfigType::Emit => TsConfig::new(json!({ + "allowImportingTsExtensions": true, + "checkJs": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": false, + "importsNotUsedAsValues": "remove", + "inlineSourceMap": true, + "inlineSources": true, + "sourceMap": false, + "jsx": "react", + "jsxFactory": "React.createElement", + "jsxFragmentFactory": "React.Fragment", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "resolveJsonModule": true, + })), + } +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use deno_path_util::url_to_file_path; + use pretty_assertions::assert_eq; + use sys_traits::impls::RealSys; + + use super::*; + use crate::glob::PathOrPattern; + + #[macro_export] + macro_rules! assert_contains { + ($string:expr, $($test:expr),+ $(,)?) 
=> { + let string = &$string; // This might be a function call or something + if !($(string.contains($test))||+) { + panic!("{:?} does not contain any of {:?}", string, [$($test),+]); + } + } + } + + struct UnreachableSys; + + impl sys_traits::BaseFsRead for UnreachableSys { + fn base_fs_read( + &self, + _path: &Path, + ) -> std::io::Result> { + unreachable!() + } + } + + fn testdata_path() -> PathBuf { + PathBuf::from(concat!(env!("CARGO_MANIFEST_DIR"))).join("testdata") + } + + fn unpack_object( + result: Result, + name: &str, + ) -> T { + result + .unwrap_or_else(|err| panic!("error parsing {name} object but got {err}")) + } + + #[test] + fn read_config_file_absolute() { + let path = testdata_path().join("module_graph/tsconfig.json"); + let config_file = ConfigFile::read(&RealSys, path.as_path()).unwrap(); + assert!(config_file.json.compiler_options.is_some()); + } + + #[test] + fn include_config_path_on_error() { + let path = testdata_path().join("404.json"); + let error = ConfigFile::read(&RealSys, path.as_path()).err().unwrap(); + assert!(error.to_string().contains("404.json")); + } + + #[test] + fn test_parse_config() { + let config_text = r#"{ + "compilerOptions": { + "build": true, + // comments are allowed + "strict": true + }, + "lint": { + "include": ["src/"], + "exclude": ["src/testdata/"], + "rules": { + "tags": ["recommended"], + "include": ["ban-untagged-todo"] + } + }, + "fmt": { + "include": ["src/"], + "exclude": ["src/testdata/"], + "useTabs": true, + "lineWidth": 80, + "indentWidth": 4, + "singleQuote": true, + "proseWrap": "preserve", + "quoteProps": "asNeeded", + "newLineKind": "crlf", + "useBraces": "whenNotSingleLine", + "bracePosition": "sameLine", + "singleBodyPosition": "nextLine", + "nextControlFlowPosition": "sameLine", + "trailingCommas": "never", + "operatorPosition": "maintain", + "jsx.bracketPosition": "maintain", + "jsx.forceNewLinesSurroundingContent": true, + "jsx.multiLineParens": "never", + "typeLiteral.separatorKind": "semiColon", + "spaceAround": true, + "spaceSurroundingProperties": true + }, + "tasks": { + "build": "deno run --allow-read --allow-write build.ts", + "server": "deno run --allow-net --allow-read server.ts", + "client": { + "description": "Build client project", + "command": "deno run -A client.js", + "dependencies": ["build"] + } + }, + "unstable": ["kv", "ffi"] + }"#; + let config_dir = Url::parse("file:///deno/").unwrap(); + let config_specifier = config_dir.join("tsconfig.json").unwrap(); + let config_file = + ConfigFile::new(config_text, config_specifier.clone()).unwrap(); + let ParsedTsConfigOptions { + options, + maybe_ignored, + } = config_file + .to_compiler_options() + .unwrap() + .unwrap_or_default(); + assert!(options.contains_key("strict")); + assert_eq!(options.len(), 1); + assert_eq!( + maybe_ignored, + Some(IgnoredCompilerOptions { + items: vec!["build".to_string()], + maybe_specifier: Some(config_specifier), + }), + ); + + let config_dir_path = url_to_file_path(&config_dir).unwrap(); + assert_eq!( + unpack_object(config_file.to_lint_config(), "lint"), + LintConfig { + files: FilePatterns { + base: config_dir_path.clone(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + PathBuf::from("/deno/src/") + )])), + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + PathBuf::from("/deno/src/testdata/") + )]), + }, + options: LintOptionsConfig { + rules: LintRulesConfig { + include: Some(vec!["ban-untagged-todo".to_string()]), + exclude: None, + tags: Some(vec!["recommended".to_string()]), + }, + 
plugins: vec![], + } + } + ); + assert_eq!( + unpack_object(config_file.to_fmt_config(), "fmt"), + FmtConfig { + files: FilePatterns { + base: config_dir_path.clone(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + PathBuf::from("/deno/src/") + )])), + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + PathBuf::from("/deno/src/testdata/") + )]), + }, + options: FmtOptionsConfig { + use_tabs: Some(true), + line_width: Some(80), + indent_width: Some(4), + single_quote: Some(true), + semi_colons: None, + prose_wrap: Some(ProseWrap::Preserve), + quote_props: Some(QuoteProps::AsNeeded), + new_line_kind: Some(NewLineKind::CarriageReturnLineFeed), + use_braces: Some(UseBraces::WhenNotSingleLine), + brace_position: Some(BracePosition::SameLine), + single_body_position: Some(SingleBodyPosition::NextLine), + next_control_flow_position: Some(NextControlFlowPosition::SameLine), + trailing_commas: Some(TrailingCommas::Never), + operator_position: Some(OperatorPosition::Maintain), + jsx_bracket_position: Some(BracketPosition::Maintain), + jsx_force_new_lines_surrounding_content: Some(true), + jsx_multi_line_parens: Some(MultiLineParens::Never), + type_literal_separator_kind: Some(SeparatorKind::SemiColon), + space_around: Some(true), + space_surrounding_properties: Some(true), + }, + } + ); + + let tasks_config = config_file.to_tasks_config().unwrap().unwrap(); + assert_eq!( + tasks_config["build"], + "deno run --allow-read --allow-write build.ts".into(), + ); + assert_eq!( + tasks_config["server"], + "deno run --allow-net --allow-read server.ts".into(), + ); + assert_eq!( + tasks_config["client"], + TaskDefinition { + description: Some("Build client project".to_string()), + command: Some("deno run -A client.js".to_string()), + dependencies: vec!["build".to_string()] + } + ); + + assert_eq!( + config_file.json.unstable, + vec!["kv".to_string(), "ffi".to_string()], + ) + } + + #[test] + fn test_parse_config_exclude_lower_priority_path() { + let config_text = r#"{ + "fmt": { + "exclude": ["!dist/data", "dist/"] + } + }"#; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + + let err = config_file.to_fmt_config().err().unwrap(); + assert_eq!(err.to_string(), "Invalid fmt config"); + assert_eq!( + std::error::Error::source(&err).unwrap().to_string(), + r#"Invalid exclude: The negation of '!dist/data' is never reached due to the higher priority 'dist/' exclude. Move '!dist/data' after 'dist/'."# + ); + } + + #[test] + fn test_parse_config_exclude_lower_priority_glob() { + let config_text = r#"{ + "lint": { + "exclude": ["!dist/data/**/*.ts", "dist/"] + } + }"#; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + + let err = config_file.to_lint_config().err().unwrap(); + assert_eq!(err.to_string(), "Invalid lint config"); + assert_eq!( + std::error::Error::source(&err).unwrap().to_string(), + r#"Invalid exclude: The negation of '!dist/data/**/*.ts' is never reached due to the higher priority 'dist/' exclude. 
Move '!dist/data/**/*.ts' after 'dist/'."# + ); + } + + #[test] + fn test_parse_config_with_deprecated_fmt_options() { + let config_text_both = r#"{ + "fmt": { + "options": { + "semiColons": true + }, + "semiColons": false + } + }"#; + let config_text_deprecated = r#"{ + "fmt": { + "options": { + "semiColons": true + } + } + }"#; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file_both = + ConfigFile::new(config_text_both, config_specifier.clone()).unwrap(); + let config_file_deprecated = + ConfigFile::new(config_text_deprecated, config_specifier).unwrap(); + + fn unpack_options(config_file: ConfigFile) -> FmtOptionsConfig { + unpack_object(config_file.to_fmt_config(), "fmt").options + } + + let fmt_options_both = unpack_options(config_file_both); + assert_eq!(fmt_options_both.semi_colons, Some(false)); + + let fmt_options_deprecated = unpack_options(config_file_deprecated); + assert_eq!(fmt_options_deprecated.semi_colons, Some(true)); + } + + #[test] + fn test_parse_config_with_empty_file() { + let config_text = ""; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + config_file.to_compiler_options().unwrap(); // no panic + } + + #[test] + fn test_parse_config_with_commented_file() { + let config_text = r#"//{"foo":"bar"}"#; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + config_file.to_compiler_options().unwrap(); // no panic + } + + #[test] + fn test_parse_config_with_global_files() { + let config_text = r#"{ + "exclude": ["foo/"], + "test": { + "exclude": ["npm/"], + }, + "bench": {} + }"#; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + + config_file.to_compiler_options().unwrap(); // no panic + + let test_config = config_file.to_test_config().unwrap(); + assert_eq!(test_config.files.include, None); + assert_eq!( + test_config.files.exclude, + PathOrPatternSet::from_absolute_paths(&[ + "/deno/foo/".to_string(), + "/deno/npm/".to_string(), + ]) + .unwrap() + ); + + let bench_config = config_file.to_bench_config().unwrap(); + assert_eq!( + bench_config.files.exclude, + PathOrPatternSet::from_absolute_paths(&["/deno/foo/".to_string()]) + .unwrap() + ); + } + + #[test] + fn test_parse_config_publish() { + let config_text = r#"{ + "exclude": ["foo/"], + "publish": { + "exclude": ["npm/"], + } + }"#; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + + config_file.to_compiler_options().unwrap(); // no panic + + let publish_config = config_file.to_publish_config().unwrap(); + assert_eq!(publish_config.files.include, None); + assert_eq!( + publish_config.files.exclude, + PathOrPatternSet::from_absolute_paths(&[ + "/deno/foo/".to_string(), + "/deno/npm/".to_string(), + ]) + .unwrap() + ); + } + + #[test] + fn test_parse_config_with_global_files_only() { + let config_text = r#"{ + "exclude": ["npm/"] + }"#; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + + config_file.to_compiler_options().unwrap(); // no panic + + let files_config = config_file.to_exclude_files_config().unwrap(); + assert_eq!(files_config.include, None); + assert_eq!( 
+ files_config.exclude, + PathOrPatternSet::from_absolute_paths(&["/deno/npm/".to_string()]) + .unwrap() + ); + + let lint_config = config_file.to_lint_config().unwrap(); + assert_eq!(lint_config.files.include, None); + assert_eq!( + lint_config.files.exclude, + PathOrPatternSet::from_absolute_paths(&["/deno/npm/".to_string()]) + .unwrap() + ); + + let fmt_config = config_file.to_fmt_config().unwrap(); + assert_eq!(fmt_config.files.include, None); + assert_eq!( + fmt_config.files.exclude, + PathOrPatternSet::from_absolute_paths(&["/deno/npm/".to_string()]) + .unwrap() + ); + } + + #[test] + fn test_parse_config_with_invalid_file() { + let config_text = "{foo:bar}"; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + // Emit error: Unable to parse config file JSON "" because of Unexpected token on line 1 column 6. + assert!(ConfigFile::new(config_text, config_specifier,).is_err()); + } + + #[test] + fn test_parse_config_with_not_object_file() { + let config_text = "[]"; + let config_specifier = Url::parse("file:///deno/tsconfig.json").unwrap(); + // Emit error: config file JSON "" should be an object + assert!(ConfigFile::new(config_text, config_specifier,).is_err()); + } + + #[test] + fn task_name_invalid_chars() { + run_task_error_test( + r#"{ + "tasks": { + "build": "deno test", + "some%test": "deno bundle mod.ts" + } + }"#, + concat!( + "Configuration file task names must only contain alpha-numeric ", + "characters, colons (:), underscores (_), or dashes (-). Task: some%test", + ), + ); + } + + #[test] + fn task_name_non_alpha_starting_char() { + run_task_error_test( + r#"{ + "tasks": { + "build": "deno test", + "1test": "deno bundle mod.ts" + } + }"#, + concat!( + "Configuration file task names must start with an ", + "alphabetic character. 
Task: 1test", + ), + ); + } + + #[test] + fn task_name_empty() { + run_task_error_test( + r#"{ + "tasks": { + "build": "deno test", + "": "deno bundle mod.ts" + } + }"#, + "Configuration file task names cannot be empty", + ); + } + + #[track_caller] + fn run_task_error_test(config_text: &str, expected_error: &str) { + let config_dir = Url::parse("file:///deno/").unwrap(); + let config_specifier = config_dir.join("tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + assert_eq!( + config_file.resolve_tasks_config().unwrap_err().to_string(), + expected_error, + ); + } + + #[test] + fn files_pattern_matches_remote() { + assert!(FilePatterns::new_with_base(PathBuf::from("/")) + .matches_specifier(&Url::parse("https://example.com/mod.ts").unwrap())); + } + + #[test] + fn resolve_lockfile_path_from_unix_path() { + let config_file = + ConfigFile::new("{}", Url::parse("file:///root/deno.json").unwrap()) + .unwrap(); + let lockfile_path = config_file.resolve_lockfile_path().unwrap(); + let lockfile_path = lockfile_path.unwrap(); + assert_eq!(lockfile_path, PathBuf::from("/root/deno.lock")); + } + + #[test] + fn exports() { + fn get_exports(config_text: &str) -> ExportsConfig { + let config_dir = Url::parse("file:///deno/").unwrap(); + let config_specifier = config_dir.join("tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + config_file.to_exports_config().unwrap() + } + + // no exports + assert_eq!( + get_exports("{}").into_map(), + IndexMap::::new() + ); + // string export + assert_eq!( + get_exports(r#"{ "exports": "./mod.ts" }"#).into_map(), + IndexMap::from([(".".to_string(), "./mod.ts".to_string())]) + ); + // map export + assert_eq!( + get_exports(r#"{ "exports": { "./export": "./mod.ts" } }"#).into_map(), + IndexMap::from([("./export".to_string(), "./mod.ts".to_string())]) + ); + // resolve an export + let exports = get_exports(r#"{ "exports": { "./export": "./mod.ts" } }"#); + assert_eq!( + exports + .get_resolved("./export") + .unwrap() + .unwrap() + .to_string(), + "file:///deno/mod.ts" + ); + assert!(exports.get_resolved("./non-existent").unwrap().is_none()); + } + + #[test] + fn exports_errors() { + #[track_caller] + fn run_test(config_text: &str, expected_error: &str) { + let config_dir = Url::parse("file:///deno/").unwrap(); + let config_specifier = config_dir.join("tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + assert_eq!( + config_file.to_exports_config().unwrap_err().to_string(), + expected_error, + ); + } + + // empty key + run_test( + r#"{ "exports": { "": "./mod.ts" } }"#, + "The '' export must not be empty. Use '.' if you meant the root export.", + ); + // no ./ at start of key + run_test( + r#"{ "exports": { "mod": "./mod.ts" } }"#, + "The 'mod' export must start with './'. Did you mean './mod'?", + ); + // trailing slash in key + run_test( + r#"{ "exports": { "./mod/": "./mod.ts" } }"#, + "The './mod/' export must not end with '/'. Did you mean './mod'?", + ); + // multiple trailing slash in key + run_test( + r#"{ "exports": { "./mod//": "./mod.ts" } }"#, + "The './mod//' export must not end with '/'. 
Did you mean './mod'?", + ); + // unsupported characters in key + run_test( + r#"{ "exports": { "./mod*": "./mod.ts" } }"#, + "The './mod*' export must only contain alphanumeric characters, underscores (_), dashes (-), dots (.), and slashes (/).", + ); + // double slash in key + run_test( + r#"{ "exports": { "./mod//bar": "./mod.ts" } }"#, + "The './mod//bar' export must not contain double slashes (//), or parts consisting entirely of dots (.).", + ); + // . part in key + run_test( + r#"{ "exports": { "././mod": "./mod.ts" } }"#, + "The '././mod' export must not contain double slashes (//), or parts consisting entirely of dots (.).", + ); + // .. part in key + run_test( + r#"{ "exports": { "./../mod": "./mod.ts" } }"#, + "The './../mod' export must not contain double slashes (//), or parts consisting entirely of dots (.).", + ); + // ...... part in key + run_test( + r#"{ "exports": { "./....../mod": "./mod.ts" } }"#, + "The './....../mod' export must not contain double slashes (//), or parts consisting entirely of dots (.).", + ); + + // empty value + run_test( + r#"{ "exports": { "./mod": "" } }"#, + "The path for the './mod' export must not be empty.", + ); + // value without ./ at start + run_test( + r#"{ "exports": { "./mod": "mod.ts" } }"#, + "The path 'mod.ts' at the './mod' export could not be resolved as a relative path from the config file. Did you mean './mod.ts'?", + ); + // value with a trailing slash + run_test( + r#"{ "exports": { "./mod": "./folder/" } }"#, + "The path './folder/' at the './mod' export must not end with '/'. Did you mean './folder'?", + ); + // value without an extension + run_test( + r#"{ "exports": { "./mod": "./folder" } }"#, + "The path './folder' at the './mod' export is missing a file extension. Add a file extension such as '.js' or '.ts'.", + ); + // boolean key value + run_test( + r#"{ "exports": { "./mod": true } }"#, + "The path of the './mod' export must be a string, found invalid value 'true'.", + ); + // object key value + run_test( + r#"{ "exports": { "./mod": {} } }"#, + "The path of the './mod' export must be a string, found invalid value '{}'. 
Exports in deno.json do not support conditional exports.", + ); + // non-map or string value + run_test( + r#"{ "exports": [] }"#, + "The 'exports' key must be a string or object, found invalid value '[]'.", + ); + // null + run_test( + r#"{ "exports": { "./mod": null } }"#, + "The path of the './mod' export must be a string, found invalid value 'null'.", + ); + } + + #[test] + fn resolve_export_value_urls() { + fn get_exports(config_text: &str) -> Vec { + let config_dir = Url::parse("file:///deno/").unwrap(); + let config_specifier = config_dir.join("tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + config_file + .resolve_export_value_urls() + .unwrap() + .into_iter() + .map(|u| u.to_string()) + .collect() + } + + // no exports + assert_eq!(get_exports("{}"), Vec::::new()); + // string export + assert_eq!( + get_exports(r#"{ "exports": "./mod.ts" }"#), + vec!["file:///deno/mod.ts".to_string()] + ); + // map export + assert_eq!( + get_exports(r#"{ "exports": { "./export": "./mod.ts" } }"#), + vec!["file:///deno/mod.ts".to_string()] + ); + // multiple + assert_eq!( + get_exports( + r#"{ "exports": { "./export": "./mod.ts", "./other": "./other.ts" } }"# + ), + vec![ + "file:///deno/mod.ts".to_string(), + "file:///deno/other.ts".to_string(), + ] + ); + } + + #[test] + fn test_is_package() { + fn get_for_config(config_text: &str) -> bool { + let config_specifier = root_url().join("tsconfig.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + config_file.is_package() + } + + assert!(!get_for_config("{}")); + assert!(!get_for_config( + r#"{ + "name": "test" + }"# + )); + assert!(!get_for_config( + r#"{ + "name": "test", + "version": "1.0.0" + }"# + )); + assert!(get_for_config( + r#"{ + "name": "test", + "exports": "./mod.ts" + }"# + )); + assert!(!get_for_config( + r#"{ + "version": "1.0.0", + "exports": "./mod.ts" + }"# + )); + assert!(get_for_config( + r#"{ + "name": "test", + "version": "1.0.0", + "exports": "./mod.ts" + }"# + )); + } + + #[test] + fn test_to_import_map_from_imports() { + let config_text = r#"{ + "imports": { + "@std/test": "jsr:@std/test@0.2.0" + } + }"#; + let config_specifier = root_url().join("deno.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + let result = config_file.to_import_map_from_imports().unwrap(); + + assert_eq!( + json!(result.import_map.imports()), + // imports should be expanded + json!({ + "@std/test/": "jsr:/@std/test@0.2.0/", + "@std/test": "jsr:@std/test@0.2.0", + }) + ); + } + + #[test] + fn test_to_import_map_imports_entry() { + let config_text = r#"{ + "imports": { "@std/test": "jsr:@std/test@0.2.0" }, + // will be ignored because imports and scopes takes precedence + "importMap": "import_map.json", + }"#; + let config_specifier = root_url().join("deno.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + let result = config_file.to_import_map(&UnreachableSys).unwrap().unwrap(); + + assert_eq!( + result.import_map.base_url(), + &root_url().join("deno.json").unwrap() + ); + assert_eq!( + json!(result.import_map.imports()), + // imports should be expanded + json!({ + "@std/test/": "jsr:/@std/test@0.2.0/", + "@std/test": "jsr:@std/test@0.2.0", + }) + ); + } + + #[test] + fn test_to_import_map_scopes_entry() { + let config_text = r#"{ + "scopes": { "https://deno.land/x/test/mod.ts": { "@std/test": "jsr:@std/test@0.2.0" } }, + // will be ignored because imports and 
scopes takes precedence + "importMap": "import_map.json", + }"#; + let config_specifier = root_url().join("deno.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + let result = config_file.to_import_map(&UnreachableSys).unwrap().unwrap(); + + assert_eq!( + result.import_map.base_url(), + &root_url().join("deno.json").unwrap() + ); + assert_eq!( + json!(result.import_map), + // imports should be expanded + json!({ + "imports": {}, + "scopes": { + "https://deno.land/x/test/mod.ts": { + "@std/test/": "jsr:/@std/test@0.2.0/", + "@std/test": "jsr:@std/test@0.2.0", + } + } + }) + ); + } + + #[test] + fn test_to_import_map_import_map_entry() { + struct MockFs; + + impl sys_traits::BaseFsRead for MockFs { + fn base_fs_read( + &self, + path: &Path, + ) -> std::io::Result> { + assert_eq!( + path, + root_url().to_file_path().unwrap().join("import_map.json") + ); + Ok(Cow::Borrowed( + r#"{ "imports": { "@std/test": "jsr:@std/test@0.2.0" } }"#.as_bytes(), + )) + } + } + + let config_text = r#"{ + "importMap": "import_map.json", + }"#; + let config_specifier = root_url().join("deno.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + let result = config_file.to_import_map(&MockFs).unwrap().unwrap(); + + assert_eq!( + result.import_map.base_url(), + &root_url().join("import_map.json").unwrap() + ); + assert_eq!( + json!(result.import_map.imports()), + // imports should NOT be expanded + json!({ + "@std/test": "jsr:@std/test@0.2.0", + }) + ); + } + + #[test] + fn test_to_import_map_import_map_remote() { + let config_text = r#"{ + "importMap": "https://deno.land/import_map.json", + }"#; + let config_specifier = root_url().join("deno.json").unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + let err = config_file.to_import_map(&UnreachableSys).unwrap_err(); + assert_eq!( + err.to_string(), + concat!( + "Only file: specifiers are supported for security reasons in ", + "import maps stored in a deno.json. 
To use a remote import map, ", + "use the --import-map flag and \"deno.importMap\" in the ", + "language server config" + ) + ); + } + + fn root_url() -> Url { + if cfg!(windows) { + Url::parse("file://C:/deno/").unwrap() + } else { + Url::parse("file:///deno/").unwrap() + } + } + + #[test] + fn task_comments() { + let config_text = r#"{ + "tasks": { + // dev task + "dev": "deno run -A --watch mod.ts", + // run task + // with multiple line comments + "run": "deno run -A mod.ts", // comments not supported here + /* + * test task + * + * with multi-line comments + */ + "test": "deno test", + /* we should */ /* ignore these */ "fmt": "deno fmt", + "lint": "deno lint" + // trailing comments + }, + }"#; + + let config = + ConfigFile::new(config_text, root_url().join("deno.jsonc").unwrap()) + .unwrap(); + assert_eq!( + config.resolve_tasks_config().unwrap(), + IndexMap::from([ + ("dev".into(), "deno run -A --watch mod.ts".into(),), + ("run".into(), "deno run -A mod.ts".into(),), + ("test".into(), "deno test".into(),), + ("fmt".into(), "deno fmt".into(),), + ("lint".into(), "deno lint".into(),) + ]) + ); + } + + #[test] + fn resolve_import_map_url_parent() { + let config_text = r#"{ "importMap": "../import_map.json" }"#; + let file_path = root_url() + .join("sub/deno.json") + .unwrap() + .to_file_path() + .unwrap(); + let config_specifier = Url::from_file_path(&file_path).unwrap(); + let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); + assert_eq!( + config_file.to_import_map_path().unwrap().unwrap(), + file_path + .parent() + .unwrap() + .parent() + .unwrap() + .join("import_map.json"), + ); + } + + #[test] + fn lock_object() { + fn root_joined(path: &str) -> PathBuf { + root_url().join(path).unwrap().to_file_path().unwrap() + } + let cases = [ + ( + r#"{ "lock": { "path": "mydeno.lock", "frozen": true } }"#, + (true, root_joined("mydeno.lock")), + ), + ( + r#"{ "lock": { "frozen": false } }"#, + (false, root_joined("deno.lock")), + ), + ( + r#"{ "lock": { "path": "mydeno.lock" } }"#, + (false, root_joined("mydeno.lock")), + ), + (r#"{ "lock": {} }"#, (false, root_joined("deno.lock"))), + ]; + for (config_text, (frozen, resolved_path)) in cases { + let config_file = + ConfigFile::new(config_text, root_url().join("deno.json").unwrap()) + .unwrap(); + let lock_config = config_file.to_lock_config().unwrap().unwrap(); + assert_eq!( + config_file.resolve_lockfile_path().unwrap().unwrap(), + resolved_path, + ); + assert_eq!(lock_config.frozen(), frozen); + } + } + + #[test] + fn node_modules_dir_mode() { + let cases = [ + (json!("auto"), Ok(NodeModulesDirMode::Auto)), + (json!("manual"), Ok(NodeModulesDirMode::Manual)), + (json!("none"), Ok(NodeModulesDirMode::None)), + (json!(true), Ok(NodeModulesDirMode::Auto)), + (json!(false), Ok(NodeModulesDirMode::None)), + (json!("other"), Err(r#"invalid value: string "other", expected "auto", "manual", or "none""#.into())) + ]; + + for (input, expected) in cases { + assert_eq!( + NodeModulesDirMode::deserialize(input).map_err(|e| e.to_string()), + expected + ); + } + } +} diff --git a/libs/config/deno_json/ts.rs b/libs/config/deno_json/ts.rs new file mode 100644 index 0000000000..d87eaa4b30 --- /dev/null +++ b/libs/config/deno_json/ts.rs @@ -0,0 +1,229 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
+ +use std::fmt; + +use serde::Deserialize; +use serde::Serialize; +use serde::Serializer; +use serde_json::Value; +use url::Url; + +#[derive(Debug, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct RawJsxCompilerOptions { + pub jsx: Option, + pub jsx_import_source: Option, + pub jsx_import_source_types: Option, +} + +/// The transpile options that are significant out of a user provided tsconfig +/// file, that we want to deserialize out of the final config for a transpile. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct EmitConfigOptions { + pub check_js: bool, + pub experimental_decorators: bool, + pub emit_decorator_metadata: bool, + pub imports_not_used_as_values: String, + pub inline_source_map: bool, + pub inline_sources: bool, + pub source_map: bool, + pub jsx: String, + pub jsx_factory: String, + pub jsx_fragment_factory: String, + pub jsx_import_source: Option, + pub jsx_precompile_skip_elements: Option>, +} + +/// A structure that represents a set of options that were ignored and the +/// path those options came from. +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct IgnoredCompilerOptions { + pub items: Vec, + pub maybe_specifier: Option, +} + +impl fmt::Display for IgnoredCompilerOptions { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut codes = self.items.clone(); + codes.sort_unstable(); + if let Some(specifier) = &self.maybe_specifier { + write!(f, "Unsupported compiler options in \"{}\".\n The following options were ignored:\n {}", specifier, codes.join(", ")) + } else { + write!(f, "Unsupported compiler options provided.\n The following options were ignored:\n {}", codes.join(", ")) + } + } +} + +impl Serialize for IgnoredCompilerOptions { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + Serialize::serialize(&self.items, serializer) + } +} + +/// A set of all the compiler options that should be allowed; +static ALLOWED_COMPILER_OPTIONS: phf::Set<&'static str> = phf::phf_set! { + "allowUnreachableCode", + "allowUnusedLabels", + "checkJs", + "erasableSyntaxOnly", + "emitDecoratorMetadata", + "exactOptionalPropertyTypes", + "experimentalDecorators", + "isolatedDeclarations", + "jsx", + "jsxFactory", + "jsxFragmentFactory", + "jsxImportSource", + "jsxPrecompileSkipElements", + "lib", + "noErrorTruncation", + "noFallthroughCasesInSwitch", + "noImplicitAny", + "noImplicitOverride", + "noImplicitReturns", + "noImplicitThis", + "noPropertyAccessFromIndexSignature", + "noUncheckedIndexedAccess", + "noUnusedLocals", + "noUnusedParameters", + "rootDirs", + "strict", + "strictBindCallApply", + "strictBuiltinIteratorReturn", + "strictFunctionTypes", + "strictNullChecks", + "strictPropertyInitialization", + "types", + "useUnknownInCatchVariables", + "verbatimModuleSyntax", +}; + +#[derive(Debug, Default, Clone)] +pub struct ParsedTsConfigOptions { + pub options: serde_json::Map, + pub maybe_ignored: Option, +} + +pub fn parse_compiler_options( + compiler_options: serde_json::Map, + maybe_specifier: Option<&Url>, +) -> ParsedTsConfigOptions { + let mut allowed: serde_json::Map = + serde_json::Map::with_capacity(compiler_options.len()); + let mut ignored: Vec = Vec::new(); // don't pre-allocate because it's rare + + for (key, value) in compiler_options { + // We don't pass "types" entries to typescript via the compiler + // options and instead provide those to tsc as "roots". This is + // because our "types" behavior is at odds with how TypeScript's + // "types" works. 
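+    // (In this crate, those "types" entries are surfaced separately through
+    // `ConfigFile::to_compiler_option_types` in `deno_json/mod.rs`.)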
+ // We also don't pass "jsxImportSourceTypes" to TypeScript as it doesn't + // know about this option. It will still take this option into account + // because the graph resolves the JSX import source to the types for TSC. + if key != "types" && key != "jsxImportSourceTypes" { + if ALLOWED_COMPILER_OPTIONS.contains(key.as_str()) { + allowed.insert(key, value.to_owned()); + } else { + ignored.push(key); + } + } + } + let maybe_ignored = if !ignored.is_empty() { + Some(IgnoredCompilerOptions { + items: ignored, + maybe_specifier: maybe_specifier.cloned(), + }) + } else { + None + }; + + ParsedTsConfigOptions { + options: allowed, + maybe_ignored, + } +} + +/// A structure for managing the configuration of TypeScript +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TsConfig(pub Value); + +impl Default for TsConfig { + fn default() -> Self { + Self(serde_json::Value::Object(Default::default())) + } +} + +impl TsConfig { + /// Create a new `TsConfig` with the base being the `value` supplied. + pub fn new(value: Value) -> Self { + TsConfig(value) + } + + pub fn merge_mut(&mut self, value: TsConfig) { + json_merge(&mut self.0, value.0); + } + + /// Merge a serde_json value into the configuration. + pub fn merge_object_mut( + &mut self, + value: serde_json::Map, + ) { + json_merge(&mut self.0, serde_json::Value::Object(value)); + } +} + +impl Serialize for TsConfig { + /// Serializes inner hash map which is ordered by the key + fn serialize(&self, serializer: S) -> std::result::Result + where + S: Serializer, + { + Serialize::serialize(&self.0, serializer) + } +} + +/// A function that works like JavaScript's `Object.assign()`. +fn json_merge(a: &mut Value, b: Value) { + match (a, b) { + (&mut Value::Object(ref mut a), Value::Object(b)) => { + for (k, v) in b { + json_merge(a.entry(k).or_insert(Value::Null), v); + } + } + (a, b) => { + *a = b; + } + } +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + + #[test] + fn test_json_merge() { + let mut value_a = json!({ + "a": true, + "b": "c" + }); + let value_b = json!({ + "b": "d", + "e": false, + }); + json_merge(&mut value_a, value_b); + assert_eq!( + value_a, + json!({ + "a": true, + "b": "d", + "e": false, + }) + ); + } +} diff --git a/libs/config/glob/collector.rs b/libs/config/glob/collector.rs new file mode 100644 index 0000000000..23dd173300 --- /dev/null +++ b/libs/config/glob/collector.rs @@ -0,0 +1,374 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::collections::HashSet; +use std::collections::VecDeque; +use std::path::Path; +use std::path::PathBuf; + +use deno_path_util::normalize_path; +use sys_traits::FsDirEntry; +use sys_traits::FsMetadata; +use sys_traits::FsMetadataValue; +use sys_traits::FsRead; +use sys_traits::FsReadDir; + +use super::FilePatterns; +use crate::glob::gitignore::DirGitIgnores; +use crate::glob::gitignore::GitIgnoreTree; +use crate::glob::FilePatternsMatch; +use crate::glob::PathKind; +use crate::glob::PathOrPattern; + +#[derive(Debug, Clone)] +pub struct WalkEntry<'a> { + pub path: &'a Path, + pub metadata: &'a dyn FsMetadataValue, + pub patterns: &'a FilePatterns, +} + +/// Collects file paths that satisfy the given predicate, by recursively walking `files`. +/// If the walker visits a path that is listed in `ignore`, it skips descending into the directory. 
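+///
+/// A minimal usage sketch (the `/project` base is hypothetical; `RealSys` is
+/// the real file system implementation from `sys_traits::impls`, as used in
+/// the tests below):
+///
+/// ```ignore
+/// let patterns = FilePatterns::new_with_base(PathBuf::from("/project"));
+/// let collector = FileCollector::new(|entry| {
+///     // keep only TypeScript files
+///     entry
+///       .path
+///       .extension()
+///       .and_then(|ext| ext.to_str())
+///       .map(|ext| ext == "ts")
+///       .unwrap_or(false)
+///   })
+///   .ignore_git_folder()
+///   .ignore_node_modules()
+///   .use_gitignore();
+/// let files = collector.collect_file_patterns(&RealSys, patterns);
+/// ```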
+pub struct FileCollector bool> { + file_filter: TFilter, + ignore_git_folder: bool, + ignore_node_modules: bool, + vendor_folder: Option, + use_gitignore: bool, +} + +impl bool> FileCollector { + pub fn new(file_filter: TFilter) -> Self { + Self { + file_filter, + ignore_git_folder: false, + ignore_node_modules: false, + vendor_folder: None, + use_gitignore: false, + } + } + + pub fn ignore_node_modules(mut self) -> Self { + self.ignore_node_modules = true; + self + } + + pub fn set_vendor_folder(mut self, vendor_folder: Option) -> Self { + self.vendor_folder = vendor_folder; + self + } + + pub fn ignore_git_folder(mut self) -> Self { + self.ignore_git_folder = true; + self + } + + pub fn use_gitignore(mut self) -> Self { + self.use_gitignore = true; + self + } + + pub fn collect_file_patterns( + &self, + sys: &TSys, + file_patterns: FilePatterns, + ) -> Vec { + fn is_pattern_matched( + maybe_git_ignore: Option<&DirGitIgnores>, + path: &Path, + is_dir: bool, + file_patterns: &FilePatterns, + ) -> bool { + let path_kind = match is_dir { + true => PathKind::Directory, + false => PathKind::File, + }; + match file_patterns.matches_path_detail(path, path_kind) { + FilePatternsMatch::Passed => { + // check gitignore + let is_gitignored = maybe_git_ignore + .as_ref() + .map(|git_ignore| git_ignore.is_ignored(path, is_dir)) + .unwrap_or(false); + !is_gitignored + } + FilePatternsMatch::PassedOptedOutExclude => true, + FilePatternsMatch::Excluded => false, + } + } + + let mut maybe_git_ignores = if self.use_gitignore { + // Override explicitly specified include paths in the + // .gitignore file. This does not apply to globs because + // that is way too complicated to reason about. + let include_paths = file_patterns + .include + .as_ref() + .map(|include| { + include + .inner() + .iter() + .filter_map(|path_or_pattern| { + if let PathOrPattern::Path(p) = path_or_pattern { + Some(p.clone()) + } else { + None + } + }) + .collect::>() + }) + .unwrap_or_default(); + Some(GitIgnoreTree::new(sys, include_paths)) + } else { + None + }; + let mut target_files = Vec::new(); + let mut visited_paths: HashSet = HashSet::default(); + let file_patterns_by_base = file_patterns.split_by_base(); + for file_patterns in file_patterns_by_base { + let specified_path = normalize_path(&file_patterns.base); + let mut pending_dirs = VecDeque::new(); + let mut handle_entry = + |path: PathBuf, + metadata: &dyn FsMetadataValue, + pending_dirs: &mut VecDeque| { + let maybe_gitignore = + maybe_git_ignores.as_mut().and_then(|git_ignores| { + if metadata.file_type().is_dir() { + git_ignores.get_resolved_git_ignore_for_dir(&path) + } else { + git_ignores.get_resolved_git_ignore_for_file(&path) + } + }); + if !is_pattern_matched( + maybe_gitignore.as_deref(), + &path, + metadata.file_type().is_dir(), + &file_patterns, + ) { + // ignore + } else if metadata.file_type().is_dir() { + // allow the user to opt out of ignoring by explicitly specifying the dir + let opt_out_ignore = specified_path == path; + let should_ignore_dir = + !opt_out_ignore && self.is_ignored_dir(&path); + if !should_ignore_dir && visited_paths.insert(path.clone()) { + pending_dirs.push_back(path); + } + } else if (self.file_filter)(WalkEntry { + path: &path, + metadata, + patterns: &file_patterns, + }) && visited_paths.insert(path.clone()) + { + target_files.push(path); + } + }; + + if let Ok(metadata) = sys.fs_metadata(&specified_path) { + handle_entry(specified_path.clone(), &metadata, &mut pending_dirs); + } + + // use an iterator in order to minimize the 
number of file system operations + while let Some(next_dir) = pending_dirs.pop_front() { + let Ok(entries) = sys.fs_read_dir(&next_dir) else { + continue; + }; + for entry in entries { + let Ok(entry) = entry else { + continue; + }; + let Ok(metadata) = entry.metadata() else { + continue; + }; + handle_entry(entry.path().into_owned(), &metadata, &mut pending_dirs) + } + } + } + target_files + } + + fn is_ignored_dir(&self, path: &Path) -> bool { + path + .file_name() + .map(|dir_name| { + let dir_name = dir_name.to_string_lossy().to_lowercase(); + let is_ignored_file = match dir_name.as_str() { + "node_modules" => self.ignore_node_modules, + ".git" => self.ignore_git_folder, + _ => false, + }; + is_ignored_file + }) + .unwrap_or(false) + || self.is_vendor_folder(path) + } + + fn is_vendor_folder(&self, path: &Path) -> bool { + self + .vendor_folder + .as_ref() + .map(|vendor_folder| path == *vendor_folder) + .unwrap_or(false) + } +} + +#[cfg(test)] +mod test { + use std::path::PathBuf; + + use sys_traits::impls::RealSys; + use tempfile::TempDir; + + use super::*; + use crate::glob::FilePatterns; + use crate::glob::PathOrPattern; + use crate::glob::PathOrPatternSet; + + #[allow(clippy::disallowed_methods)] // allow fs methods + #[test] + fn test_collect_files() { + fn create_files(dir_path: &PathBuf, files: &[&str]) { + std::fs::create_dir_all(dir_path).unwrap(); + for f in files { + std::fs::write(dir_path.join(f), "").unwrap(); + } + } + + // dir.ts + // ├── a.ts + // ├── b.js + // ├── child + // | ├── git + // | | └── git.js + // | ├── node_modules + // | | └── node_modules.js + // | ├── vendor + // | | └── vendor.js + // │ ├── e.mjs + // │ ├── f.mjsx + // │ ├── .foo.TS + // │ └── README.md + // ├── c.tsx + // ├── d.jsx + // └── ignore + // ├── g.d.ts + // └── .gitignore + + let t = TempDir::new().unwrap(); + + let root_dir_path = t.path().join("dir.ts"); + let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"]; + create_files(&root_dir_path, &root_dir_files); + + let child_dir_path = root_dir_path.join("child"); + let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"]; + create_files(&child_dir_path, &child_dir_files); + + std::fs::create_dir_all(t.path().join("dir.ts/child/node_modules")) + .unwrap(); + std::fs::write( + t.path().join("dir.ts/child/node_modules/node_modules.js"), + "", + ) + .unwrap(); + std::fs::create_dir_all(t.path().join("dir.ts/child/.git")).unwrap(); + std::fs::write(t.path().join("dir.ts/child/.git/git.js"), "").unwrap(); + std::fs::create_dir_all(t.path().join("dir.ts/child/vendor")).unwrap(); + std::fs::write(t.path().join("dir.ts/child/vendor/vendor.js"), "").unwrap(); + + let ignore_dir_path = root_dir_path.join("ignore"); + let ignore_dir_files = ["g.d.ts", ".gitignore"]; + create_files(&ignore_dir_path, &ignore_dir_files); + + let file_patterns = FilePatterns { + base: root_dir_path.to_path_buf(), + include: None, + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + ignore_dir_path.to_path_buf(), + )]), + }; + let file_collector = FileCollector::new(|e| { + // exclude dotfiles + e.path + .file_name() + .and_then(|f| f.to_str()) + .map(|f| !f.starts_with('.')) + .unwrap_or(false) + }); + + let result = + file_collector.collect_file_patterns(&RealSys, file_patterns.clone()); + let expected = [ + "README.md", + "a.ts", + "b.js", + "c.tsx", + "d.jsx", + "e.mjs", + "f.mjsx", + "git.js", + "node_modules.js", + "vendor.js", + ]; + let mut file_names = result + .into_iter() + .map(|r| r.file_name().unwrap().to_string_lossy().to_string()) + 
.collect::>(); + file_names.sort(); + assert_eq!(file_names, expected); + + // test ignoring the .git and node_modules folder + let file_collector = file_collector + .ignore_git_folder() + .ignore_node_modules() + .set_vendor_folder(Some(child_dir_path.join("vendor").to_path_buf())); + let result = + file_collector.collect_file_patterns(&RealSys, file_patterns.clone()); + let expected = [ + "README.md", + "a.ts", + "b.js", + "c.tsx", + "d.jsx", + "e.mjs", + "f.mjsx", + ]; + let mut file_names = result + .into_iter() + .map(|r| r.file_name().unwrap().to_string_lossy().to_string()) + .collect::>(); + file_names.sort(); + assert_eq!(file_names, expected); + + // test opting out of ignoring by specifying the dir + let file_patterns = FilePatterns { + base: root_dir_path.to_path_buf(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir_path.to_path_buf()), + PathOrPattern::Path( + root_dir_path.to_path_buf().join("child/node_modules/"), + ), + ])), + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + ignore_dir_path.to_path_buf(), + )]), + }; + let result = file_collector.collect_file_patterns(&RealSys, file_patterns); + let expected = [ + "README.md", + "a.ts", + "b.js", + "c.tsx", + "d.jsx", + "e.mjs", + "f.mjsx", + "node_modules.js", + ]; + let mut file_names = result + .into_iter() + .map(|r| r.file_name().unwrap().to_string_lossy().to_string()) + .collect::>(); + file_names.sort(); + assert_eq!(file_names, expected); + } +} diff --git a/libs/config/glob/gitignore.rs b/libs/config/glob/gitignore.rs new file mode 100644 index 0000000000..dfc6a91da5 --- /dev/null +++ b/libs/config/glob/gitignore.rs @@ -0,0 +1,180 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::collections::HashMap; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; + +use sys_traits::FsMetadata; +use sys_traits::FsRead; + +/// Resolved gitignore for a directory. +pub struct DirGitIgnores { + current: Option>, + parent: Option>, +} + +impl DirGitIgnores { + pub fn is_ignored(&self, path: &Path, is_dir: bool) -> bool { + let mut is_ignored = false; + if let Some(parent) = &self.parent { + is_ignored = parent.is_ignored(path, is_dir); + } + if let Some(current) = &self.current { + match current.matched(path, is_dir) { + ignore::Match::None => {} + ignore::Match::Ignore(_) => { + is_ignored = true; + } + ignore::Match::Whitelist(_) => { + is_ignored = false; + } + } + } + is_ignored + } +} + +/// Resolves gitignores in a directory tree taking into account +/// ancestor gitignores that may be found in a directory. 
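+///
+/// A minimal sketch mirroring the test below (`sys` is any `FsRead + FsMetadata`
+/// implementation, e.g. `sys_traits::impls::InMemorySys`):
+///
+/// ```ignore
+/// let mut tree = GitIgnoreTree::new(&sys, Vec::new());
+/// let path = PathBuf::from("/sub_dir/file.txt");
+/// if let Some(gitignore) = tree.get_resolved_git_ignore_for_file(&path) {
+///   let _is_ignored = gitignore.is_ignored(&path, /* is_dir */ false);
+/// }
+/// ```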
+pub struct GitIgnoreTree<'a, Sys: FsRead + FsMetadata> { + sys: &'a Sys, + ignores: HashMap>>, + include_paths: Vec, +} + +impl<'a, Sys: FsRead + FsMetadata> GitIgnoreTree<'a, Sys> { + pub fn new( + sys: &'a Sys, + // paths that should override what's in the gitignore + include_paths: Vec, + ) -> Self { + Self { + sys, + ignores: Default::default(), + include_paths, + } + } + + pub fn get_resolved_git_ignore_for_dir( + &mut self, + dir_path: &Path, + ) -> Option> { + // for directories, provide itself in order to tell + // if it should stop searching for gitignores because + // maybe this dir_path is a .git directory + let parent = dir_path.parent()?; + self.get_resolved_git_ignore_inner(parent, Some(dir_path)) + } + + pub fn get_resolved_git_ignore_for_file( + &mut self, + file_path: &Path, + ) -> Option> { + let dir_path = file_path.parent()?; + self.get_resolved_git_ignore_inner(dir_path, None) + } + + fn get_resolved_git_ignore_inner( + &mut self, + dir_path: &Path, + maybe_parent: Option<&Path>, + ) -> Option> { + let maybe_resolved = self.ignores.get(dir_path).cloned(); + if let Some(resolved) = maybe_resolved { + resolved + } else { + let resolved = self.resolve_gitignore_in_dir(dir_path, maybe_parent); + self.ignores.insert(dir_path.to_owned(), resolved.clone()); + resolved + } + } + + fn resolve_gitignore_in_dir( + &mut self, + dir_path: &Path, + maybe_parent: Option<&Path>, + ) -> Option> { + if let Some(parent) = maybe_parent { + // stop searching if the parent dir had a .git directory in it + if self.sys.fs_exists_no_err(parent.join(".git")) { + return None; + } + } + + let parent = dir_path.parent().and_then(|parent| { + self.get_resolved_git_ignore_inner(parent, Some(dir_path)) + }); + let current = self + .sys + .fs_read_to_string_lossy(dir_path.join(".gitignore")) + .ok() + .and_then(|text| { + let mut builder = ignore::gitignore::GitignoreBuilder::new(dir_path); + for line in text.lines() { + builder.add_line(None, line).ok()?; + } + // override the gitignore contents to include these paths + for path in &self.include_paths { + if let Ok(suffix) = path.strip_prefix(dir_path) { + let suffix = suffix.to_string_lossy().replace('\\', "/"); + let _ignore = builder.add_line(None, &format!("!/{}", suffix)); + if !suffix.ends_with('/') { + let _ignore = builder.add_line(None, &format!("!/{}/", suffix)); + } + } + } + let gitignore = builder.build().ok()?; + Some(Rc::new(gitignore)) + }); + if parent.is_none() && current.is_none() { + None + } else { + Some(Rc::new(DirGitIgnores { current, parent })) + } + } +} + +#[cfg(test)] +mod test { + use sys_traits::impls::InMemorySys; + use sys_traits::FsCreateDirAll; + use sys_traits::FsWrite; + + use super::*; + + #[test] + fn git_ignore_tree() { + let sys = InMemorySys::default(); + sys.fs_create_dir_all("/sub_dir/sub_dir").unwrap(); + sys.fs_write("/.gitignore", "file.txt").unwrap(); + sys.fs_write("/sub_dir/.gitignore", "data.txt").unwrap(); + sys + .fs_write("/sub_dir/sub_dir/.gitignore", "!file.txt\nignore.txt") + .unwrap(); + let mut ignore_tree = GitIgnoreTree::new(&sys, Vec::new()); + let mut run_test = |path: &str, expected: bool| { + let path = PathBuf::from(path); + let gitignore = + ignore_tree.get_resolved_git_ignore_for_file(&path).unwrap(); + assert_eq!( + gitignore.is_ignored(&path, /* is_dir */ false), + expected, + "Path: {}", + path.display() + ); + }; + run_test("/file.txt", true); + run_test("/other.txt", false); + run_test("/data.txt", false); + run_test("/sub_dir/file.txt", true); + run_test("/sub_dir/other.txt", 
false); + run_test("/sub_dir/data.txt", true); + run_test("/sub_dir/sub_dir/file.txt", false); // unignored up here + run_test("/sub_dir/sub_dir/sub_dir/file.txt", false); + run_test("/sub_dir/sub_dir/sub_dir/ignore.txt", true); + run_test("/sub_dir/sub_dir/ignore.txt", true); + run_test("/sub_dir/ignore.txt", false); + run_test("/ignore.txt", false); + } +} diff --git a/libs/config/glob/mod.rs b/libs/config/glob/mod.rs new file mode 100644 index 0000000000..42f7595c8c --- /dev/null +++ b/libs/config/glob/mod.rs @@ -0,0 +1,1626 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::path::Path; +use std::path::PathBuf; + +use deno_error::JsError; +use deno_path_util::normalize_path; +use deno_path_util::url_to_file_path; +use indexmap::IndexMap; +use thiserror::Error; +use url::Url; + +use crate::UrlToFilePathError; + +mod collector; +mod gitignore; + +pub use collector::FileCollector; +pub use collector::WalkEntry; + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum FilePatternsMatch { + /// File passes as matching, but further exclude matching (ex. .gitignore) + /// may be necessary. + Passed, + /// File passes matching and further exclude matching (ex. .gitignore) + /// should NOT be done. + PassedOptedOutExclude, + /// File was excluded. + Excluded, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum PathKind { + File, + Directory, +} + +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub struct FilePatterns { + /// Default traversal base used when calling `split_by_base()` without + /// any `include` patterns. + pub base: PathBuf, + pub include: Option, + pub exclude: PathOrPatternSet, +} + +impl FilePatterns { + pub fn new_with_base(base: PathBuf) -> Self { + Self { + base, + include: Default::default(), + exclude: Default::default(), + } + } + + pub fn with_new_base(self, new_base: PathBuf) -> Self { + Self { + base: new_base, + ..self + } + } + + pub fn matches_specifier(&self, specifier: &Url) -> bool { + self.matches_specifier_detail(specifier) != FilePatternsMatch::Excluded + } + + pub fn matches_specifier_detail(&self, specifier: &Url) -> FilePatternsMatch { + if specifier.scheme() != "file" { + // can't do .gitignore on a non-file specifier + return FilePatternsMatch::PassedOptedOutExclude; + } + let path = match url_to_file_path(specifier) { + Ok(path) => path, + Err(_) => return FilePatternsMatch::PassedOptedOutExclude, + }; + self.matches_path_detail(&path, PathKind::File) // use file matching behavior + } + + pub fn matches_path(&self, path: &Path, path_kind: PathKind) -> bool { + self.matches_path_detail(path, path_kind) != FilePatternsMatch::Excluded + } + + pub fn matches_path_detail( + &self, + path: &Path, + path_kind: PathKind, + ) -> FilePatternsMatch { + // if there's an include list, only include files that match it + // the include list is a closed set + if let Some(include) = &self.include { + match path_kind { + PathKind::File => { + if include.matches_path_detail(path) != PathOrPatternsMatch::Matched { + return FilePatternsMatch::Excluded; + } + } + PathKind::Directory => { + // for now ignore the include list unless there's a negated + // glob for the directory + for p in include.0.iter().rev() { + match p.matches_path(path) { + PathGlobMatch::Matched => { + break; + } + PathGlobMatch::MatchedNegated => { + return FilePatternsMatch::Excluded + } + PathGlobMatch::NotMatched => { + // keep going + } + } + } + } + } + } + + // the exclude list is an open set and we skip files not in the exclude list + match 
self.exclude.matches_path_detail(path) { + PathOrPatternsMatch::Matched => FilePatternsMatch::Excluded, + PathOrPatternsMatch::NotMatched => FilePatternsMatch::Passed, + PathOrPatternsMatch::Excluded => FilePatternsMatch::PassedOptedOutExclude, + } + } + + /// Creates a collection of `FilePatterns` where the containing patterns + /// are only the ones applicable to the base. + /// + /// The order these are returned in is the order that the directory traversal + /// should occur in. + pub fn split_by_base(&self) -> Vec { + let negated_excludes = self + .exclude + .0 + .iter() + .filter(|e| e.is_negated()) + .collect::>(); + let include = match &self.include { + Some(include) => Cow::Borrowed(include), + None => { + if negated_excludes.is_empty() { + return vec![self.clone()]; + } else { + Cow::Owned(PathOrPatternSet::new(vec![PathOrPattern::Path( + self.base.clone(), + )])) + } + } + }; + + let mut include_paths = Vec::with_capacity(include.0.len()); + let mut include_patterns = Vec::with_capacity(include.0.len()); + let mut exclude_patterns = + Vec::with_capacity(include.0.len() + self.exclude.0.len()); + + for path_or_pattern in &include.0 { + match path_or_pattern { + PathOrPattern::Path(path) => include_paths.push(path), + PathOrPattern::NegatedPath(path) => { + exclude_patterns.push(PathOrPattern::Path(path.clone())); + } + PathOrPattern::Pattern(pattern) => { + if pattern.is_negated() { + exclude_patterns.push(PathOrPattern::Pattern(pattern.as_negated())); + } else { + include_patterns.push(pattern.clone()); + } + } + PathOrPattern::RemoteUrl(_) => {} + } + } + + let capacity = include_patterns.len() + negated_excludes.len(); + let mut include_patterns_by_base_path = include_patterns.into_iter().fold( + IndexMap::with_capacity(capacity), + |mut map: IndexMap<_, Vec<_>>, p| { + map.entry(p.base_path()).or_default().push(p); + map + }, + ); + for p in &negated_excludes { + if let Some(base_path) = p.base_path() { + if !include_patterns_by_base_path.contains_key(&base_path) { + let has_any_base_parent = include_patterns_by_base_path + .keys() + .any(|k| base_path.starts_with(k)) + || include_paths.iter().any(|p| base_path.starts_with(p)); + // don't include an orphaned negated pattern + if has_any_base_parent { + include_patterns_by_base_path.insert(base_path, Vec::new()); + } + } + } + } + + let exclude_by_base_path = exclude_patterns + .iter() + .chain(self.exclude.0.iter()) + .filter_map(|s| Some((s.base_path()?, s))) + .collect::>(); + let get_applicable_excludes = |base_path: &PathBuf| -> Vec { + exclude_by_base_path + .iter() + .filter_map(|(exclude_base_path, exclude)| { + match exclude { + PathOrPattern::RemoteUrl(_) => None, + PathOrPattern::Path(exclude_path) + | PathOrPattern::NegatedPath(exclude_path) => { + // include paths that's are sub paths or an ancestor path + if base_path.starts_with(exclude_path) + || exclude_path.starts_with(base_path) + { + Some((*exclude).clone()) + } else { + None + } + } + PathOrPattern::Pattern(_) => { + // include globs that's are sub paths or an ancestor path + if exclude_base_path.starts_with(base_path) + || base_path.starts_with(exclude_base_path) + { + Some((*exclude).clone()) + } else { + None + } + } + } + }) + .collect::>() + }; + + let mut result = Vec::with_capacity( + include_paths.len() + include_patterns_by_base_path.len(), + ); + for path in include_paths { + let applicable_excludes = get_applicable_excludes(path); + result.push(Self { + base: path.clone(), + include: if self.include.is_none() { + None + } else { + 
Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + path.clone(), + )])) + }, + exclude: PathOrPatternSet::new(applicable_excludes), + }); + } + + // todo(dsherret): This could be further optimized by not including + // patterns that will only ever match another base. + for base_path in include_patterns_by_base_path.keys() { + let applicable_excludes = get_applicable_excludes(base_path); + let mut applicable_includes = Vec::new(); + // get all patterns that apply to the current or ancestor directories + for path in base_path.ancestors() { + if let Some(patterns) = include_patterns_by_base_path.get(path) { + applicable_includes.extend( + patterns + .iter() + .map(|p| PathOrPattern::Pattern((*p).clone())), + ); + } + } + result.push(Self { + base: base_path.clone(), + include: if self.include.is_none() + || applicable_includes.is_empty() + && self + .include + .as_ref() + .map(|i| !i.0.is_empty()) + .unwrap_or(false) + { + None + } else { + Some(PathOrPatternSet::new(applicable_includes)) + }, + exclude: PathOrPatternSet::new(applicable_excludes), + }); + } + + // Sort by the longest base path first. This ensures that we visit opted into + // nested directories first before visiting the parent directory. The directory + // traverser will handle not going into directories it's already been in. + result.sort_by(|a, b| { + // try looking at the parents first so that files in the same + // folder are kept in the same order that they're provided + let (a, b) = + if let (Some(a), Some(b)) = (a.base.parent(), b.base.parent()) { + (a, b) + } else { + (a.base.as_path(), b.base.as_path()) + }; + b.as_os_str().len().cmp(&a.as_os_str().len()) + }); + + result + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub enum PathOrPatternsMatch { + Matched, + NotMatched, + Excluded, +} + +#[derive(Debug, Error, JsError)] +pub enum FromExcludeRelativePathOrPatternsError { + #[class(type)] + #[error("The negation of '{negated_entry}' is never reached due to the higher priority '{entry}' exclude. Move '{negated_entry}' after '{entry}'.")] + HigherPriorityExclude { + negated_entry: String, + entry: String, + }, + #[class(inherit)] + #[error("{0}")] + PathOrPatternParse(#[from] PathOrPatternParseError), +} + +#[derive(Clone, Default, Debug, Hash, Eq, PartialEq)] +pub struct PathOrPatternSet(Vec); + +impl PathOrPatternSet { + pub fn new(elements: Vec) -> Self { + Self(elements) + } + + pub fn from_absolute_paths( + paths: &[String], + ) -> Result { + Ok(Self( + paths + .iter() + .map(|p| PathOrPattern::new(p)) + .collect::, _>>()?, + )) + } + + /// Builds the set of path and patterns for an "include" list. + pub fn from_include_relative_path_or_patterns( + base: &Path, + entries: &[String], + ) -> Result { + Ok(Self( + entries + .iter() + .map(|p| PathOrPattern::from_relative(base, p)) + .collect::, _>>()?, + )) + } + + /// Builds the set and ensures no negations are overruled by + /// higher priority entries. 
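+  ///
+  /// A minimal ordering sketch (illustrative only; `/project` is an arbitrary
+  /// base directory made up for the example):
+  ///
+  /// ```ignore
+  /// let base = std::path::Path::new("/project");
+  /// // ok: the broad exclude comes first, the negation re-includes a sub path
+  /// assert!(PathOrPatternSet::from_exclude_relative_path_or_patterns(
+  ///   base,
+  ///   &["./ignored".to_string(), "!./ignored/keep".to_string()],
+  /// )
+  /// .is_ok());
+  /// // error: the negation would be overruled by the later, broader exclude
+  /// assert!(PathOrPatternSet::from_exclude_relative_path_or_patterns(
+  ///   base,
+  ///   &["!./ignored/keep".to_string(), "./ignored".to_string()],
+  /// )
+  /// .is_err());
+  /// ```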
+  pub fn from_exclude_relative_path_or_patterns(
+    base: &Path,
+    entries: &[String],
+  ) -> Result<Self, FromExcludeRelativePathOrPatternsError> {
+    // error when someone does something like:
+    // exclude: ["!./a/b", "./a"] as it should be the opposite
+    fn validate_entry(
+      found_negated_paths: &Vec<(&str, PathBuf)>,
+      entry: &str,
+      entry_path: &Path,
+    ) -> Result<(), FromExcludeRelativePathOrPatternsError> {
+      for (negated_entry, negated_path) in found_negated_paths {
+        if negated_path.starts_with(entry_path) {
+          return Err(
+            FromExcludeRelativePathOrPatternsError::HigherPriorityExclude {
+              negated_entry: negated_entry.to_string(),
+              entry: entry.to_string(),
+            },
+          );
+        }
+      }
+      Ok(())
+    }
+
+    let mut found_negated_paths: Vec<(&str, PathBuf)> =
+      Vec::with_capacity(entries.len());
+    let mut result = Vec::with_capacity(entries.len());
+    for entry in entries {
+      let p = PathOrPattern::from_relative(base, entry)?;
+      match &p {
+        PathOrPattern::Path(p) => {
+          validate_entry(&found_negated_paths, entry, p)?;
+        }
+        PathOrPattern::NegatedPath(p) => {
+          found_negated_paths.push((entry.as_str(), p.clone()));
+        }
+        PathOrPattern::RemoteUrl(_) => {
+          // ignore
+        }
+        PathOrPattern::Pattern(p) => {
+          if p.is_negated() {
+            let base_path = p.base_path();
+            found_negated_paths.push((entry.as_str(), base_path));
+          }
+        }
+      }
+      result.push(p);
+    }
+    Ok(Self(result))
+  }
+
+  pub fn inner(&self) -> &Vec<PathOrPattern> {
+    &self.0
+  }
+
+  pub fn inner_mut(&mut self) -> &mut Vec<PathOrPattern> {
+    &mut self.0
+  }
+
+  pub fn into_path_or_patterns(self) -> Vec<PathOrPattern> {
+    self.0
+  }
+
+  pub fn matches_path(&self, path: &Path) -> bool {
+    self.matches_path_detail(path) == PathOrPatternsMatch::Matched
+  }
+
+  pub fn matches_path_detail(&self, path: &Path) -> PathOrPatternsMatch {
+    for p in self.0.iter().rev() {
+      match p.matches_path(path) {
+        PathGlobMatch::Matched => return PathOrPatternsMatch::Matched,
+        PathGlobMatch::MatchedNegated => return PathOrPatternsMatch::Excluded,
+        PathGlobMatch::NotMatched => {
+          // ignore
+        }
+      }
+    }
+    PathOrPatternsMatch::NotMatched
+  }
+
+  pub fn base_paths(&self) -> Vec<PathBuf> {
+    let mut result = Vec::with_capacity(self.0.len());
+    for element in &self.0 {
+      match element {
+        PathOrPattern::Path(path) | PathOrPattern::NegatedPath(path) => {
+          result.push(path.to_path_buf());
+        }
+        PathOrPattern::RemoteUrl(_) => {
+          // ignore
+        }
+        PathOrPattern::Pattern(pattern) => {
+          result.push(pattern.base_path());
+        }
+      }
+    }
+    result
+  }
+
+  pub fn push(&mut self, item: PathOrPattern) {
+    self.0.push(item);
+  }
+
+  pub fn append(&mut self, items: impl Iterator<Item = PathOrPattern>) {
+    self.0.extend(items)
+  }
+}
+
+#[derive(Debug, Error, JsError, Clone)]
+#[class(inherit)]
+#[error("Invalid URL '{}'", url)]
+pub struct UrlParseError {
+  url: String,
+  #[source]
+  #[inherit]
+  source: url::ParseError,
+}
+
+#[derive(Debug, Error, JsError)]
+pub enum PathOrPatternParseError {
+  #[class(inherit)]
+  #[error(transparent)]
+  UrlParse(#[from] UrlParseError),
+  #[class(inherit)]
+  #[error(transparent)]
+  UrlToFilePathError(#[from] UrlToFilePathError),
+  #[class(inherit)]
+  #[error(transparent)]
+  GlobParse(#[from] GlobPatternParseError),
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
+pub enum PathOrPattern {
+  Path(PathBuf),
+  NegatedPath(PathBuf),
+  RemoteUrl(Url),
+  Pattern(GlobPattern),
+}
+
+impl PathOrPattern {
+  pub fn new(path: &str) -> Result<Self, PathOrPatternParseError> {
+    if has_url_prefix(path) {
+      let url = Url::parse(path).map_err(|err| UrlParseError {
+        url: path.to_string(),
+        source: err,
+      })?;
+      if url.scheme() == "file" {
+        let path = url_to_file_path(&url)?;
+        return Ok(Self::Path(path));
+      } else {
+        return Ok(Self::RemoteUrl(url));
+      }
+    }
+
+    GlobPattern::new_if_pattern(path)
+      .map(|maybe_pattern| {
+        maybe_pattern
+          .map(PathOrPattern::Pattern)
+          .unwrap_or_else(|| PathOrPattern::Path(normalize_path(path)))
+      })
+      .map_err(|err| err.into())
+  }
+
+  pub fn from_relative(
+    base: &Path,
+    p: &str,
+  ) -> Result<Self, PathOrPatternParseError> {
+    if is_glob_pattern(p) {
+      GlobPattern::from_relative(base, p)
+        .map(PathOrPattern::Pattern)
+        .map_err(|err| err.into())
+    } else if has_url_prefix(p) {
+      PathOrPattern::new(p)
+    } else if let Some(path) = p.strip_prefix('!') {
+      Ok(PathOrPattern::NegatedPath(normalize_path(base.join(path))))
+    } else {
+      Ok(PathOrPattern::Path(normalize_path(base.join(p))))
+    }
+  }
+
+  pub fn matches_path(&self, path: &Path) -> PathGlobMatch {
+    match self {
+      PathOrPattern::Path(p) => {
+        if path.starts_with(p) {
+          PathGlobMatch::Matched
+        } else {
+          PathGlobMatch::NotMatched
+        }
+      }
+      PathOrPattern::NegatedPath(p) => {
+        if path.starts_with(p) {
+          PathGlobMatch::MatchedNegated
+        } else {
+          PathGlobMatch::NotMatched
+        }
+      }
+      PathOrPattern::RemoteUrl(_) => PathGlobMatch::NotMatched,
+      PathOrPattern::Pattern(p) => p.matches_path(path),
+    }
+  }
+
+  /// Returns the base path of the pattern if it's not a remote url pattern.
+  pub fn base_path(&self) -> Option<PathBuf> {
+    match self {
+      PathOrPattern::Path(p) | PathOrPattern::NegatedPath(p) => Some(p.clone()),
+      PathOrPattern::RemoteUrl(_) => None,
+      PathOrPattern::Pattern(p) => Some(p.base_path()),
+    }
+  }
+
+  /// If this is a negated pattern.
+  pub fn is_negated(&self) -> bool {
+    match self {
+      PathOrPattern::Path(_) => false,
+      PathOrPattern::NegatedPath(_) => true,
+      PathOrPattern::RemoteUrl(_) => false,
+      PathOrPattern::Pattern(p) => p.is_negated(),
+    }
+  }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum PathGlobMatch {
+  Matched,
+  MatchedNegated,
+  NotMatched,
+}
+
+#[derive(Debug, Error, JsError)]
+#[class(type)]
+#[error("Failed to expand glob: \"{pattern}\"")]
+pub struct GlobPatternParseError {
+  pattern: String,
+  #[source]
+  source: glob::PatternError,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct GlobPattern {
+  is_negated: bool,
+  pattern: glob::Pattern,
+}
+
+impl GlobPattern {
+  pub fn new_if_pattern(
+    pattern: &str,
+  ) -> Result<Option<Self>, GlobPatternParseError> {
+    if !is_glob_pattern(pattern) {
+      return Ok(None);
+    }
+    Self::new(pattern).map(Some)
+  }
+
+  pub fn new(pattern: &str) -> Result<Self, GlobPatternParseError> {
+    let (is_negated, pattern) = match pattern.strip_prefix('!') {
+      Some(pattern) => (true, pattern),
+      None => (false, pattern),
+    };
+    let pattern = escape_brackets(pattern).replace('\\', "/");
+    let pattern =
+      glob::Pattern::new(&pattern).map_err(|source| GlobPatternParseError {
+        pattern: pattern.to_string(),
+        source,
+      })?;
+    Ok(Self {
+      is_negated,
+      pattern,
+    })
+  }
+
+  pub fn from_relative(
+    base: &Path,
+    p: &str,
+  ) -> Result<Self, GlobPatternParseError> {
+    let (is_negated, p) = match p.strip_prefix('!') {
+      Some(p) => (true, p),
+      None => (false, p),
+    };
+    let base_str = base.to_string_lossy().replace('\\', "/");
+    let p = p.strip_prefix("./").unwrap_or(p);
+    let p = p.strip_suffix('/').unwrap_or(p);
+    let pattern = capacity_builder::StringBuilder::<String>::build(|builder| {
+      if is_negated {
+        builder.append('!');
+      }
+      builder.append(&base_str);
+      if !base_str.ends_with('/') {
+        builder.append('/');
+      }
+      builder.append(p);
+    })
+    .unwrap();
+    GlobPattern::new(&pattern)
+  }
+
+  pub fn as_str(&self) -> Cow<str> {
+    if self.is_negated {
+      Cow::Owned(format!("!{}", self.pattern.as_str()))
+    }
else { + Cow::Borrowed(self.pattern.as_str()) + } + } + + pub fn matches_path(&self, path: &Path) -> PathGlobMatch { + if self.pattern.matches_path_with(path, match_options()) { + if self.is_negated { + PathGlobMatch::MatchedNegated + } else { + PathGlobMatch::Matched + } + } else { + PathGlobMatch::NotMatched + } + } + + pub fn base_path(&self) -> PathBuf { + let base_path = self + .pattern + .as_str() + .split('/') + .take_while(|c| !has_glob_chars(c)) + .collect::>() + .join(std::path::MAIN_SEPARATOR_STR); + PathBuf::from(base_path) + } + + pub fn is_negated(&self) -> bool { + self.is_negated + } + + fn as_negated(&self) -> GlobPattern { + Self { + is_negated: !self.is_negated, + pattern: self.pattern.clone(), + } + } +} + +pub fn is_glob_pattern(path: &str) -> bool { + !has_url_prefix(path) && has_glob_chars(path) +} + +fn has_url_prefix(pattern: &str) -> bool { + pattern.starts_with("http://") + || pattern.starts_with("https://") + || pattern.starts_with("file://") + || pattern.starts_with("npm:") + || pattern.starts_with("jsr:") +} + +fn has_glob_chars(pattern: &str) -> bool { + // we don't support [ and ] + pattern.chars().any(|c| matches!(c, '*' | '?')) +} + +fn escape_brackets(pattern: &str) -> String { + // Escape brackets - we currently don't support them, because with introduction + // of glob expansion paths like "pages/[id].ts" would suddenly start giving + // wrong results. We might want to revisit that in the future. + pattern.replace('[', "[[]").replace(']', "[]]") +} + +fn match_options() -> glob::MatchOptions { + // Matches what `deno_task_shell` does + glob::MatchOptions { + // false because it should work the same way on case insensitive file systems + case_sensitive: false, + // true because it copies what sh does + require_literal_separator: true, + // true because it copies with sh does—these files are considered "hidden" + require_literal_leading_dot: true, + } +} + +#[cfg(test)] +mod test { + use std::error::Error; + + use deno_path_util::url_from_directory_path; + use pretty_assertions::assert_eq; + use tempfile::TempDir; + + use super::*; + + // For easier comparisons in tests. 
+ #[derive(Debug, PartialEq, Eq)] + struct ComparableFilePatterns { + base: String, + include: Option>, + exclude: Vec, + } + + impl ComparableFilePatterns { + pub fn new(root: &Path, file_patterns: &FilePatterns) -> Self { + fn path_to_string(root: &Path, path: &Path) -> String { + path + .strip_prefix(root) + .unwrap() + .to_string_lossy() + .replace('\\', "/") + } + + fn path_or_pattern_to_string( + root: &Path, + p: &PathOrPattern, + ) -> Option { + match p { + PathOrPattern::RemoteUrl(_) => None, + PathOrPattern::Path(p) => Some(path_to_string(root, p)), + PathOrPattern::NegatedPath(p) => { + Some(format!("!{}", path_to_string(root, p))) + } + PathOrPattern::Pattern(p) => { + let was_negated = p.is_negated(); + let p = if was_negated { + p.as_negated() + } else { + p.clone() + }; + let text = p + .as_str() + .strip_prefix(&format!( + "{}/", + root.to_string_lossy().replace('\\', "/") + )) + .unwrap_or_else(|| panic!("pattern: {:?}, root: {:?}", p, root)) + .to_string(); + Some(if was_negated { + format!("!{}", text) + } else { + text + }) + } + } + } + + Self { + base: path_to_string(root, &file_patterns.base), + include: file_patterns.include.as_ref().map(|p| { + p.0 + .iter() + .filter_map(|p| path_or_pattern_to_string(root, p)) + .collect() + }), + exclude: file_patterns + .exclude + .0 + .iter() + .filter_map(|p| path_or_pattern_to_string(root, p)) + .collect(), + } + } + + pub fn from_split( + root: &Path, + patterns_by_base: &[FilePatterns], + ) -> Vec { + patterns_by_base + .iter() + .map(|file_patterns| ComparableFilePatterns::new(root, file_patterns)) + .collect() + } + } + + #[test] + fn file_patterns_split_by_base_dir() { + let temp_dir = TempDir::new().unwrap(); + let patterns = FilePatterns { + base: temp_dir.path().to_path_buf(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Pattern( + GlobPattern::new(&format!( + "{}/inner/**/*.ts", + temp_dir.path().to_string_lossy().replace('\\', "/") + )) + .unwrap(), + ), + PathOrPattern::Pattern( + GlobPattern::new(&format!( + "{}/inner/sub/deeper/**/*.js", + temp_dir.path().to_string_lossy().replace('\\', "/") + )) + .unwrap(), + ), + PathOrPattern::Pattern( + GlobPattern::new(&format!( + "{}/other/**/*.js", + temp_dir.path().to_string_lossy().replace('\\', "/") + )) + .unwrap(), + ), + PathOrPattern::from_relative(temp_dir.path(), "!./other/**/*.ts") + .unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "sub/file.ts").unwrap(), + ])), + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::Pattern( + GlobPattern::new(&format!( + "{}/inner/other/**/*.ts", + temp_dir.path().to_string_lossy().replace('\\', "/") + )) + .unwrap(), + ), + PathOrPattern::Path( + temp_dir + .path() + .join("inner/sub/deeper/file.js") + .to_path_buf(), + ), + ]), + }; + let split = ComparableFilePatterns::from_split( + temp_dir.path(), + &patterns.split_by_base(), + ); + assert_eq!( + split, + vec![ + ComparableFilePatterns { + base: "inner/sub/deeper".to_string(), + include: Some(vec![ + "inner/sub/deeper/**/*.js".to_string(), + "inner/**/*.ts".to_string(), + ]), + exclude: vec!["inner/sub/deeper/file.js".to_string()], + }, + ComparableFilePatterns { + base: "sub/file.ts".to_string(), + include: Some(vec!["sub/file.ts".to_string()]), + exclude: vec![], + }, + ComparableFilePatterns { + base: "inner".to_string(), + include: Some(vec!["inner/**/*.ts".to_string()]), + exclude: vec![ + "inner/other/**/*.ts".to_string(), + "inner/sub/deeper/file.js".to_string(), + ], + }, + ComparableFilePatterns { + base: "other".to_string(), + include: 
Some(vec!["other/**/*.js".to_string()]), + exclude: vec!["other/**/*.ts".to_string()], + } + ] + ); + } + + #[test] + fn file_patterns_split_by_base_dir_unexcluded() { + let temp_dir = TempDir::new().unwrap(); + let patterns = FilePatterns { + base: temp_dir.path().to_path_buf(), + include: None, + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::from_relative(temp_dir.path(), "./ignored").unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!./ignored/unexcluded") + .unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!./ignored/test/**") + .unwrap(), + ]), + }; + let split = ComparableFilePatterns::from_split( + temp_dir.path(), + &patterns.split_by_base(), + ); + assert_eq!( + split, + vec![ + ComparableFilePatterns { + base: "ignored/unexcluded".to_string(), + include: None, + exclude: vec![ + // still keeps the higher level exclude for cases + // where these two are accidentally swapped + "ignored".to_string(), + // keep the glob for the current dir because it + // could be used to override the .gitignore + "!ignored/unexcluded".to_string(), + ], + }, + ComparableFilePatterns { + base: "ignored/test".to_string(), + include: None, + exclude: vec!["ignored".to_string(), "!ignored/test/**".to_string(),], + }, + ComparableFilePatterns { + base: "".to_string(), + include: None, + exclude: vec![ + "ignored".to_string(), + "!ignored/unexcluded".to_string(), + "!ignored/test/**".to_string(), + ], + }, + ] + ); + } + + #[test] + fn file_patterns_split_by_base_dir_unexcluded_with_path_includes() { + let temp_dir = TempDir::new().unwrap(); + let patterns = FilePatterns { + base: temp_dir.path().to_path_buf(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::from_relative( + temp_dir.path(), + "./sub", + ) + .unwrap()])), + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/**") + .unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "./orphan").unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!./orphan/test/**") + .unwrap(), + ]), + }; + let split = ComparableFilePatterns::from_split( + temp_dir.path(), + &patterns.split_by_base(), + ); + assert_eq!( + split, + vec![ + ComparableFilePatterns { + base: "sub/ignored/test".to_string(), + include: None, + exclude: vec![ + "sub/ignored".to_string(), + "!sub/ignored/test/**".to_string(), + ], + }, + ComparableFilePatterns { + base: "sub".to_string(), + include: Some(vec!["sub".to_string()]), + exclude: vec![ + "sub/ignored".to_string(), + "!sub/ignored/test/**".to_string(), + ], + }, + ] + ); + } + + #[test] + fn file_patterns_split_by_base_dir_unexcluded_with_glob_includes() { + let temp_dir = TempDir::new().unwrap(); + let patterns = FilePatterns { + base: temp_dir.path().to_path_buf(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::from_relative( + temp_dir.path(), + "./sub/**", + ) + .unwrap()])), + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/**") + .unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!./orphan/test/**") + .unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!orphan/other").unwrap(), + ]), + }; + let split = ComparableFilePatterns::from_split( + temp_dir.path(), + &patterns.split_by_base(), + ); + assert_eq!( + split, + vec![ + ComparableFilePatterns { + base: "sub/ignored/test".to_string(), + include: 
Some(vec!["sub/**".to_string()]), + exclude: vec![ + "sub/ignored".to_string(), + "!sub/ignored/test/**".to_string() + ], + }, + ComparableFilePatterns { + base: "sub".to_string(), + include: Some(vec!["sub/**".to_string()]), + exclude: vec![ + "sub/ignored".to_string(), + "!sub/ignored/test/**".to_string(), + ], + } + ] + ); + } + + #[test] + fn file_patterns_split_by_base_dir_opposite_exclude() { + let temp_dir = TempDir::new().unwrap(); + let patterns = FilePatterns { + base: temp_dir.path().to_path_buf(), + include: None, + // this will actually error before it gets here in integration, + // but it's best to ensure it's handled anyway + exclude: PathOrPatternSet::new(vec![ + // this won't be unexcluded because it's lower priority than the entry below + PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/") + .unwrap(), + // this is higher priority + PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(), + ]), + }; + let split = ComparableFilePatterns::from_split( + temp_dir.path(), + &patterns.split_by_base(), + ); + assert_eq!( + split, + vec![ + ComparableFilePatterns { + base: "sub/ignored/test".to_string(), + include: None, + exclude: vec![ + "!sub/ignored/test".to_string(), + "sub/ignored".to_string(), + ], + }, + ComparableFilePatterns { + base: "".to_string(), + include: None, + exclude: vec![ + "!sub/ignored/test".to_string(), + "sub/ignored".to_string(), + ], + }, + ] + ); + } + + #[test] + fn file_patterns_split_by_base_dir_exclude_unexcluded_and_glob() { + let temp_dir = TempDir::new().unwrap(); + let patterns = FilePatterns { + base: temp_dir.path().to_path_buf(), + include: None, + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored").unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "!./sub/ignored/test/") + .unwrap(), + PathOrPattern::from_relative(temp_dir.path(), "./sub/ignored/**/*.ts") + .unwrap(), + ]), + }; + let split = ComparableFilePatterns::from_split( + temp_dir.path(), + &patterns.split_by_base(), + ); + assert_eq!( + split, + vec![ + ComparableFilePatterns { + base: "sub/ignored/test".to_string(), + include: None, + exclude: vec![ + "sub/ignored".to_string(), + "!sub/ignored/test".to_string(), + "sub/ignored/**/*.ts".to_string() + ], + }, + ComparableFilePatterns { + base: "".to_string(), + include: None, + exclude: vec![ + "sub/ignored".to_string(), + "!sub/ignored/test".to_string(), + "sub/ignored/**/*.ts".to_string(), + ], + }, + ] + ); + } + + #[track_caller] + fn run_file_patterns_match_test( + file_patterns: &FilePatterns, + path: &Path, + kind: PathKind, + expected: FilePatternsMatch, + ) { + assert_eq!( + file_patterns.matches_path_detail(path, kind), + expected, + "path: {:?}, kind: {:?}", + path, + kind + ); + assert_eq!( + file_patterns.matches_path(path, kind), + match expected { + FilePatternsMatch::Passed + | FilePatternsMatch::PassedOptedOutExclude => true, + FilePatternsMatch::Excluded => false, + } + ) + } + + #[test] + fn file_patterns_include() { + let cwd = current_dir(); + // include is a closed set + let file_patterns = FilePatterns { + base: cwd.clone(), + include: Some(PathOrPatternSet(vec![ + PathOrPattern::from_relative(&cwd, "target").unwrap(), + PathOrPattern::from_relative(&cwd, "other/**/*.ts").unwrap(), + ])), + exclude: PathOrPatternSet(vec![]), + }; + let run_test = + |path: &Path, kind: PathKind, expected: FilePatternsMatch| { + run_file_patterns_match_test(&file_patterns, path, kind, expected); + }; + run_test(&cwd, PathKind::Directory, 
FilePatternsMatch::Passed); + run_test( + &cwd.join("other"), + PathKind::Directory, + FilePatternsMatch::Passed, + ); + run_test( + &cwd.join("other/sub_dir"), + PathKind::Directory, + FilePatternsMatch::Passed, + ); + run_test( + &cwd.join("not_matched"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + run_test( + &cwd.join("other/test.ts"), + PathKind::File, + FilePatternsMatch::Passed, + ); + run_test( + &cwd.join("other/test.js"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + } + + #[test] + fn file_patterns_exclude() { + let cwd = current_dir(); + let file_patterns = FilePatterns { + base: cwd.clone(), + include: None, + exclude: PathOrPatternSet(vec![ + PathOrPattern::from_relative(&cwd, "target").unwrap(), + PathOrPattern::from_relative(&cwd, "!not_excluded").unwrap(), + // lower items take priority + PathOrPattern::from_relative(&cwd, "excluded_then_not_excluded") + .unwrap(), + PathOrPattern::from_relative(&cwd, "!excluded_then_not_excluded") + .unwrap(), + PathOrPattern::from_relative(&cwd, "!not_excluded_then_excluded") + .unwrap(), + PathOrPattern::from_relative(&cwd, "not_excluded_then_excluded") + .unwrap(), + ]), + }; + let run_test = + |path: &Path, kind: PathKind, expected: FilePatternsMatch| { + run_file_patterns_match_test(&file_patterns, path, kind, expected); + }; + run_test(&cwd, PathKind::Directory, FilePatternsMatch::Passed); + run_test( + &cwd.join("target"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + run_test( + &cwd.join("not_excluded"), + PathKind::File, + FilePatternsMatch::PassedOptedOutExclude, + ); + run_test( + &cwd.join("excluded_then_not_excluded"), + PathKind::File, + FilePatternsMatch::PassedOptedOutExclude, + ); + run_test( + &cwd.join("not_excluded_then_excluded"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + } + + #[test] + fn file_patterns_include_exclude() { + let cwd = current_dir(); + let file_patterns = FilePatterns { + base: cwd.clone(), + include: Some(PathOrPatternSet(vec![ + PathOrPattern::from_relative(&cwd, "other").unwrap(), + PathOrPattern::from_relative(&cwd, "target").unwrap(), + PathOrPattern::from_relative(&cwd, "**/*.js").unwrap(), + PathOrPattern::from_relative(&cwd, "**/file.ts").unwrap(), + ])), + exclude: PathOrPatternSet(vec![ + PathOrPattern::from_relative(&cwd, "target").unwrap(), + PathOrPattern::from_relative(&cwd, "!target/unexcluded/").unwrap(), + PathOrPattern::from_relative(&cwd, "!target/other/**").unwrap(), + PathOrPattern::from_relative(&cwd, "**/*.ts").unwrap(), + PathOrPattern::from_relative(&cwd, "!**/file.ts").unwrap(), + ]), + }; + let run_test = + |path: &Path, kind: PathKind, expected: FilePatternsMatch| { + run_file_patterns_match_test(&file_patterns, path, kind, expected); + }; + // matches other + run_test( + &cwd.join("other/test.txt"), + PathKind::File, + FilePatternsMatch::Passed, + ); + // matches **/*.js + run_test( + &cwd.join("sub_dir/test.js"), + PathKind::File, + FilePatternsMatch::Passed, + ); + // not in include set + run_test( + &cwd.join("sub_dir/test.txt"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + // .ts extension not matched + run_test( + &cwd.join("other/test.ts"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + // file.ts excluded from excludes + run_test( + &cwd.join("other/file.ts"), + PathKind::File, + FilePatternsMatch::PassedOptedOutExclude, + ); + // not allowed target dir + run_test( + &cwd.join("target/test.txt"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + run_test( + &cwd.join("target/sub_dir/test.txt"), 
+ PathKind::File, + FilePatternsMatch::Excluded, + ); + // but allowed target/other dir + run_test( + &cwd.join("target/other/test.txt"), + PathKind::File, + FilePatternsMatch::PassedOptedOutExclude, + ); + run_test( + &cwd.join("target/other/sub/dir/test.txt"), + PathKind::File, + FilePatternsMatch::PassedOptedOutExclude, + ); + // and in target/unexcluded + run_test( + &cwd.join("target/unexcluded/test.txt"), + PathKind::File, + FilePatternsMatch::PassedOptedOutExclude, + ); + } + + #[test] + fn file_patterns_include_excluded() { + let cwd = current_dir(); + let file_patterns = FilePatterns { + base: cwd.clone(), + include: None, + exclude: PathOrPatternSet(vec![ + PathOrPattern::from_relative(&cwd, "js/").unwrap(), + PathOrPattern::from_relative(&cwd, "!js/sub_dir/").unwrap(), + ]), + }; + let run_test = + |path: &Path, kind: PathKind, expected: FilePatternsMatch| { + run_file_patterns_match_test(&file_patterns, path, kind, expected); + }; + run_test( + &cwd.join("js/test.txt"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + run_test( + &cwd.join("js/sub_dir/test.txt"), + PathKind::File, + FilePatternsMatch::PassedOptedOutExclude, + ); + } + + #[test] + fn file_patterns_opposite_incorrect_excluded_include() { + let cwd = current_dir(); + let file_patterns = FilePatterns { + base: cwd.clone(), + include: None, + exclude: PathOrPatternSet(vec![ + // this is lower priority + PathOrPattern::from_relative(&cwd, "!js/sub_dir/").unwrap(), + // this wins because it's higher priority + PathOrPattern::from_relative(&cwd, "js/").unwrap(), + ]), + }; + let run_test = + |path: &Path, kind: PathKind, expected: FilePatternsMatch| { + run_file_patterns_match_test(&file_patterns, path, kind, expected); + }; + run_test( + &cwd.join("js/test.txt"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + run_test( + &cwd.join("js/sub_dir/test.txt"), + PathKind::File, + FilePatternsMatch::Excluded, + ); + } + + #[test] + fn from_relative() { + let cwd = current_dir(); + // leading dot slash + { + let pattern = PathOrPattern::from_relative(&cwd, "./**/*.ts").unwrap(); + assert_eq!( + pattern.matches_path(&cwd.join("foo.ts")), + PathGlobMatch::Matched + ); + assert_eq!( + pattern.matches_path(&cwd.join("dir/foo.ts")), + PathGlobMatch::Matched + ); + assert_eq!( + pattern.matches_path(&cwd.join("foo.js")), + PathGlobMatch::NotMatched + ); + assert_eq!( + pattern.matches_path(&cwd.join("dir/foo.js")), + PathGlobMatch::NotMatched + ); + } + // no leading dot slash + { + let pattern = PathOrPattern::from_relative(&cwd, "**/*.ts").unwrap(); + assert_eq!( + pattern.matches_path(&cwd.join("foo.ts")), + PathGlobMatch::Matched + ); + assert_eq!( + pattern.matches_path(&cwd.join("dir/foo.ts")), + PathGlobMatch::Matched + ); + assert_eq!( + pattern.matches_path(&cwd.join("foo.js")), + PathGlobMatch::NotMatched + ); + assert_eq!( + pattern.matches_path(&cwd.join("dir/foo.js")), + PathGlobMatch::NotMatched + ); + } + // exact file, leading dot slash + { + let pattern = PathOrPattern::from_relative(&cwd, "./foo.ts").unwrap(); + assert_eq!( + pattern.matches_path(&cwd.join("foo.ts")), + PathGlobMatch::Matched + ); + assert_eq!( + pattern.matches_path(&cwd.join("dir/foo.ts")), + PathGlobMatch::NotMatched + ); + assert_eq!( + pattern.matches_path(&cwd.join("foo.js")), + PathGlobMatch::NotMatched + ); + } + // exact file, no leading dot slash + { + let pattern = PathOrPattern::from_relative(&cwd, "foo.ts").unwrap(); + assert_eq!( + pattern.matches_path(&cwd.join("foo.ts")), + PathGlobMatch::Matched + ); + assert_eq!( 
+ pattern.matches_path(&cwd.join("dir/foo.ts")), + PathGlobMatch::NotMatched + ); + assert_eq!( + pattern.matches_path(&cwd.join("foo.js")), + PathGlobMatch::NotMatched + ); + } + // error for invalid url + { + let err = PathOrPattern::from_relative(&cwd, "https://raw.githubusercontent.com%2Fdyedgreen%2Fdeno-sqlite%2Frework_api%2Fmod.ts").unwrap_err(); + assert_eq!(format!("{:#}", err), "Invalid URL 'https://raw.githubusercontent.com%2Fdyedgreen%2Fdeno-sqlite%2Frework_api%2Fmod.ts'"); + assert_eq!( + format!("{:#}", err.source().unwrap()), + "invalid international domain name" + ); + } + // sibling dir + { + let pattern = PathOrPattern::from_relative(&cwd, "../sibling").unwrap(); + let parent_dir = cwd.parent().unwrap(); + assert_eq!(pattern.base_path().unwrap(), parent_dir.join("sibling")); + assert_eq!( + pattern.matches_path(&parent_dir.join("sibling/foo.ts")), + PathGlobMatch::Matched + ); + assert_eq!( + pattern.matches_path(&parent_dir.join("./other/foo.js")), + PathGlobMatch::NotMatched + ); + } + } + + #[test] + fn from_relative_dot_slash() { + let cwd = current_dir(); + let pattern = PathOrPattern::from_relative(&cwd, "./").unwrap(); + match pattern { + PathOrPattern::Path(p) => assert_eq!(p, cwd), + _ => unreachable!(), + } + } + + #[test] + fn new_ctor() { + let cwd = current_dir(); + for scheme in &["http", "https"] { + let url = format!("{}://deno.land/x/test", scheme); + let pattern = PathOrPattern::new(&url).unwrap(); + match pattern { + PathOrPattern::RemoteUrl(p) => { + assert_eq!(p.as_str(), url) + } + _ => unreachable!(), + } + } + for scheme in &["npm", "jsr"] { + let url = format!("{}:@denotest/basic", scheme); + let pattern = PathOrPattern::new(&url).unwrap(); + match pattern { + PathOrPattern::RemoteUrl(p) => { + assert_eq!(p.as_str(), url) + } + _ => unreachable!(), + } + } + { + let file_specifier = url_from_directory_path(&cwd).unwrap(); + let pattern = PathOrPattern::new(file_specifier.as_str()).unwrap(); + match pattern { + PathOrPattern::Path(p) => { + assert_eq!(p, cwd); + } + _ => { + unreachable!() + } + } + } + } + + #[test] + fn from_relative_specifier() { + let cwd = current_dir(); + for scheme in &["http", "https"] { + let url = format!("{}://deno.land/x/test", scheme); + let pattern = PathOrPattern::from_relative(&cwd, &url).unwrap(); + match pattern { + PathOrPattern::RemoteUrl(p) => { + assert_eq!(p.as_str(), url) + } + _ => unreachable!(), + } + } + for scheme in &["npm", "jsr"] { + let url = format!("{}:@denotest/basic", scheme); + let pattern = PathOrPattern::from_relative(&cwd, &url).unwrap(); + match pattern { + PathOrPattern::RemoteUrl(p) => { + assert_eq!(p.as_str(), url) + } + _ => unreachable!(), + } + } + { + let file_specifier = url_from_directory_path(&cwd).unwrap(); + let pattern = + PathOrPattern::from_relative(&cwd, file_specifier.as_str()).unwrap(); + match pattern { + PathOrPattern::Path(p) => { + assert_eq!(p, cwd); + } + _ => { + unreachable!() + } + } + } + } + + #[test] + fn negated_globs() { + #[allow(clippy::disallowed_methods)] + let cwd = current_dir(); + { + let pattern = GlobPattern::from_relative(&cwd, "!./**/*.ts").unwrap(); + assert!(pattern.is_negated()); + assert_eq!(pattern.base_path(), cwd); + assert!(pattern.as_str().starts_with('!')); + assert_eq!( + pattern.matches_path(&cwd.join("foo.ts")), + PathGlobMatch::MatchedNegated + ); + assert_eq!( + pattern.matches_path(&cwd.join("foo.js")), + PathGlobMatch::NotMatched + ); + let pattern = pattern.as_negated(); + assert!(!pattern.is_negated()); + 
assert_eq!(pattern.base_path(), cwd); + assert!(!pattern.as_str().starts_with('!')); + assert_eq!( + pattern.matches_path(&cwd.join("foo.ts")), + PathGlobMatch::Matched + ); + let pattern = pattern.as_negated(); + assert!(pattern.is_negated()); + assert_eq!(pattern.base_path(), cwd); + assert!(pattern.as_str().starts_with('!')); + assert_eq!( + pattern.matches_path(&cwd.join("foo.ts")), + PathGlobMatch::MatchedNegated + ); + } + } + + #[test] + fn test_is_glob_pattern() { + assert!(!is_glob_pattern("npm:@scope/pkg@*")); + assert!(!is_glob_pattern("jsr:@scope/pkg@*")); + assert!(!is_glob_pattern("https://deno.land/x/?")); + assert!(!is_glob_pattern("http://deno.land/x/?")); + assert!(!is_glob_pattern("file:///deno.land/x/?")); + assert!(is_glob_pattern("**/*.ts")); + assert!(is_glob_pattern("test/?")); + assert!(!is_glob_pattern("test/test")); + } + + fn current_dir() -> PathBuf { + // ok because this is test code + #[allow(clippy::disallowed_methods)] + std::env::current_dir().unwrap() + } +} diff --git a/libs/config/lib.rs b/libs/config/lib.rs new file mode 100644 index 0000000000..843a1572ff --- /dev/null +++ b/libs/config/lib.rs @@ -0,0 +1,20 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +#![deny(clippy::print_stderr)] +#![deny(clippy::print_stdout)] +#![deny(clippy::unused_async)] +#![deny(clippy::unnecessary_wraps)] + +#[cfg(feature = "deno_json")] +pub mod deno_json; +#[cfg(feature = "deno_json")] +pub mod glob; +#[cfg(feature = "deno_json")] +mod sync; +#[cfg(feature = "deno_json")] +mod util; +#[cfg(feature = "workspace")] +pub mod workspace; + +#[cfg(feature = "deno_json")] +pub use deno_path_util::UrlToFilePathError; diff --git a/libs/config/sync.rs b/libs/config/sync.rs new file mode 100644 index 0000000000..0d4b29178c --- /dev/null +++ b/libs/config/sync.rs @@ -0,0 +1,20 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
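+
+// `MaybeArc` is the crate's single reference-counted pointer alias: with the
+// `sync` feature enabled it is `std::sync::Arc` (thread-safe), and without it
+// it falls back to `std::rc::Rc`, so the rest of the crate can use
+// `MaybeArc<T>` and `new_rc` without caring which one is active.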
+ +pub use inner::*; + +#[cfg(feature = "sync")] +mod inner { + #![allow(clippy::disallowed_types)] + pub use std::sync::Arc as MaybeArc; +} + +#[cfg(not(feature = "sync"))] +mod inner { + pub use std::rc::Rc as MaybeArc; +} + +// ok for constructing +#[allow(clippy::disallowed_types)] +pub fn new_rc(value: T) -> MaybeArc { + MaybeArc::new(value) +} diff --git a/libs/config/testdata/additional_files/jsr.json b/libs/config/testdata/additional_files/jsr.json new file mode 100644 index 0000000000..597dacb790 --- /dev/null +++ b/libs/config/testdata/additional_files/jsr.json @@ -0,0 +1,3 @@ +{ + "name": "@foo/bar" +} \ No newline at end of file diff --git a/libs/config/testdata/deno.json b/libs/config/testdata/deno.json new file mode 100644 index 0000000000..60df565273 --- /dev/null +++ b/libs/config/testdata/deno.json @@ -0,0 +1 @@ +not a json file diff --git a/libs/config/testdata/fmt/with_config/deno.deprecated.jsonc b/libs/config/testdata/fmt/with_config/deno.deprecated.jsonc new file mode 100644 index 0000000000..e053233fd2 --- /dev/null +++ b/libs/config/testdata/fmt/with_config/deno.deprecated.jsonc @@ -0,0 +1,20 @@ +{ + "fmt": { + "files": { + "include": [ + "./subdir/" + ], + "exclude": [ + "./subdir/b.ts" + ] + }, + "options": { + "useTabs": true, + "lineWidth": 40, + "indentWidth": 8, + "singleQuote": true, + "proseWrap": "always", + "semiColons": false + } + } +} diff --git a/libs/config/testdata/fmt/with_config/deno.jsonc b/libs/config/testdata/fmt/with_config/deno.jsonc new file mode 100644 index 0000000000..ffd265dcdf --- /dev/null +++ b/libs/config/testdata/fmt/with_config/deno.jsonc @@ -0,0 +1,16 @@ +{ + "fmt": { + "include": [ + "./subdir/" + ], + "exclude": [ + "./subdir/b.ts" + ], + "useTabs": true, + "lineWidth": 40, + "indentWidth": 8, + "singleQuote": true, + "proseWrap": "always", + "semiColons": false + } +} diff --git a/libs/config/testdata/fmt/with_config/subdir/a.ts b/libs/config/testdata/fmt/with_config/subdir/a.ts new file mode 100644 index 0000000000..5474b3aa37 --- /dev/null +++ b/libs/config/testdata/fmt/with_config/subdir/a.ts @@ -0,0 +1,46 @@ +Deno.test( + { perms: { net: true } }, + async function responseClone() { + const response = + await fetch( + 'http://localhost:4545/assets/fixture.json', + ) + const response1 = + response.clone() + assert( + response !== + response1, + ) + assertEquals( + response.status, + response1 + .status, + ) + assertEquals( + response.statusText, + response1 + .statusText, + ) + const u8a = + new Uint8Array( + await response + .arrayBuffer(), + ) + const u8a1 = + new Uint8Array( + await response1 + .arrayBuffer(), + ) + for ( + let i = 0; + i < + u8a.byteLength; + i++ + ) { + assertEquals( + u8a[i], + u8a1[i], + ) + } + }, +) diff --git a/libs/config/testdata/fmt/with_config/subdir/b.ts b/libs/config/testdata/fmt/with_config/subdir/b.ts new file mode 100644 index 0000000000..d7eb08b094 --- /dev/null +++ b/libs/config/testdata/fmt/with_config/subdir/b.ts @@ -0,0 +1,15 @@ +// This file should be excluded from formatting +Deno.test( + { perms: { net: true } }, + async function fetchBodyUsedCancelStream() { + const response = await fetch( + "http://localhost:4545/assets/fixture.json", + ); + assert(response.body !== null); + + assertEquals(response.bodyUsed, false); + const promise = response.body.cancel(); + assertEquals(response.bodyUsed, true); + await promise; + }, +); \ No newline at end of file diff --git a/libs/config/testdata/fmt/with_config/subdir/c.md b/libs/config/testdata/fmt/with_config/subdir/c.md new file mode 100644 
index 0000000000..012f7e3d49 --- /dev/null +++ b/libs/config/testdata/fmt/with_config/subdir/c.md @@ -0,0 +1,17 @@ +## Permissions + +Deno is secure by default. Therefore, +unless you specifically enable it, a +program run with Deno has no file, +network, or environment access. Access +to security sensitive functionality +requires that permisisons have been +granted to an executing script through +command line flags, or a runtime +permission prompt. + +For the following example `mod.ts` has +been granted read-only access to the +file system. It cannot write to the file +system, or perform any other security +sensitive functions. diff --git a/libs/config/testdata/module_graph/tsconfig.json b/libs/config/testdata/module_graph/tsconfig.json new file mode 100644 index 0000000000..a4c5f4f33d --- /dev/null +++ b/libs/config/testdata/module_graph/tsconfig.json @@ -0,0 +1,6 @@ +{ + "compilerOptions": { + "target": "ES5", + "jsx": "preserve" + } +} diff --git a/libs/config/util.rs b/libs/config/util.rs new file mode 100644 index 0000000000..0204d22a83 --- /dev/null +++ b/libs/config/util.rs @@ -0,0 +1,32 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +pub fn is_skippable_io_error(e: &std::io::Error) -> bool { + use std::io::ErrorKind::*; + + // skip over invalid filenames on windows + const ERROR_INVALID_NAME: i32 = 123; + if cfg!(windows) && e.raw_os_error() == Some(ERROR_INVALID_NAME) { + return true; + } + + match e.kind() { + InvalidInput | PermissionDenied | NotFound => { + // ok keep going + true + } + _ => { + const NOT_A_DIRECTORY: i32 = 20; + cfg!(unix) && e.raw_os_error() == Some(NOT_A_DIRECTORY) + } + } +} + +#[cfg(test)] +mod tests { + #[cfg(windows)] + #[test] + fn is_skippable_io_error_win_invalid_filename() { + let error = std::io::Error::from_raw_os_error(123); + assert!(super::is_skippable_io_error(&error)); + } +} diff --git a/libs/config/workspace/discovery.rs b/libs/config/workspace/discovery.rs new file mode 100644 index 0000000000..82ea4db696 --- /dev/null +++ b/libs/config/workspace/discovery.rs @@ -0,0 +1,1094 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
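+
+// Overview (summarized from the code below): discovery starts from one or more
+// directories or an explicit config file, clips the start path to above any
+// `node_modules` folder, then walks up the ancestor directories collecting
+// `deno.json`/`deno.jsonc` and `package.json` "config folders". The first
+// ancestor that declares workspace members becomes the workspace root;
+// otherwise the closest config folder found is treated as a single-folder
+// workspace. Optional deno.json/package.json/workspace caches are consulted to
+// short-circuit repeated discovery over the same directories.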
+ +use std::borrow::Cow; +use std::collections::BTreeMap; +use std::collections::HashMap; +use std::collections::HashSet; +use std::path::Path; +use std::path::PathBuf; + +use deno_package_json::PackageJson; +use deno_package_json::PackageJsonLoadError; +use deno_package_json::PackageJsonRc; +use deno_path_util::url_from_directory_path; +use deno_path_util::url_from_file_path; +use deno_path_util::url_parent; +use deno_path_util::url_to_file_path; +use indexmap::IndexSet; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; +use url::Url; + +use super::ResolveWorkspaceLinkError; +use super::ResolveWorkspaceLinkErrorKind; +use super::ResolveWorkspaceMemberError; +use super::ResolveWorkspaceMemberErrorKind; +use super::UrlRc; +use super::VendorEnablement; +use super::WorkspaceDiscoverError; +use super::WorkspaceDiscoverErrorKind; +use super::WorkspaceDiscoverOptions; +use super::WorkspaceDiscoverStart; +use super::WorkspaceRc; +use crate::deno_json::ConfigFile; +use crate::deno_json::ConfigFileRc; +use crate::glob::is_glob_pattern; +use crate::glob::FileCollector; +use crate::glob::FilePatterns; +use crate::glob::PathOrPattern; +use crate::glob::PathOrPatternSet; +use crate::sync::new_rc; +use crate::util::is_skippable_io_error; +use crate::workspace::ConfigReadError; +use crate::workspace::Workspace; + +#[derive(Debug)] +pub enum DenoOrPkgJson { + Deno(ConfigFileRc), + PkgJson(PackageJsonRc), +} + +impl DenoOrPkgJson { + pub fn specifier(&self) -> Cow { + match self { + Self::Deno(config) => Cow::Borrowed(&config.specifier), + Self::PkgJson(pkg_json) => Cow::Owned(pkg_json.specifier()), + } + } +} + +#[derive(Debug)] +pub enum ConfigFolder { + Single(DenoOrPkgJson), + Both { + deno_json: ConfigFileRc, + pkg_json: PackageJsonRc, + }, +} + +impl ConfigFolder { + pub fn folder_url(&self) -> Url { + match self { + Self::Single(DenoOrPkgJson::Deno(config)) => { + url_parent(&config.specifier) + } + Self::Single(DenoOrPkgJson::PkgJson(pkg_json)) => { + url_from_directory_path(pkg_json.path.parent().unwrap()).unwrap() + } + Self::Both { deno_json, .. } => url_parent(&deno_json.specifier), + } + } + + pub fn has_workspace_members(&self) -> bool { + match self { + Self::Single(DenoOrPkgJson::Deno(config)) => { + config.json.workspace.is_some() + } + Self::Single(DenoOrPkgJson::PkgJson(pkg_json)) => { + pkg_json.workspaces.is_some() + } + Self::Both { + deno_json, + pkg_json, + } => deno_json.json.workspace.is_some() || pkg_json.workspaces.is_some(), + } + } + + pub fn deno_json(&self) -> Option<&ConfigFileRc> { + match self { + Self::Single(DenoOrPkgJson::Deno(deno_json)) => Some(deno_json), + Self::Both { deno_json, .. } => Some(deno_json), + _ => None, + } + } + + pub fn pkg_json(&self) -> Option<&PackageJsonRc> { + match self { + Self::Single(DenoOrPkgJson::PkgJson(pkg_json)) => Some(pkg_json), + Self::Both { pkg_json, .. 
} => Some(pkg_json), + _ => None, + } + } + + pub fn from_maybe_both( + maybe_deno_json: Option, + maybe_pkg_json: Option, + ) -> Option { + match (maybe_deno_json, maybe_pkg_json) { + (Some(deno_json), Some(pkg_json)) => Some(Self::Both { + deno_json, + pkg_json, + }), + (Some(deno_json), None) => { + Some(Self::Single(DenoOrPkgJson::Deno(deno_json))) + } + (None, Some(pkg_json)) => { + Some(Self::Single(DenoOrPkgJson::PkgJson(pkg_json))) + } + (None, None) => None, + } + } +} + +#[derive(Debug)] +pub enum ConfigFileDiscovery { + None { maybe_vendor_dir: Option }, + Workspace { workspace: WorkspaceRc }, +} + +impl ConfigFileDiscovery { + fn root_config_specifier(&self) -> Option> { + match self { + Self::None { .. } => None, + Self::Workspace { workspace, .. } => { + let root_folder_configs = workspace.root_folder_configs(); + if let Some(deno_json) = &root_folder_configs.deno_json { + return Some(Cow::Borrowed(&deno_json.specifier)); + } + if let Some(pkg_json) = &root_folder_configs.pkg_json { + return Some(Cow::Owned(pkg_json.specifier())); + } + None + } + } + } +} + +fn config_folder_config_specifier(res: &ConfigFolder) -> Cow { + match res { + ConfigFolder::Single(config) => config.specifier(), + ConfigFolder::Both { deno_json, .. } => Cow::Borrowed(&deno_json.specifier), + } +} + +pub fn discover_workspace_config_files< + TSys: FsRead + FsMetadata + FsReadDir, +>( + sys: &TSys, + start: WorkspaceDiscoverStart, + opts: &WorkspaceDiscoverOptions, +) -> Result { + match start { + WorkspaceDiscoverStart::Paths(dirs) => match dirs.len() { + 0 => Ok(ConfigFileDiscovery::None { + maybe_vendor_dir: resolve_vendor_dir( + None, + opts.maybe_vendor_override.as_ref(), + ), + }), + 1 => { + let dir = &dirs[0]; + let start = DirOrConfigFile::Dir(dir); + discover_workspace_config_files_for_single_dir(sys, start, opts, None) + } + _ => { + let mut checked = HashSet::default(); + let mut final_workspace = ConfigFileDiscovery::None { + maybe_vendor_dir: resolve_vendor_dir( + None, + opts.maybe_vendor_override.as_ref(), + ), + }; + for dir in dirs { + let workspace = discover_workspace_config_files_for_single_dir( + sys, + DirOrConfigFile::Dir(dir), + opts, + Some(&mut checked), + )?; + if let Some(root_config_specifier) = workspace.root_config_specifier() + { + if let Some(final_workspace_config_specifier) = + final_workspace.root_config_specifier() + { + return Err(WorkspaceDiscoverError( + WorkspaceDiscoverErrorKind::MultipleWorkspaces { + base_workspace_url: final_workspace_config_specifier + .into_owned(), + other_workspace_url: root_config_specifier.into_owned(), + } + .into(), + )); + } + final_workspace = workspace; + } + } + Ok(final_workspace) + } + }, + WorkspaceDiscoverStart::ConfigFile(file) => { + let start = DirOrConfigFile::ConfigFile(file); + discover_workspace_config_files_for_single_dir(sys, start, opts, None) + } + } +} + +#[derive(Debug, Clone, Copy)] +enum DirOrConfigFile<'a> { + Dir(&'a Path), + ConfigFile(&'a Path), +} + +fn discover_workspace_config_files_for_single_dir< + TSys: FsRead + FsMetadata + FsReadDir, +>( + sys: &TSys, + start: DirOrConfigFile, + opts: &WorkspaceDiscoverOptions, + mut checked: Option<&mut HashSet>, +) -> Result { + fn strip_up_to_node_modules(path: &Path) -> PathBuf { + path + .components() + .take_while(|component| match component { + std::path::Component::Normal(name) => { + name.to_string_lossy() != "node_modules" + } + _ => true, + }) + .collect() + } + + if opts.workspace_cache.is_some() { + // it doesn't really make sense to use a workspace 
cache without config + // caches because that would mean the configs might change between calls + // causing strange behavior, so panic if someone does this + assert!( + opts.deno_json_cache.is_some() && opts.pkg_json_cache.is_some(), + "Using a workspace cache requires setting the deno.json and package.json caches" + ); + } + + let start_dir: Option<&Path>; + let mut first_config_folder_url: Option = None; + let mut found_config_folders: HashMap<_, ConfigFolder> = HashMap::new(); + let config_file_names = + ConfigFile::resolve_config_file_names(opts.additional_config_file_names); + let load_pkg_json_in_folder = |folder_path: &Path| { + if opts.discover_pkg_json { + let pkg_json_path = folder_path.join("package.json"); + match PackageJson::load_from_path( + sys, + opts.pkg_json_cache, + &pkg_json_path, + ) { + Ok(pkg_json) => { + log::debug!( + "package.json file found at '{}'", + pkg_json_path.display() + ); + Ok(Some(pkg_json)) + } + Err(PackageJsonLoadError::Io { source, .. }) + if is_skippable_io_error(&source) => + { + Ok(None) + } + Err(err) => Err(err), + } + } else { + Ok(None) + } + }; + let load_config_folder = |folder_path: &Path| -> Result<_, ConfigReadError> { + let maybe_config_file = ConfigFile::maybe_find_in_folder( + sys, + opts.deno_json_cache, + folder_path, + &config_file_names, + )?; + let maybe_pkg_json = load_pkg_json_in_folder(folder_path)?; + Ok(ConfigFolder::from_maybe_both( + maybe_config_file, + maybe_pkg_json, + )) + }; + match start { + DirOrConfigFile::Dir(dir) => { + start_dir = Some(dir); + } + DirOrConfigFile::ConfigFile(file) => { + let specifier = url_from_file_path(file)?; + let config_file = new_rc( + ConfigFile::from_specifier(sys, specifier.clone()) + .map_err(ConfigReadError::DenoJsonRead)?, + ); + + // see what config would be loaded if we just specified the parent directory + let natural_config_folder_result = + load_config_folder(file.parent().unwrap()); + let matching_config_folder = match natural_config_folder_result { + Ok(Some(natual_config_folder)) => { + if natual_config_folder + .deno_json() + .is_some_and(|d| d.specifier == config_file.specifier) + { + Some(natual_config_folder) + } else { + None + } + } + Ok(None) | Err(_) => None, + }; + + let parent_dir_url = url_parent(&config_file.specifier); + let config_folder = match matching_config_folder { + Some(config_folder) => config_folder, + None => { + // when loading the directory we would have loaded something else, so + // don't try to load a workspace and don't store this information in + // the workspace cache + let config_folder = + ConfigFolder::Single(DenoOrPkgJson::Deno(config_file)); + + if config_folder.has_workspace_members() { + return handle_workspace_folder_with_members( + sys, + config_folder, + Some(&parent_dir_url), + opts, + found_config_folders, + &load_config_folder, + ); + } + + let maybe_vendor_dir = resolve_vendor_dir( + config_folder.deno_json().map(|d| d.as_ref()), + opts.maybe_vendor_override.as_ref(), + ); + let links = resolve_link_config_folders( + sys, + &config_folder, + load_config_folder, + )?; + return Ok(ConfigFileDiscovery::Workspace { + workspace: new_rc(Workspace::new( + config_folder, + Default::default(), + links, + maybe_vendor_dir, + )), + }); + } + }; + + if let Some(workspace_cache) = &opts.workspace_cache { + if let Some(workspace) = workspace_cache.get(&config_file.dir_path()) { + if cfg!(debug_assertions) { + let expected_vendor_dir = resolve_vendor_dir( + config_folder.deno_json().map(|d| d.as_ref()), + opts.maybe_vendor_override.as_ref(), 
+ ); + debug_assert_eq!( + expected_vendor_dir, workspace.vendor_dir, + "should not be using a different vendor dir across calls" + ); + } + return Ok(ConfigFileDiscovery::Workspace { + workspace: workspace.clone(), + }); + } + } + + if config_folder.has_workspace_members() { + return handle_workspace_folder_with_members( + sys, + config_folder, + Some(&parent_dir_url), + opts, + found_config_folders, + &load_config_folder, + ); + } + + found_config_folders.insert(parent_dir_url.clone(), config_folder); + first_config_folder_url = Some(parent_dir_url); + // start searching for a workspace in the parent directory + start_dir = file.parent().and_then(|p| p.parent()); + } + } + // do not auto-discover inside the node_modules folder (ex. when a + // user is running something directly within there) + let start_dir = start_dir.map(strip_up_to_node_modules); + for current_dir in start_dir.iter().flat_map(|p| p.ancestors()) { + if let Some(checked) = checked.as_mut() { + if !checked.insert(current_dir.to_path_buf()) { + // already visited here, so exit + return Ok(ConfigFileDiscovery::None { + maybe_vendor_dir: resolve_vendor_dir( + None, + opts.maybe_vendor_override.as_ref(), + ), + }); + } + } + + if let Some(workspace_with_members) = opts + .workspace_cache + .and_then(|c| c.get(current_dir)) + .filter(|w| w.config_folders.len() > 1) + { + if cfg!(debug_assertions) { + let expected_vendor_dir = resolve_vendor_dir( + workspace_with_members.root_deno_json().map(|d| d.as_ref()), + opts.maybe_vendor_override.as_ref(), + ); + debug_assert_eq!( + expected_vendor_dir, workspace_with_members.vendor_dir, + "should not be using a different vendor dir across calls" + ); + } + + return handle_workspace_with_members( + sys, + workspace_with_members, + first_config_folder_url.as_ref(), + found_config_folders, + opts, + load_config_folder, + ); + } + + let maybe_config_folder = load_config_folder(current_dir)?; + let Some(root_config_folder) = maybe_config_folder else { + continue; + }; + if root_config_folder.has_workspace_members() { + return handle_workspace_folder_with_members( + sys, + root_config_folder, + first_config_folder_url.as_ref(), + opts, + found_config_folders, + &load_config_folder, + ); + } + + let config_folder_url = root_config_folder.folder_url(); + if first_config_folder_url.is_none() { + if let Some(workspace_cache) = &opts.workspace_cache { + if let Some(workspace) = workspace_cache.get(current_dir) { + if cfg!(debug_assertions) { + let expected_vendor_dir = resolve_vendor_dir( + root_config_folder.deno_json().map(|d| d.as_ref()), + opts.maybe_vendor_override.as_ref(), + ); + debug_assert_eq!( + expected_vendor_dir, workspace.vendor_dir, + "should not be using a different vendor dir across calls" + ); + } + return Ok(ConfigFileDiscovery::Workspace { + workspace: workspace.clone(), + }); + } + } + + first_config_folder_url = Some(config_folder_url.clone()); + } + found_config_folders.insert(config_folder_url, root_config_folder); + } + + if let Some(first_config_folder_url) = first_config_folder_url { + let config_folder = found_config_folders + .remove(&first_config_folder_url) + .unwrap(); + let maybe_vendor_dir = resolve_vendor_dir( + config_folder.deno_json().map(|d| d.as_ref()), + opts.maybe_vendor_override.as_ref(), + ); + let link = + resolve_link_config_folders(sys, &config_folder, load_config_folder)?; + let workspace = new_rc(Workspace::new( + config_folder, + Default::default(), + link, + maybe_vendor_dir, + )); + if let Some(cache) = opts.workspace_cache { + 
cache.set(workspace.root_dir_path(), workspace.clone()); + } + Ok(ConfigFileDiscovery::Workspace { workspace }) + } else { + Ok(ConfigFileDiscovery::None { + maybe_vendor_dir: resolve_vendor_dir( + None, + opts.maybe_vendor_override.as_ref(), + ), + }) + } +} + +fn handle_workspace_folder_with_members< + TSys: FsRead + FsMetadata + FsReadDir, +>( + sys: &TSys, + root_config_folder: ConfigFolder, + first_config_folder_url: Option<&Url>, + opts: &WorkspaceDiscoverOptions<'_>, + mut found_config_folders: HashMap, + load_config_folder: &impl Fn( + &Path, + ) -> Result, ConfigReadError>, +) -> Result { + let maybe_vendor_dir = resolve_vendor_dir( + root_config_folder.deno_json().map(|d| d.as_ref()), + opts.maybe_vendor_override.as_ref(), + ); + let raw_root_workspace = resolve_workspace_for_config_folder( + sys, + root_config_folder, + maybe_vendor_dir, + &mut found_config_folders, + load_config_folder, + )?; + let links = resolve_link_config_folders( + sys, + &raw_root_workspace.root, + load_config_folder, + )?; + let root_workspace = new_rc(Workspace::new( + raw_root_workspace.root, + raw_root_workspace.members, + links, + raw_root_workspace.vendor_dir, + )); + if let Some(cache) = opts.workspace_cache { + cache.set(root_workspace.root_dir_path(), root_workspace.clone()); + } + handle_workspace_with_members( + sys, + root_workspace, + first_config_folder_url, + found_config_folders, + opts, + load_config_folder, + ) +} + +fn handle_workspace_with_members( + sys: &TSys, + root_workspace: WorkspaceRc, + first_config_folder_url: Option<&Url>, + mut found_config_folders: HashMap, + opts: &WorkspaceDiscoverOptions, + load_config_folder: impl Fn( + &Path, + ) -> Result, ConfigReadError>, +) -> Result { + let is_root_deno_json_workspace = root_workspace + .root_deno_json() + .map(|d| d.json.workspace.is_some()) + .unwrap_or(false); + // if the root was an npm workspace that doesn't have the start config + // as a member then only resolve the start config + if !is_root_deno_json_workspace { + if let Some(first_config_folder) = &first_config_folder_url { + if !root_workspace + .config_folders + .contains_key(*first_config_folder) + { + if let Some(config_folder) = + found_config_folders.remove(first_config_folder) + { + let maybe_vendor_dir = resolve_vendor_dir( + config_folder.deno_json().map(|d| d.as_ref()), + opts.maybe_vendor_override.as_ref(), + ); + let links = resolve_link_config_folders( + sys, + &config_folder, + load_config_folder, + )?; + let workspace = new_rc(Workspace::new( + config_folder, + Default::default(), + links, + maybe_vendor_dir, + )); + if let Some(cache) = opts.workspace_cache { + cache.set(workspace.root_dir_path(), workspace.clone()); + } + return Ok(ConfigFileDiscovery::Workspace { workspace }); + } + } + } + } + + if is_root_deno_json_workspace { + for (key, config_folder) in &found_config_folders { + if !root_workspace.config_folders.contains_key(key) { + return Err( + WorkspaceDiscoverErrorKind::ConfigNotWorkspaceMember { + workspace_url: (**root_workspace.root_dir()).clone(), + config_url: config_folder_config_specifier(config_folder) + .into_owned(), + } + .into(), + ); + } + } + } + + // ensure no duplicate names in deno configuration files + let mut seen_names: HashMap<&str, &Url> = + HashMap::with_capacity(root_workspace.config_folders.len() + 1); + for deno_json in root_workspace.deno_jsons() { + if let Some(name) = deno_json.json.name.as_deref() { + if let Some(other_member_url) = seen_names.get(name) { + return Err( + 
ResolveWorkspaceMemberErrorKind::DuplicatePackageName { + name: name.to_string(), + deno_json_url: deno_json.specifier.clone(), + other_deno_json_url: (*other_member_url).clone(), + } + .into_box() + .into(), + ); + } else { + seen_names.insert(name, &deno_json.specifier); + } + } + } + + Ok(ConfigFileDiscovery::Workspace { + workspace: root_workspace, + }) +} + +struct RawResolvedWorkspace { + root: ConfigFolder, + members: BTreeMap, + vendor_dir: Option, +} + +fn resolve_workspace_for_config_folder< + TSys: FsRead + FsMetadata + FsReadDir, +>( + sys: &TSys, + root_config_folder: ConfigFolder, + maybe_vendor_dir: Option, + found_config_folders: &mut HashMap, + load_config_folder: impl Fn( + &Path, + ) -> Result, ConfigReadError>, +) -> Result { + let mut final_members = BTreeMap::new(); + let root_config_file_directory_url = root_config_folder.folder_url(); + let resolve_member_url = + |raw_member: &str| -> Result { + let member = ensure_trailing_slash(raw_member); + let member_dir_url = root_config_file_directory_url + .join(&member) + .map_err(|err| { + ResolveWorkspaceMemberErrorKind::InvalidMember { + base: root_config_folder.folder_url(), + member: raw_member.to_owned(), + source: err, + } + .into_box() + })?; + Ok(member_dir_url) + }; + let validate_member_url_is_descendant = + |member_dir_url: &Url| -> Result<(), ResolveWorkspaceMemberError> { + if !member_dir_url + .as_str() + .starts_with(root_config_file_directory_url.as_str()) + { + return Err( + ResolveWorkspaceMemberErrorKind::NonDescendant { + workspace_url: root_config_file_directory_url.clone(), + member_url: member_dir_url.clone(), + } + .into_box(), + ); + } + Ok(()) + }; + let mut find_member_config_folder = + |member_dir_url: &Url| -> Result<_, ResolveWorkspaceMemberError> { + // try to find the config folder in memory from the configs we already + // found on the file system + if let Some(config_folder) = found_config_folders.remove(member_dir_url) { + return Ok(config_folder); + } + + let maybe_config_folder = + load_config_folder(&url_to_file_path(member_dir_url)?)?; + maybe_config_folder.ok_or_else(|| { + // it's fine this doesn't use all the possible config file names + // as this is only used to enhance the error message + if member_dir_url.as_str().ends_with("/deno.json/") + || member_dir_url.as_str().ends_with("/deno.jsonc/") + || member_dir_url.as_str().ends_with("/package.json/") + { + ResolveWorkspaceMemberErrorKind::NotFoundMaybeSpecifiedFile { + dir_url: member_dir_url.clone(), + } + .into_box() + } else { + ResolveWorkspaceMemberErrorKind::NotFound { + dir_url: member_dir_url.clone(), + } + .into_box() + } + }) + }; + + let collect_member_config_folders = + |kind: &'static str, + pattern_members: Vec<&String>, + dir_path: &Path, + config_file_names: &'static [&'static str]| + -> Result, WorkspaceDiscoverErrorKind> { + let patterns = pattern_members + .iter() + .flat_map(|raw_member| { + config_file_names.iter().map(|config_file_name| { + PathOrPattern::from_relative( + dir_path, + &format!( + "{}{}", + ensure_trailing_slash(raw_member), + config_file_name + ), + ) + .map_err(|err| { + ResolveWorkspaceMemberErrorKind::MemberToPattern { + kind, + base: root_config_file_directory_url.clone(), + member: raw_member.to_string(), + source: err, + } + .into_box() + }) + }) + }) + .collect::, _>>()?; + + let paths = if patterns.is_empty() { + Vec::new() + } else { + FileCollector::new(|_| true) + .ignore_git_folder() + .ignore_node_modules() + .set_vendor_folder(maybe_vendor_dir.clone()) + .collect_file_patterns( 
+ sys, + FilePatterns { + base: dir_path.to_path_buf(), + include: Some(PathOrPatternSet::new(patterns)), + exclude: PathOrPatternSet::new(Vec::new()), + }, + ) + }; + + Ok(paths) + }; + + if let Some(deno_json) = root_config_folder.deno_json() { + if let Some(workspace_config) = deno_json.to_workspace_config()? { + let (pattern_members, path_members): (Vec<_>, Vec<_>) = workspace_config + .members + .iter() + .partition(|member| is_glob_pattern(member) || member.starts_with('!')); + + // Deno workspaces can discover wildcard members that use either `deno.json`, `deno.jsonc` or `package.json`. + // But it only works for Deno workspaces, npm workspaces don't discover `deno.json(c)` files, otherwise + // we'd be incompatible with npm workspaces if we discovered more files. + let deno_json_paths = collect_member_config_folders( + "Deno", + pattern_members, + &deno_json.dir_path(), + &["deno.json", "deno.jsonc", "package.json"], + )?; + + let mut member_dir_urls = + IndexSet::with_capacity(path_members.len() + deno_json_paths.len()); + for path_member in path_members { + let member_dir_url = resolve_member_url(path_member)?; + member_dir_urls.insert((path_member.clone(), member_dir_url)); + } + for deno_json_path in deno_json_paths { + let member_dir_url = + url_from_directory_path(deno_json_path.parent().unwrap()).unwrap(); + member_dir_urls.insert(( + deno_json_path + .parent() + .unwrap() + .to_string_lossy() + .to_string(), + member_dir_url, + )); + } + + for (raw_member, member_dir_url) in member_dir_urls { + if member_dir_url == root_config_file_directory_url { + return Err( + ResolveWorkspaceMemberErrorKind::InvalidSelfReference { + member: raw_member.to_string(), + } + .into_box() + .into(), + ); + } + validate_member_url_is_descendant(&member_dir_url)?; + let member_config_folder = find_member_config_folder(&member_dir_url)?; + let previous_member = final_members + .insert(new_rc(member_dir_url.clone()), member_config_folder); + if previous_member.is_some() { + return Err( + ResolveWorkspaceMemberErrorKind::Duplicate { + member: raw_member.to_string(), + } + .into_box() + .into(), + ); + } + } + } + } + if let Some(pkg_json) = root_config_folder.pkg_json() { + if let Some(members) = &pkg_json.workspaces { + let (pattern_members, path_members): (Vec<_>, Vec<_>) = members + .iter() + .partition(|member| is_glob_pattern(member) || member.starts_with('!')); + + // npm workspaces can discover wildcard members `package.json` files, but not `deno.json(c)` files, otherwise + // we'd be incompatible with npm workspaces if we discovered more files than just `package.json`. 
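
// A minimal sketch of the member partitioning used in both branches above
// (hypothetical helper; the real code uses this crate's `is_glob_pattern`):
// glob entries such as "packages/*" and '!'-prefixed entries go through the
// glob-based file collector, while plain paths like "./tools" are resolved
// directly against the workspace root. Note the asymmetry documented above:
// a deno.json "workspace" wildcard may match deno.json, deno.jsonc or
// package.json members, while a package.json "workspaces" wildcard only ever
// matches package.json members.
fn partition_members(members: &[String]) -> (Vec<&String>, Vec<&String>) {
  members
    .iter()
    .partition(|member| member.contains('*') || member.starts_with('!'))
}
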
+ let pkg_json_paths = collect_member_config_folders( + "npm", + pattern_members, + pkg_json.dir_path(), + &["package.json"], + )?; + + let mut member_dir_urls = + IndexSet::with_capacity(path_members.len() + pkg_json_paths.len()); + for path_member in path_members { + let member_dir_url = resolve_member_url(path_member)?; + member_dir_urls.insert(member_dir_url); + } + for pkg_json_path in pkg_json_paths { + let member_dir_url = + url_from_directory_path(pkg_json_path.parent().unwrap())?; + member_dir_urls.insert(member_dir_url); + } + + for member_dir_url in member_dir_urls { + if member_dir_url == root_config_file_directory_url { + continue; // ignore self references + } + validate_member_url_is_descendant(&member_dir_url)?; + let member_config_folder = + match find_member_config_folder(&member_dir_url) { + Ok(config_folder) => config_folder, + Err(err) => { + return Err( + match err.into_kind() { + ResolveWorkspaceMemberErrorKind::NotFound { dir_url } => { + // enhance the error to say we didn't find a package.json + ResolveWorkspaceMemberErrorKind::NotFoundPackageJson { + dir_url, + } + .into_box() + } + err => err.into_box(), + } + .into(), + ); + } + }; + if member_config_folder.pkg_json().is_none() { + return Err( + ResolveWorkspaceMemberErrorKind::NotFoundPackageJson { + dir_url: member_dir_url, + } + .into_box() + .into(), + ); + } + // don't surface errors about duplicate members for + // package.json workspace members + final_members.insert(new_rc(member_dir_url), member_config_folder); + } + } + } + + Ok(RawResolvedWorkspace { + root: root_config_folder, + members: final_members, + vendor_dir: maybe_vendor_dir, + }) +} + +fn resolve_link_config_folders( + sys: &TSys, + root_config_folder: &ConfigFolder, + load_config_folder: impl Fn( + &Path, + ) -> Result, ConfigReadError>, +) -> Result, WorkspaceDiscoverError> { + let Some(workspace_deno_json) = root_config_folder.deno_json() else { + return Ok(Default::default()); + }; + let Some(link_members) = workspace_deno_json.to_link_config()? 
else { + return Ok(Default::default()); + }; + let root_config_file_directory_url = root_config_folder.folder_url(); + let resolve_link_dir_url = + |raw_link: &str| -> Result { + let link = ensure_trailing_slash(raw_link); + // support someone specifying an absolute path + if !cfg!(windows) && link.starts_with('/') + || cfg!(windows) && link.chars().any(|c| c == '\\') + { + if let Ok(value) = + deno_path_util::url_from_file_path(&PathBuf::from(link.as_ref())) + { + return Ok(value); + } + } + let link_dir_url = + root_config_file_directory_url.join(&link).map_err(|err| { + WorkspaceDiscoverErrorKind::ResolveLink { + base: root_config_file_directory_url.clone(), + link: raw_link.to_owned(), + source: err.into(), + } + })?; + Ok(link_dir_url) + }; + let mut final_config_folders = BTreeMap::new(); + for raw_member in &link_members { + let link_dir_url = resolve_link_dir_url(raw_member)?; + let link_configs = resolve_link_member_config_folders( + sys, + &link_dir_url, + &load_config_folder, + ) + .map_err(|err| WorkspaceDiscoverErrorKind::ResolveLink { + base: root_config_file_directory_url.clone(), + link: raw_member.to_string(), + source: err, + })?; + + for link_config_url in link_configs.keys() { + if *link_config_url.as_ref() == root_config_file_directory_url { + return Err(WorkspaceDiscoverError( + WorkspaceDiscoverErrorKind::ResolveLink { + base: root_config_file_directory_url.clone(), + link: raw_member.to_string(), + source: ResolveWorkspaceLinkErrorKind::WorkspaceMemberNotAllowed + .into_box(), + } + .into(), + )); + } + } + + final_config_folders.extend(link_configs); + } + + Ok(final_config_folders) +} + +fn resolve_link_member_config_folders( + sys: &TSys, + link_dir_url: &Url, + load_config_folder: impl Fn( + &Path, + ) -> Result, ConfigReadError>, +) -> Result, ResolveWorkspaceLinkError> { + let link_dir_path = url_to_file_path(link_dir_url)?; + let maybe_config_folder = load_config_folder(&link_dir_path)?; + let Some(config_folder) = maybe_config_folder else { + return Err( + ResolveWorkspaceLinkErrorKind::NotFound { + dir_url: link_dir_url.clone(), + } + .into_box(), + ); + }; + if config_folder.has_workspace_members() { + let maybe_vendor_dir = + resolve_vendor_dir(config_folder.deno_json().map(|d| d.as_ref()), None); + let mut raw_workspace = resolve_workspace_for_config_folder( + sys, + config_folder, + maybe_vendor_dir, + &mut HashMap::new(), + &load_config_folder, + ) + .map_err(|err| ResolveWorkspaceLinkErrorKind::Workspace(Box::new(err)))?; + raw_workspace + .members + .insert(new_rc(raw_workspace.root.folder_url()), raw_workspace.root); + Ok(raw_workspace.members) + } else { + // attempt to find the root workspace directory + for ancestor in link_dir_path.ancestors().skip(1) { + let Ok(Some(config_folder)) = load_config_folder(ancestor) else { + continue; + }; + if config_folder.has_workspace_members() { + let maybe_vendor_dir = resolve_vendor_dir( + config_folder.deno_json().map(|d| d.as_ref()), + None, + ); + let Ok(mut raw_workspace) = resolve_workspace_for_config_folder( + sys, + config_folder, + maybe_vendor_dir, + &mut HashMap::new(), + &load_config_folder, + ) else { + continue; + }; + if raw_workspace.members.contains_key(link_dir_url) { + raw_workspace.members.insert( + new_rc(raw_workspace.root.folder_url()), + raw_workspace.root, + ); + return Ok(raw_workspace.members); + } + } + } + Ok(BTreeMap::from([( + new_rc(link_dir_url.clone()), + config_folder, + )])) + } +} + +fn resolve_vendor_dir( + maybe_deno_json: Option<&ConfigFile>, + maybe_vendor_override: 
Option<&VendorEnablement>, +) -> Option { + if let Some(vendor_folder_override) = maybe_vendor_override { + match vendor_folder_override { + VendorEnablement::Disable => None, + VendorEnablement::Enable { cwd } => match maybe_deno_json { + Some(c) => Some(c.dir_path().join("vendor")), + None => Some(cwd.join("vendor")), + }, + } + } else { + let deno_json = maybe_deno_json?; + if deno_json.vendor() == Some(true) { + Some(deno_json.dir_path().join("vendor")) + } else { + None + } + } +} + +fn ensure_trailing_slash(path: &str) -> Cow { + if !path.ends_with('/') { + Cow::Owned(format!("{}/", path)) + } else { + Cow::Borrowed(path) + } +} diff --git a/libs/config/workspace/mod.rs b/libs/config/workspace/mod.rs new file mode 100644 index 0000000000..095a2f9e1d --- /dev/null +++ b/libs/config/workspace/mod.rs @@ -0,0 +1,6363 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::collections::BTreeMap; +use std::collections::HashSet; +use std::collections::VecDeque; +use std::io::ErrorKind; +use std::path::Path; +use std::path::PathBuf; + +use boxed_error::Boxed; +use deno_error::JsError; +use deno_package_json::PackageJson; +use deno_package_json::PackageJsonLoadError; +use deno_package_json::PackageJsonRc; +use deno_path_util::url_from_directory_path; +use deno_path_util::url_from_file_path; +use deno_path_util::url_parent; +use deno_path_util::url_to_file_path; +use deno_semver::package::PackageNv; +use deno_semver::package::PackageReq; +use deno_semver::RangeSetOrTag; +use deno_semver::Version; +use deno_semver::VersionReq; +use discovery::discover_workspace_config_files; +use discovery::ConfigFileDiscovery; +use discovery::ConfigFolder; +use discovery::DenoOrPkgJson; +use indexmap::IndexMap; +use indexmap::IndexSet; +use sys_traits::FsMetadata; +use sys_traits::FsRead; +use sys_traits::FsReadDir; +use thiserror::Error; +use url::Url; + +use crate::deno_json; +use crate::deno_json::get_base_ts_config_for_emit; +use crate::deno_json::BenchConfig; +use crate::deno_json::CompilerOptionTypesDeserializeError; +use crate::deno_json::CompilerOptionsParseError; +use crate::deno_json::ConfigFile; +use crate::deno_json::ConfigFileError; +use crate::deno_json::ConfigFileRc; +use crate::deno_json::ConfigFileReadError; +use crate::deno_json::FmtConfig; +use crate::deno_json::FmtOptionsConfig; +use crate::deno_json::LinkConfigParseError; +use crate::deno_json::LintRulesConfig; +use crate::deno_json::NodeModulesDirMode; +use crate::deno_json::NodeModulesDirParseError; +use crate::deno_json::ParsedTsConfigOptions; +use crate::deno_json::PublishConfig; +pub use crate::deno_json::TaskDefinition; +use crate::deno_json::TestConfig; +use crate::deno_json::ToInvalidConfigError; +use crate::deno_json::ToLockConfigError; +use crate::deno_json::TsConfig; +use crate::deno_json::TsConfigType; +use crate::deno_json::TsConfigWithIgnoredOptions; +use crate::deno_json::WorkspaceConfigParseError; +use crate::glob::FilePatterns; +use crate::glob::PathOrPattern; +use crate::glob::PathOrPatternParseError; +use crate::glob::PathOrPatternSet; +use crate::sync::new_rc; +use crate::UrlToFilePathError; + +mod discovery; + +#[allow(clippy::disallowed_types)] +type UrlRc = crate::sync::MaybeArc; +#[allow(clippy::disallowed_types)] +type WorkspaceRc = crate::sync::MaybeArc; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ResolverWorkspaceJsrPackage { + pub base: Url, + pub name: String, + pub version: Option, + pub exports: IndexMap, + pub is_link: bool, +} + +#[derive(Debug, Clone)] +pub 
struct JsrPackageConfig { + /// The package name. + pub name: String, + pub member_dir: WorkspaceDirectory, + pub config_file: ConfigFileRc, + pub license: Option, +} + +#[derive(Debug, Clone)] +pub struct NpmPackageConfig { + pub nv: PackageNv, + pub workspace_dir: WorkspaceDirectory, + pub pkg_json: PackageJsonRc, +} + +impl NpmPackageConfig { + pub fn matches_req(&self, req: &PackageReq) -> bool { + self.matches_name_and_version_req(&req.name, &req.version_req) + } + + pub fn matches_name_and_version_req( + &self, + name: &str, + version_req: &VersionReq, + ) -> bool { + if name != self.nv.name { + return false; + } + match version_req.inner() { + RangeSetOrTag::RangeSet(set) => set.satisfies(&self.nv.version), + RangeSetOrTag::Tag(tag) => tag == "workspace", + } + } +} + +#[derive(Clone, Debug, Default, Hash, PartialEq)] +pub struct WorkspaceLintConfig { + pub report: Option, +} + +#[derive(Debug, Error, JsError)] +#[class(type)] +pub enum ToMaybeJsxImportSourceConfigError { + #[error("'jsxImportSource' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n at {0}")] + InvalidJsxImportSourceValue(Url), + #[error("'jsxImportSourceTypes' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n at {0}")] + InvalidJsxImportSourceTypesValue(Url), + #[error("Unsupported 'jsx' compiler option value '{value}'. Supported: 'react-jsx', 'react-jsxdev', 'react', 'precompile'\n at {specifier}")] + InvalidJsxCompilerOption { value: String, specifier: Url }, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct JsxImportSourceSpecifierConfig { + pub specifier: String, + pub base: Url, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct JsxImportSourceConfig { + pub module: String, + pub import_source: Option, + pub import_source_types: Option, +} + +#[derive(Debug, Clone, Error, JsError, PartialEq, Eq)] +#[class(type)] +pub enum WorkspaceDiagnosticKind { + #[error( + "\"{0}\" field can only be specified in the workspace root deno.json file." + )] + RootOnlyOption(&'static str), + #[error("\"{0}\" field can only be specified in a workspace member deno.json file and not the workspace root file.")] + MemberOnlyOption(&'static str), + #[error("\"workspaces\" field was ignored. Use \"workspace\" instead.")] + InvalidWorkspacesOption, + #[error("\"exports\" field should be specified when specifying a \"name\".")] + MissingExports, + #[error("\"importMap\" field is ignored when \"imports\" or \"scopes\" are specified in the config file.")] + ImportMapReferencingImportMap, + #[error("\"imports\" and \"scopes\" field is ignored when \"importMap\" is specified in the root config file.")] + MemberImportsScopesIgnored, + #[error("`\"nodeModulesDir\": {previous}` is deprecated in Deno 2.0. Use `\"nodeModulesDir\": \"{suggestion}\"` instead.")] + DeprecatedNodeModulesDirOption { + previous: bool, + suggestion: NodeModulesDirMode, + }, + #[error("\"patch\" property was renamed to \"links\".")] + DeprecatedPatch, + #[error("Invalid workspace member name \"{name}\". 
Ensure the name is in the format '@scope/name'.")] + InvalidMemberName { name: String }, +} + +#[derive(Debug, Error, JsError, Clone, PartialEq, Eq)] +#[class(inherit)] +#[error("{}\n at {}", .kind, .config_url)] +pub struct WorkspaceDiagnostic { + #[inherit] + pub kind: WorkspaceDiagnosticKind, + pub config_url: Url, +} + +#[derive(Debug, JsError, Boxed)] +pub struct ResolveWorkspaceLinkError(pub Box); + +#[derive(Debug, Error, JsError)] +pub enum ResolveWorkspaceLinkErrorKind { + #[class(inherit)] + #[error(transparent)] + ConfigRead(#[from] ConfigReadError), + #[class(type)] + #[error("Could not find link member in '{}'.", .dir_url)] + NotFound { dir_url: Url }, + #[class(type)] + #[error("Workspace member cannot be specified as a link.")] + WorkspaceMemberNotAllowed, + #[class(inherit)] + #[error(transparent)] + InvalidLink(#[from] url::ParseError), + #[class(inherit)] + #[error(transparent)] + UrlToFilePath(#[from] deno_path_util::UrlToFilePathError), + #[class(inherit)] + #[error(transparent)] + Workspace(Box), +} + +#[derive(Debug, Error, JsError)] +pub enum ConfigReadError { + #[class(inherit)] + #[error(transparent)] + DenoJsonRead(#[from] ConfigFileReadError), + #[class(inherit)] + #[error(transparent)] + PackageJsonRead(#[from] PackageJsonLoadError), +} + +#[derive(Debug, JsError, Boxed)] +#[class(type)] +pub struct ResolveWorkspaceMemberError( + pub Box, +); + +#[derive(Debug, Error, JsError)] +#[class(type)] +pub enum ResolveWorkspaceMemberErrorKind { + #[class(inherit)] + #[error(transparent)] + ConfigRead(#[from] ConfigReadError), + #[error("Could not find config file for workspace member in '{}'.", .dir_url)] + NotFound { dir_url: Url }, + #[error("Could not find package.json for workspace member in '{}'.", .dir_url)] + NotFoundPackageJson { dir_url: Url }, + #[error("Could not find config file for workspace member in '{}'. 
Ensure you specify the directory and not the configuration file in the workspace member.", .dir_url)] + NotFoundMaybeSpecifiedFile { dir_url: Url }, + #[error("Workspace member must be nested in a directory under the workspace.\n Member: {member_url}\n Workspace: {workspace_url}")] + NonDescendant { workspace_url: Url, member_url: Url }, + #[error("Cannot specify a workspace member twice ('{}').", .member)] + Duplicate { member: String }, + #[error("The '{name}' package ('{deno_json_url}') cannot have the same name as the package at '{other_deno_json_url}'.")] + DuplicatePackageName { + name: String, + deno_json_url: Url, + other_deno_json_url: Url, + }, + #[error("Remove the reference to the current config file (\"{}\") in \"workspaces\".", .member)] + InvalidSelfReference { member: String }, + #[class(inherit)] + #[error("Invalid workspace member '{}' for config '{}'.", member, base)] + InvalidMember { + base: Url, + member: String, + #[source] + #[inherit] + source: url::ParseError, + }, + #[class(inherit)] + #[error( + "Failed converting {kind} workspace member '{}' to pattern for config '{}'.", + member, + base + )] + MemberToPattern { + kind: &'static str, + base: Url, + member: String, + // this error has the text that failed + #[source] + #[inherit] + source: PathOrPatternParseError, + }, + #[error(transparent)] + #[class(inherit)] + UrlToFilePath(#[from] deno_path_util::UrlToFilePathError), +} + +#[derive(Debug, JsError, Boxed)] +#[class(inherit)] +pub struct WorkspaceDiscoverError(pub Box); + +#[derive(Debug, Error, JsError)] +#[class(type)] +pub enum FailedResolvingStartDirectoryError { + #[error("No paths provided.")] + NoPathsProvided, + #[error("Could not resolve path: '{}'.", .0.display())] + CouldNotResolvePath(PathBuf), + #[error("Provided config file path ('{}') had no parent directory.", .0.display())] + PathHasNoParentDirectory(PathBuf), +} + +#[derive(Debug, Error, JsError)] +pub enum WorkspaceDiscoverErrorKind { + #[class(inherit)] + #[error("Failed resolving start directory.")] + FailedResolvingStartDirectory(#[source] FailedResolvingStartDirectoryError), + #[class(inherit)] + #[error(transparent)] + ConfigRead(#[from] ConfigReadError), + #[class(inherit)] + #[error(transparent)] + PackageJsonRead(#[from] PackageJsonLoadError), + #[class(inherit)] + #[error(transparent)] + LinkConfigParse(#[from] LinkConfigParseError), + #[class(inherit)] + #[error(transparent)] + WorkspaceConfigParse(#[from] WorkspaceConfigParseError), + #[class(inherit)] + #[error(transparent)] + ResolveMember(#[from] ResolveWorkspaceMemberError), + #[class(inherit)] + #[error("Failed loading link '{}' in config '{}'.", link, base)] + ResolveLink { + link: String, + base: Url, + #[source] + #[inherit] + source: ResolveWorkspaceLinkError, + }, + #[class(type)] + #[error("Command resolved to multiple config files. 
Ensure all specified paths are within the same workspace.\n First: {base_workspace_url}\n Second: {other_workspace_url}")] + MultipleWorkspaces { + base_workspace_url: Url, + other_workspace_url: Url, + }, + #[class(inherit)] + #[error(transparent)] + UrlToFilePath(#[from] UrlToFilePathError), + #[class(inherit)] + #[error(transparent)] + PathToUrl(#[from] deno_path_util::PathToUrlError), + #[class(type)] + #[error("Config file must be a member of the workspace.\n Config: {config_url}\n Workspace: {workspace_url}")] + ConfigNotWorkspaceMember { workspace_url: Url, config_url: Url }, +} + +#[derive(Debug, Clone, Copy)] +pub enum WorkspaceDiscoverStart<'a> { + Paths(&'a [PathBuf]), + ConfigFile(&'a Path), +} + +#[derive(Debug, Clone, Copy)] +pub enum VendorEnablement<'a> { + Disable, + Enable { + /// The cwd, which will be used when no configuration file is + /// resolved in order to discover the vendor folder. + cwd: &'a Path, + }, +} + +pub trait WorkspaceCache { + fn get(&self, dir_path: &Path) -> Option; + fn set(&self, dir_path: PathBuf, workspace: WorkspaceRc); +} + +#[derive(Default, Clone)] +pub struct WorkspaceDiscoverOptions<'a> { + /// A cache for deno.json files. This is mostly only useful in the LSP where + /// workspace discovery may occur multiple times. + pub deno_json_cache: Option<&'a dyn crate::deno_json::DenoJsonCache>, + pub pkg_json_cache: Option<&'a dyn deno_package_json::PackageJsonCache>, + /// A cache for workspaces. This is mostly only useful in the LSP where + /// workspace discovery may occur multiple times. + pub workspace_cache: Option<&'a dyn WorkspaceCache>, + pub additional_config_file_names: &'a [&'a str], + pub discover_pkg_json: bool, + pub maybe_vendor_override: Option>, +} + +#[derive(Clone)] +pub struct WorkspaceDirectoryEmptyOptions<'a> { + pub root_dir: UrlRc, + pub use_vendor_dir: VendorEnablement<'a>, +} + +/// Configuration files found in a specific folder. 
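
// A minimal sketch of an in-memory implementation of the `WorkspaceCache`
// trait declared above, the kind of cache the LSP use case mentioned in the
// doc comment has in mind. It assumes `get` returns `Option<WorkspaceRc>`
// (matching how the discovery code uses the cache) and is single-threaded
// (RefCell); a real cache would also need thread safety and an invalidation
// strategy.
use std::cell::RefCell;
use std::collections::HashMap;
use std::path::{Path, PathBuf};

#[derive(Default)]
struct InMemoryWorkspaceCache {
  entries: RefCell<HashMap<PathBuf, WorkspaceRc>>,
}

impl WorkspaceCache for InMemoryWorkspaceCache {
  fn get(&self, dir_path: &Path) -> Option<WorkspaceRc> {
    self.entries.borrow().get(dir_path).cloned()
  }

  fn set(&self, dir_path: PathBuf, workspace: WorkspaceRc) {
    self.entries.borrow_mut().insert(dir_path, workspace);
  }
}
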
+#[derive(Debug, Default, Clone)] +pub struct FolderConfigs { + pub deno_json: Option, + pub pkg_json: Option, +} + +impl FolderConfigs { + fn from_config_folder(config_folder: ConfigFolder) -> Self { + match config_folder { + ConfigFolder::Single(deno_or_pkg_json) => match deno_or_pkg_json { + DenoOrPkgJson::Deno(deno_json) => FolderConfigs { + deno_json: Some(deno_json), + pkg_json: None, + }, + DenoOrPkgJson::PkgJson(pkg_json) => FolderConfigs { + deno_json: None, + pkg_json: Some(pkg_json), + }, + }, + ConfigFolder::Both { + deno_json, + pkg_json, + } => FolderConfigs { + deno_json: Some(deno_json), + pkg_json: Some(pkg_json), + }, + } + } +} + +#[derive(Debug, Error, JsError)] +#[class(type)] +#[error("lint.report must be a string")] +pub struct LintConfigError; + +#[derive(Debug)] +pub struct Workspace { + root_dir: UrlRc, + config_folders: IndexMap, + links: BTreeMap, + pub(crate) vendor_dir: Option, +} + +impl Workspace { + pub(crate) fn new( + root: ConfigFolder, + members: BTreeMap, + link: BTreeMap, + vendor_dir: Option, + ) -> Self { + let root_dir = new_rc(root.folder_url()); + let mut config_folders = IndexMap::with_capacity(members.len() + 1); + config_folders + .insert(root_dir.clone(), FolderConfigs::from_config_folder(root)); + config_folders.extend(members.into_iter().map( + |(folder_url, config_folder)| { + (folder_url, FolderConfigs::from_config_folder(config_folder)) + }, + )); + Workspace { + root_dir, + config_folders, + links: link + .into_iter() + .map(|(url, folder)| (url, FolderConfigs::from_config_folder(folder))) + .collect(), + vendor_dir, + } + } + + pub fn root_dir(&self) -> &UrlRc { + &self.root_dir + } + + pub fn root_dir_path(&self) -> PathBuf { + url_to_file_path(&self.root_dir).unwrap() + } + + pub fn root_folder_configs(&self) -> &FolderConfigs { + self.config_folders.get(&self.root_dir).unwrap() + } + + pub fn root_deno_json(&self) -> Option<&ConfigFileRc> { + self.root_folder_configs().deno_json.as_ref() + } + + pub fn root_pkg_json(&self) -> Option<&PackageJsonRc> { + self.root_folder_configs().pkg_json.as_ref() + } + + pub fn config_folders(&self) -> &IndexMap { + &self.config_folders + } + + pub fn deno_jsons(&self) -> impl Iterator { + self + .config_folders + .values() + .filter_map(|f| f.deno_json.as_ref()) + } + + pub fn package_jsons(&self) -> impl Iterator { + self + .config_folders + .values() + .filter_map(|f| f.pkg_json.as_ref()) + } + + #[allow(clippy::needless_lifetimes)] // clippy issue + pub fn jsr_packages<'a>( + self: &'a WorkspaceRc, + ) -> impl Iterator + 'a { + self.deno_jsons().filter_map(|c| { + if !c.is_package() { + return None; + } + Some(JsrPackageConfig { + member_dir: self.resolve_member_dir(&c.specifier), + name: c.json.name.clone()?, + config_file: c.clone(), + license: c.to_license(), + }) + }) + } + + pub fn npm_packages(self: &WorkspaceRc) -> Vec { + self + .package_jsons() + .filter_map(|c| self.package_json_to_npm_package_config(c)) + .collect() + } + + fn package_json_to_npm_package_config( + self: &WorkspaceRc, + pkg_json: &PackageJsonRc, + ) -> Option { + Some(NpmPackageConfig { + workspace_dir: self.resolve_member_dir(&pkg_json.specifier()), + nv: PackageNv { + name: deno_semver::StackString::from(pkg_json.name.as_ref()?.as_str()), + version: { + let version = pkg_json.version.as_ref()?; + deno_semver::Version::parse_from_npm(version).ok()? 
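
        // Note: the chain of `?` operators above means a package.json without
        // a "name" or "version", or whose version does not parse as an npm
        // semver version, simply yields no workspace npm package (it is
        // skipped by the caller's filter_map) rather than producing an error.
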
+ }, + }, + pkg_json: pkg_json.clone(), + }) + } + + pub fn link_folders(&self) -> &BTreeMap { + &self.links + } + + pub fn link_deno_jsons(&self) -> impl Iterator { + self.links.values().filter_map(|f| f.deno_json.as_ref()) + } + + pub fn link_pkg_jsons(&self) -> impl Iterator { + self.links.values().filter_map(|f| f.pkg_json.as_ref()) + } + + pub fn resolver_deno_jsons(&self) -> impl Iterator { + self + .deno_jsons() + .chain(self.links.values().filter_map(|f| f.deno_json.as_ref())) + } + + pub fn resolver_pkg_jsons( + &self, + ) -> impl Iterator { + self + .config_folders + .iter() + .filter_map(|(k, v)| Some((k, v.pkg_json.as_ref()?))) + } + + pub fn resolver_jsr_pkgs( + &self, + ) -> impl Iterator + '_ { + self + .config_folders + .iter() + .filter_map(|(dir_url, f)| Some((dir_url, f.deno_json.as_ref()?, false))) + .chain(self.links.iter().filter_map(|(dir_url, f)| { + Some((dir_url, f.deno_json.as_ref()?, true)) + })) + .filter_map(|(dir_url, config_file, is_link)| { + let name = config_file.json.name.as_ref()?; + let version = config_file + .json + .version + .as_ref() + .and_then(|v| Version::parse_standard(v).ok()); + let exports_config = config_file.to_exports_config().ok()?; + Some(ResolverWorkspaceJsrPackage { + is_link, + base: dir_url.as_ref().clone(), + name: name.to_string(), + version, + exports: exports_config.into_map(), + }) + }) + } + + /// Resolves a workspace directory, which can be used for deriving + /// configuration specific to a member. + pub fn resolve_member_dir( + self: &WorkspaceRc, + specifier: &Url, + ) -> WorkspaceDirectory { + WorkspaceDirectory::new(specifier, self.clone()) + } + + pub fn resolve_deno_json( + &self, + specifier: &Url, + ) -> Option<(&UrlRc, &ConfigFileRc)> { + self.resolve_deno_json_from_str(specifier.as_str()) + } + + fn resolve_deno_json_from_str( + &self, + specifier: &str, + ) -> Option<(&UrlRc, &ConfigFileRc)> { + let mut specifier = specifier; + if !specifier.ends_with('/') { + specifier = parent_specifier_str(specifier)?; + } + loop { + let (folder_url, folder) = self.resolve_folder_str(specifier)?; + if let Some(config) = folder.deno_json.as_ref() { + return Some((folder_url, config)); + } + specifier = parent_specifier_str(folder_url.as_str())?; + } + } + + fn resolve_pkg_json_from_str( + &self, + specifier: &str, + ) -> Option<(&UrlRc, &PackageJsonRc)> { + let mut specifier = specifier; + if !specifier.ends_with('/') { + specifier = parent_specifier_str(specifier)?; + } + loop { + let (folder_url, folder) = self.resolve_folder_str(specifier)?; + if let Some(pkg_json) = folder.pkg_json.as_ref() { + return Some((folder_url, pkg_json)); + } + specifier = parent_specifier_str(folder_url.as_str())?; + } + } + + pub fn resolve_folder( + &self, + specifier: &Url, + ) -> Option<(&UrlRc, &FolderConfigs)> { + self.resolve_folder_str(specifier.as_str()) + } + + fn resolve_folder_str( + &self, + specifier: &str, + ) -> Option<(&UrlRc, &FolderConfigs)> { + let mut best_match: Option<(&UrlRc, &FolderConfigs)> = None; + for (dir_url, config) in &self.config_folders { + if specifier.starts_with(dir_url.as_str()) + && (best_match.is_none() + || dir_url.as_str().len() > best_match.unwrap().0.as_str().len()) + { + best_match = Some((dir_url, config)); + } + } + best_match + } + + pub fn diagnostics(&self) -> Vec { + fn check_member_diagnostics( + member_config: &ConfigFile, + root_config: Option<&ConfigFile>, + diagnostics: &mut Vec, + ) { + if member_config.json.import_map.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: 
member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("importMap"), + }); + } else if member_config.is_an_import_map() + && root_config + .map(|c| { + c.json.import_map.is_some() + && c.json.imports.is_none() + && c.json.scopes.is_none() + }) + .unwrap_or(false) + { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::MemberImportsScopesIgnored, + }); + } + if member_config.json.lock.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("lock"), + }); + } + if member_config.json.node_modules_dir.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("nodeModulesDir"), + }); + } + if member_config.json.links.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("links"), + }); + } + if member_config.json.scopes.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("scopes"), + }); + } + if !member_config.json.unstable.is_empty() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("unstable"), + }); + } + if member_config.json.vendor.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("vendor"), + }); + } + if member_config.json.workspace.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("workspace"), + }); + } + if let Some(value) = &member_config.json.lint { + if value.get("report").is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: member_config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("lint.report"), + }); + } + } + } + + fn check_all_configs( + config: &ConfigFile, + diagnostics: &mut Vec, + ) { + if let Some(name) = &config.json.name { + if !is_valid_jsr_pkg_name(name) { + diagnostics.push(WorkspaceDiagnostic { + config_url: config.specifier.clone(), + kind: WorkspaceDiagnosticKind::InvalidMemberName { + name: name.clone(), + }, + }); + } + } + if config.json.deprecated_workspaces.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: config.specifier.clone(), + kind: WorkspaceDiagnosticKind::InvalidWorkspacesOption, + }); + } + if config.json.deprecated_patch.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: config.specifier.clone(), + kind: WorkspaceDiagnosticKind::DeprecatedPatch, + }); + } + if config.json.name.is_some() && config.json.exports.is_none() { + diagnostics.push(WorkspaceDiagnostic { + config_url: config.specifier.clone(), + kind: WorkspaceDiagnosticKind::MissingExports, + }); + } + if config.is_an_import_map() && config.json.import_map.is_some() { + diagnostics.push(WorkspaceDiagnostic { + config_url: config.specifier.clone(), + kind: WorkspaceDiagnosticKind::ImportMapReferencingImportMap, + }); + } + if let Some(serde_json::Value::Bool(enabled)) = + &config.json.node_modules_dir + { + diagnostics.push(WorkspaceDiagnostic { + config_url: config.specifier.clone(), + kind: WorkspaceDiagnosticKind::DeprecatedNodeModulesDirOption { + previous: *enabled, + suggestion: 
if config.json.unstable.iter().any(|v| v == "byonm") { + NodeModulesDirMode::Manual + } else if *enabled { + NodeModulesDirMode::Auto + } else { + NodeModulesDirMode::None + }, + }, + }) + } + } + + let mut diagnostics = Vec::new(); + for (url, folder) in &self.config_folders { + if let Some(config) = &folder.deno_json { + let is_root = url == &self.root_dir; + if !is_root { + check_member_diagnostics( + config, + self.root_deno_json().map(|r| r.as_ref()), + &mut diagnostics, + ); + } + + check_all_configs(config, &mut diagnostics); + } + } + + for folder in self.links.values() { + if let Some(config) = &folder.deno_json { + if config.json.links.is_some() { + // supporting linking in links is too complicated + diagnostics.push(WorkspaceDiagnostic { + config_url: config.specifier.clone(), + kind: WorkspaceDiagnosticKind::RootOnlyOption("links"), + }); + } + } + } + + diagnostics + } + + pub fn vendor_dir_path(&self) -> Option<&PathBuf> { + self.vendor_dir.as_ref() + } + + pub fn to_lint_config(&self) -> Result { + self + .with_root_config_only(|root_config| { + Ok(WorkspaceLintConfig { + report: match root_config + .json + .lint + .as_ref() + .and_then(|l| l.get("report")) + { + Some(report) => match report { + serde_json::Value::String(value) => Some(value.to_string()), + serde_json::Value::Null => None, + serde_json::Value::Bool(_) + | serde_json::Value::Number(_) + | serde_json::Value::Array(_) + | serde_json::Value::Object(_) => { + return Err(LintConfigError); + } + }, + None => None, + }, + }) + }) + .unwrap_or(Ok(Default::default())) + } + + pub fn to_import_map_path(&self) -> Result, ConfigFileError> { + self + .with_root_config_only(|root_config| root_config.to_import_map_path()) + .unwrap_or(Ok(None)) + } + + pub fn resolve_lockfile_path( + &self, + ) -> Result, ToLockConfigError> { + if let Some(deno_json) = self.root_deno_json() { + Ok(deno_json.resolve_lockfile_path()?) 
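
      // Lockfile resolution order: a root deno.json decides (subject to its
      // "lock" configuration), otherwise a root package.json implies a
      // sibling deno.lock, and with neither there is no lockfile.
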
+ } else if let Some(pkg_json) = self.root_pkg_json() { + Ok(pkg_json.path.parent().map(|p| p.join("deno.lock"))) + } else { + Ok(None) + } + } + + pub fn resolve_bench_config_for_members( + self: &WorkspaceRc, + cli_args: &FilePatterns, + ) -> Result, ToInvalidConfigError> { + self.resolve_config_for_members(cli_args, |dir, patterns| { + dir.to_bench_config(patterns) + }) + } + + pub fn resolve_lint_config_for_members( + self: &WorkspaceRc, + cli_args: &FilePatterns, + ) -> Result< + Vec<(WorkspaceDirectory, WorkspaceDirLintConfig)>, + ToInvalidConfigError, + > { + self.resolve_config_for_members(cli_args, |dir, patterns| { + dir.to_lint_config(patterns) + }) + } + + pub fn resolve_fmt_config_for_members( + self: &WorkspaceRc, + cli_args: &FilePatterns, + ) -> Result, ToInvalidConfigError> { + self.resolve_config_for_members(cli_args, |dir, patterns| { + dir.to_fmt_config(patterns) + }) + } + + pub fn resolve_test_config_for_members( + self: &WorkspaceRc, + cli_args: &FilePatterns, + ) -> Result, ToInvalidConfigError> { + self.resolve_config_for_members(cli_args, |dir, patterns| { + dir.to_test_config(patterns) + }) + } + + fn resolve_config_for_members( + self: &WorkspaceRc, + cli_args: &FilePatterns, + resolve_config: impl Fn(&WorkspaceDirectory, FilePatterns) -> Result, + ) -> Result, E> { + let cli_args_by_folder = self.split_cli_args_by_deno_json_folder(cli_args); + let mut result = Vec::with_capacity(cli_args_by_folder.len()); + for (folder_url, patterns) in cli_args_by_folder { + let dir = self.resolve_member_dir(&folder_url); + let config = resolve_config(&dir, patterns)?; + result.push((dir, config)); + } + Ok(result) + } + + fn split_cli_args_by_deno_json_folder( + &self, + cli_args: &FilePatterns, + ) -> IndexMap { + fn common_ancestor(a: &Path, b: &Path) -> PathBuf { + a.components() + .zip(b.components()) + .take_while(|(a, b)| a == b) + .map(|(a, _)| a) + .collect() + } + + let cli_arg_patterns = cli_args.split_by_base(); + let deno_json_folders = self + .config_folders + .iter() + .filter(|(_, folder)| folder.deno_json.is_some()) + .map(|(url, folder)| { + let dir_path = url_to_file_path(url).unwrap(); + (dir_path, (url, folder)) + }) + .collect::>(); + let mut results: IndexMap<_, FilePatterns> = + IndexMap::with_capacity(deno_json_folders.len() + 1); + for pattern in cli_arg_patterns { + let mut matches = Vec::with_capacity(deno_json_folders.len()); + for (dir_path, v) in deno_json_folders.iter() { + if pattern.base.starts_with(dir_path) + || dir_path.starts_with(&pattern.base) + { + matches.push((dir_path, *v)); + } + } + // remove any non-sub/current folders that start with another folder + let mut indexes_to_remove = VecDeque::with_capacity(matches.len()); + for (i, (m, _)) in matches.iter().enumerate() { + if !m.starts_with(&pattern.base) + && matches.iter().any(|(sub, _)| { + sub.starts_with(m) && sub != m && pattern.base.starts_with(m) + }) + { + indexes_to_remove.push_back(i); + } + } + let mut matched_folder_urls = + Vec::with_capacity(std::cmp::max(1, matches.len())); + if matches.is_empty() { + // This will occur when someone specifies a file that's outside + // the workspace directory. In this case, use the root directory's config + // so that it's consistent across the workspace. 
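
        // Worked example (hypothetical layout): a workspace at /repo with
        // member deno.json files in /repo/a and /repo/b.
        //   `deno lint a/src`      -> the a/src patterns run with /repo/a's config
        //   `deno lint a/src b`    -> /repo/a gets a/src, /repo/b gets b
        //   `deno lint /tmp/other` -> no member folder matches, so the root
        //                             /repo config is used (this branch)
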
+ matched_folder_urls.push(&self.root_dir); + } + for (i, (_dir_path, (folder_url, _config))) in matches.iter().enumerate() + { + if let Some(skip_index) = indexes_to_remove.front() { + if i == *skip_index { + indexes_to_remove.pop_front(); + continue; + } + } + matched_folder_urls.push(folder_url); + } + for folder_url in matched_folder_urls { + let entry = results.entry((*folder_url).clone()); + let folder_path = url_to_file_path(folder_url).unwrap(); + match entry { + indexmap::map::Entry::Occupied(entry) => { + let entry = entry.into_mut(); + let common_base = common_ancestor(&pattern.base, &entry.base); + if common_base.starts_with(&folder_path) + && entry.base.starts_with(&common_base) + { + entry.base = common_base; + } + match &mut entry.include { + Some(set) => { + if let Some(includes) = &pattern.include { + for include in includes.inner() { + if !set.inner().contains(include) { + set.push(include.clone()) + } + } + } + } + None => { + entry.include.clone_from(&pattern.include); + } + } + } + indexmap::map::Entry::Vacant(entry) => { + entry.insert(FilePatterns { + base: if pattern.base.starts_with(&folder_path) { + pattern.base.clone() + } else { + folder_path.clone() + }, + include: pattern.include.clone(), + exclude: pattern.exclude.clone(), + }); + } + } + } + } + results + } + + pub fn resolve_config_excludes( + &self, + ) -> Result { + // have the root excludes at the front because they're lower priority + let mut excludes = match &self.root_deno_json() { + Some(c) => c.to_exclude_files_config()?.exclude.into_path_or_patterns(), + None => Default::default(), + }; + for (dir_url, folder) in self.config_folders.iter() { + let Some(deno_json) = folder.deno_json.as_ref() else { + continue; + }; + if dir_url == &self.root_dir { + continue; + } + excludes.extend( + deno_json + .to_exclude_files_config()? + .exclude + .into_path_or_patterns(), + ); + } + Ok(PathOrPatternSet::new(excludes)) + } + + pub fn unstable_features(&self) -> &[String] { + self + .with_root_config_only(|deno_json| { + (&deno_json.json.unstable) as &[String] + }) + .unwrap_or(&[]) + } + + pub fn has_unstable(&self, name: &str) -> bool { + self + .with_root_config_only(|deno_json| deno_json.has_unstable(name)) + .unwrap_or(false) + } + + fn with_root_config_only<'a, R>( + &'a self, + with_root: impl Fn(&'a ConfigFile) -> R, + ) -> Option { + self.root_deno_json().map(|c| with_root(c)) + } + + pub fn node_modules_dir( + &self, + ) -> Result, deno_json::NodeModulesDirParseError> + { + self + .root_deno_json() + .and_then(|c| c.json.node_modules_dir.as_ref()) + .map(|v| { + serde_json::from_value::(v.clone()) + .map_err(|err| NodeModulesDirParseError { source: err }) + }) + .transpose() + } +} + +#[derive(Debug, Clone)] +struct WorkspaceDirConfig { + #[allow(clippy::disallowed_types)] + member: crate::sync::MaybeArc, + // will be None when it doesn't exist or the member config + // is the root config + #[allow(clippy::disallowed_types)] + root: Option>, +} + +#[derive(Debug, Error, JsError)] +#[class(inherit)] +#[error("Failed parsing '{specifier}'.")] +pub struct ToTasksConfigError { + specifier: Url, + #[source] + #[inherit] + error: ToInvalidConfigError, +} + +#[derive(Clone, Debug, Hash, PartialEq)] +pub struct WorkspaceDirLintConfig { + pub rules: LintRulesConfig, + pub plugins: Vec, + pub files: FilePatterns, +} + +#[derive(Debug, Clone)] +pub struct WorkspaceDirectory { + pub workspace: WorkspaceRc, + /// The directory that this context is for. This is generally the cwd. 
+ dir_url: UrlRc, + pkg_json: Option>, + deno_json: Option>, +} + +impl WorkspaceDirectory { + pub fn empty(opts: WorkspaceDirectoryEmptyOptions) -> Self { + WorkspaceDirectory::new( + &opts.root_dir, + new_rc(Workspace { + config_folders: IndexMap::from([( + opts.root_dir.clone(), + FolderConfigs::default(), + )]), + root_dir: opts.root_dir.clone(), + links: BTreeMap::new(), + vendor_dir: match opts.use_vendor_dir { + VendorEnablement::Enable { cwd } => Some(cwd.join("vendor")), + VendorEnablement::Disable => None, + }, + }), + ) + } + + pub fn discover( + sys: &TSys, + start: WorkspaceDiscoverStart, + opts: &WorkspaceDiscoverOptions, + ) -> Result { + fn resolve_start_dir( + sys: &impl FsMetadata, + start: &WorkspaceDiscoverStart, + ) -> Result { + match start { + WorkspaceDiscoverStart::Paths(paths) => { + if paths.is_empty() { + Err( + WorkspaceDiscoverErrorKind::FailedResolvingStartDirectory( + FailedResolvingStartDirectoryError::NoPathsProvided, + ) + .into(), + ) + } else { + // just select the first one... this doesn't matter too much + // at the moment because we only use this for lint and fmt, + // so this is ok for now + let path = &paths[0]; + match sys.fs_is_dir(path) { + Ok(is_dir) => Ok( + url_from_directory_path(if is_dir { + path + } else { + path.parent().unwrap() + }) + .unwrap(), + ), + Err(_err) => { + // assume the parent is a directory + match path.parent() { + Some(parent) => Ok(url_from_directory_path(parent).unwrap()), + None => Err( + WorkspaceDiscoverErrorKind::FailedResolvingStartDirectory( + FailedResolvingStartDirectoryError::CouldNotResolvePath( + path.clone(), + ), + ) + .into(), + ), + } + } + } + } + } + WorkspaceDiscoverStart::ConfigFile(path) => { + let parent = path.parent().ok_or_else(|| { + WorkspaceDiscoverErrorKind::FailedResolvingStartDirectory( + FailedResolvingStartDirectoryError::PathHasNoParentDirectory( + path.to_path_buf(), + ), + ) + })?; + Ok(url_from_directory_path(parent).unwrap()) + } + } + } + + let start_dir = resolve_start_dir(sys, &start)?; + let config_file_discovery = + discover_workspace_config_files(sys, start, opts)?; + + let context = match config_file_discovery { + ConfigFileDiscovery::None { + maybe_vendor_dir: vendor_dir, + } => { + let start_dir = new_rc(start_dir); + let workspace = new_rc(Workspace { + config_folders: IndexMap::from([( + start_dir.clone(), + FolderConfigs::default(), + )]), + root_dir: start_dir.clone(), + links: BTreeMap::new(), + vendor_dir, + }); + WorkspaceDirectory::new(&start_dir, workspace) + } + ConfigFileDiscovery::Workspace { workspace } => { + WorkspaceDirectory::new(&start_dir, workspace) + } + }; + debug_assert!( + context + .workspace + .config_folders + .contains_key(&context.workspace.root_dir), + "root should always have a folder" + ); + Ok(context) + } + + fn new(specifier: &Url, workspace: WorkspaceRc) -> Self { + let maybe_folder = workspace.resolve_folder(specifier); + match maybe_folder { + Some((member_url, folder)) => { + if member_url == &workspace.root_dir { + Self::create_from_root_folder(workspace) + } else { + let maybe_deno_json = folder + .deno_json + .as_ref() + .map(|c| (member_url, c)) + .or_else(|| { + let parent = parent_specifier_str(member_url.as_str())?; + workspace.resolve_deno_json_from_str(parent) + }) + .or_else(|| { + let root = + workspace.config_folders.get(&workspace.root_dir).unwrap(); + root.deno_json.as_ref().map(|c| (&workspace.root_dir, c)) + }); + let maybe_pkg_json = folder + .pkg_json + .as_ref() + .map(|pkg_json| (member_url, pkg_json)) + 
.or_else(|| { + let parent = parent_specifier_str(member_url.as_str())?; + workspace.resolve_pkg_json_from_str(parent) + }) + .or_else(|| { + let root = + workspace.config_folders.get(&workspace.root_dir).unwrap(); + root.pkg_json.as_ref().map(|c| (&workspace.root_dir, c)) + }); + Self { + dir_url: member_url.clone(), + pkg_json: maybe_pkg_json.map(|(member_url, pkg_json)| { + WorkspaceDirConfig { + root: if workspace.root_dir == *member_url { + None + } else { + workspace + .config_folders + .get(&workspace.root_dir) + .unwrap() + .pkg_json + .clone() + }, + member: pkg_json.clone(), + } + }), + deno_json: maybe_deno_json.map(|(member_url, config)| { + WorkspaceDirConfig { + root: if workspace.root_dir == *member_url { + None + } else { + workspace + .config_folders + .get(&workspace.root_dir) + .unwrap() + .deno_json + .clone() + }, + member: config.clone(), + } + }), + workspace, + } + } + } + None => Self::create_from_root_folder(workspace), + } + } + + fn create_from_root_folder(workspace: WorkspaceRc) -> Self { + let root_folder = + workspace.config_folders.get(&workspace.root_dir).unwrap(); + let dir_url = workspace.root_dir.clone(); + WorkspaceDirectory { + dir_url, + pkg_json: root_folder.pkg_json.as_ref().map(|config| { + WorkspaceDirConfig { + member: config.clone(), + root: None, + } + }), + deno_json: root_folder.deno_json.as_ref().map(|config| { + WorkspaceDirConfig { + member: config.clone(), + root: None, + } + }), + workspace, + } + } + + pub fn jsr_packages_for_publish(&self) -> Vec { + // only publish the current folder if it's a package + if let Some(package_config) = self.maybe_package_config() { + return vec![package_config]; + } + if let Some(pkg_json) = &self.pkg_json { + let dir_path = url_to_file_path(&self.dir_url).unwrap(); + // don't publish anything if in a package.json only directory within + // a workspace + if pkg_json.member.dir_path().starts_with(&dir_path) + && dir_path != pkg_json.member.dir_path() + { + return Vec::new(); + } + } + if self.dir_url == self.workspace.root_dir { + self.workspace.jsr_packages().collect() + } else { + // nothing to publish + Vec::new() + } + } + + pub fn dir_url(&self) -> &UrlRc { + &self.dir_url + } + + pub fn dir_path(&self) -> PathBuf { + url_to_file_path(&self.dir_url).unwrap() + } + + pub fn has_deno_or_pkg_json(&self) -> bool { + self.has_pkg_json() || self.has_deno_json() + } + + pub fn has_deno_json(&self) -> bool { + self.deno_json.is_some() + } + + pub fn has_pkg_json(&self) -> bool { + self.pkg_json.is_some() + } + + pub fn maybe_deno_json(&self) -> Option<&ConfigFileRc> { + self.deno_json.as_ref().map(|c| &c.member) + } + + pub fn maybe_pkg_json(&self) -> Option<&PackageJsonRc> { + self.pkg_json.as_ref().map(|c| &c.member) + } + + pub fn maybe_package_config(&self) -> Option { + let deno_json = self.maybe_deno_json()?; + let pkg_name = deno_json.json.name.as_ref()?; + if !deno_json.is_package() { + return None; + } + Some(JsrPackageConfig { + name: pkg_name.clone(), + config_file: deno_json.clone(), + member_dir: self.clone(), + license: deno_json.to_license(), + }) + } + + pub fn check_js(&self) -> bool { + self + .deno_json + .as_ref() + .and_then(|c| { + // prefer member, then root + c.member + .check_js() + .or_else(|| c.root.as_ref().and_then(|d| d.check_js())) + }) + .unwrap_or(false) + } + + pub fn to_resolved_ts_config( + &self, + sys: &TSys, + config_type: TsConfigType, + ) -> Result { + let mut base_ts_config = get_base_ts_config_for_emit(config_type); + let TsConfigWithIgnoredOptions { + ts_config, + 
ignored_options, + } = self.to_raw_user_provided_tsconfig(sys)?; + // overwrite the base values with the user specified ones + base_ts_config.merge_mut(ts_config); + Ok(TsConfigWithIgnoredOptions { + ts_config: base_ts_config, + ignored_options, + }) + } + + fn is_config_at_root(&self) -> bool { + self + .deno_json + .as_ref() + .map(|p| p.root.is_none()) + .unwrap_or(true) + && self + .pkg_json + .as_ref() + .map(|p| p.root.is_none()) + .unwrap_or(true) + } + + /// Gets the combined tsconfig that the user provided, without any of + /// Deno's defaults. Use `to_resolved_ts_config()` to get the resolved + /// config instead. + pub fn to_raw_user_provided_tsconfig( + &self, + sys: &TSys, + ) -> Result { + let mut result = TsConfigWithIgnoredOptions { + ts_config: TsConfig::default(), + ignored_options: Vec::new(), + }; + let merge = |config: ParsedTsConfigOptions, + result: &mut TsConfigWithIgnoredOptions| { + if let Some(options) = config.maybe_ignored { + result.ignored_options.push(options); + } + result.ts_config.merge_object_mut(config.options); + }; + let try_merge_from_ts_config = + |dir_path: &Path, result: &mut TsConfigWithIgnoredOptions| { + if let Some(options) = + compiler_options_from_ts_config_next_to_pkg_json(sys, dir_path) + { + merge(options, result); + } + }; + + if let Some(config) = &self.deno_json { + // root first + if let Some(root) = &config.root { + // read from root deno.json + if let Some(compiler_options) = root.to_compiler_options()? { + merge(compiler_options, &mut result); + } else { + try_merge_from_ts_config(&root.dir_path(), &mut result); + } + } else if let Some(pkg_json) = &self.pkg_json { + // if root deno.json doesn't exist, but package.json does, try read from + // tsconfig.json next to pkg.json + if let Some(pkg_json) = &pkg_json.root { + try_merge_from_ts_config(pkg_json.dir_path(), &mut result); + } + } + + // then read from member deno.json + if let Some(compiler_options) = config.member.to_compiler_options()? { + merge(compiler_options, &mut result); + } else if self.is_config_at_root() { + // config is root, so try to discover tsconfig + try_merge_from_ts_config(&config.member.dir_path(), &mut result); + } + } else if let Some(pkg_json) = &self.pkg_json { + if let Some(pkg_json) = &pkg_json.root { + // try read from tsconfig.json next to root package.json + try_merge_from_ts_config(pkg_json.dir_path(), &mut result); + } else { + debug_assert!(self.is_config_at_root()); + // config is root, so try to read from that + try_merge_from_ts_config(pkg_json.member.dir_path(), &mut result); + } + } + + Ok(result) + } + + pub fn to_compiler_option_types( + &self, + ) -> Result)>, CompilerOptionTypesDeserializeError> { + let Some(config) = &self.deno_json else { + return Ok(Vec::new()); + }; + let mut result = Vec::with_capacity(2); + if let Some(root) = &config.root { + if let Some(types) = root.to_compiler_option_types()? { + result.push(types); + } + } + if let Some(types) = config.member.to_compiler_option_types()? 
{ + result.push(types); + } + Ok(result) + } + + pub fn to_maybe_jsx_import_source_config( + &self, + ) -> Result, ToMaybeJsxImportSourceConfigError> + { + let Some(config) = &self.deno_json else { + return Ok(None); + }; + let base = config + .root + .as_ref() + .map(|r| r.to_raw_jsx_compiler_options()) + .unwrap_or_default(); + let member = config.member.to_raw_jsx_compiler_options(); + let is_jsx_automatic = matches!( + member.jsx.as_deref().or(base.jsx.as_deref()), + Some("react-jsx" | "react-jsxdev" | "precompile"), + ); + let import_source = member + .jsx_import_source + .map(|specifier| JsxImportSourceSpecifierConfig { + base: config.member.specifier.clone(), + specifier, + }) + .or_else(|| { + base.jsx_import_source.and_then(|specifier| { + Some(JsxImportSourceSpecifierConfig { + base: config.root.as_ref().map(|r| r.specifier.clone())?, + specifier, + }) + }) + }) + .or_else(|| { + is_jsx_automatic.then(|| JsxImportSourceSpecifierConfig { + base: config.member.specifier.clone(), + specifier: "react".to_string(), + }) + }); + let import_source_types = member + .jsx_import_source_types + .map(|specifier| JsxImportSourceSpecifierConfig { + base: config.member.specifier.clone(), + specifier, + }) + .or_else(|| { + base.jsx_import_source_types.and_then(|specifier| { + Some(JsxImportSourceSpecifierConfig { + base: config.root.as_ref().map(|r| r.specifier.clone())?, + specifier, + }) + }) + }) + .or_else(|| import_source.clone()); + let module = match member.jsx.as_deref().or(base.jsx.as_deref()) { + Some("react-jsx") => "jsx-runtime".to_string(), + Some("react-jsxdev") => "jsx-dev-runtime".to_string(), + Some("react") | None => { + if let Some(import_source) = &import_source { + return Err( + ToMaybeJsxImportSourceConfigError::InvalidJsxImportSourceValue( + import_source.base.clone(), + ), + ); + } + if let Some(import_source_types) = &import_source_types { + return Err( + ToMaybeJsxImportSourceConfigError::InvalidJsxImportSourceTypesValue( + import_source_types.base.clone(), + ), + ); + } + return Ok(None); + } + Some("precompile") => "jsx-runtime".to_string(), + Some(setting) => { + return Err( + ToMaybeJsxImportSourceConfigError::InvalidJsxCompilerOption { + value: setting.to_string(), + specifier: if member.jsx.is_some() { + config.member.specifier.clone() + } else { + config + .root + .as_ref() + .map(|r| r.specifier.clone()) + .unwrap_or_else(|| config.member.specifier.clone()) + }, + }, + ) + } + }; + Ok(Some(JsxImportSourceConfig { + module, + import_source, + import_source_types, + })) + } + + pub fn to_lint_config( + &self, + cli_args: FilePatterns, + ) -> Result { + let mut config = self.to_lint_config_inner()?; + self.exclude_includes_with_member_for_base_for_root(&mut config.files); + combine_files_config_with_cli_args(&mut config.files, cli_args); + self.append_workspace_members_to_exclude(&mut config.files); + Ok(config) + } + + fn to_lint_config_inner( + &self, + ) -> Result { + let Some(deno_json) = self.deno_json.as_ref() else { + return Ok(WorkspaceDirLintConfig { + rules: Default::default(), + plugins: Default::default(), + files: FilePatterns::new_with_base( + url_to_file_path(&self.dir_url).unwrap(), + ), + }); + }; + let member_config = deno_json.member.to_lint_config()?; + let root_config = deno_json + .root + .as_ref() + .map(|root| root.to_lint_config()) + .transpose()?; + + // 1. Merge workspace root + member plugins + // 2. Workspace member can filter out plugins by negating + // like this: `!my-plugin` + // 3. 
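// A minimal sketch of the `jsx` option handling in
// `to_maybe_jsx_import_source_config()` above. The match arms mirror the
// mapping in that method ("react-jsx" -> "jsx-runtime", "react-jsxdev" ->
// "jsx-dev-runtime", "precompile" -> "jsx-runtime"); the classic "react"
// transform, or an unset option, yields no automatic runtime module. This
// helper is illustrative only and skips the import-source error handling.
fn jsx_runtime_module(jsx: Option<&str>) -> Result<Option<&'static str>, String> {
  match jsx {
    Some("react-jsx") | Some("precompile") => Ok(Some("jsx-runtime")),
    Some("react-jsxdev") => Ok(Some("jsx-dev-runtime")),
    Some("react") | None => Ok(None),
    Some(other) => Err(format!("unsupported 'jsx' compiler option: {}", other)),
  }
}

#[test]
fn jsx_runtime_module_sketch() {
  assert_eq!(jsx_runtime_module(Some("react-jsx")), Ok(Some("jsx-runtime")));
  assert_eq!(jsx_runtime_module(None), Ok(None));
  assert!(jsx_runtime_module(Some("preserve")).is_err());
}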
Remove duplicates in case a plugin was defined in both + // workspace root and member. + let excluded_plugins = member_config + .options + .plugins + .iter() + .filter(|plugin| plugin.specifier.starts_with('!')) + .map(|plugin| { + deno_json + .member + .specifier + .join(&plugin.specifier[1..]) + .map_err(|err| ToInvalidConfigError::InvalidConfig { + config: "lint", + source: err.into(), + }) + }) + .collect::, _>>()?; + + let plugins = root_config + .iter() + .flat_map(|root_config| &root_config.options.plugins) + .chain(&member_config.options.plugins) + .filter(|plugin| !plugin.specifier.starts_with('!')) + .map(|plugin| { + plugin.base.join(&plugin.specifier).map_err(|err| { + ToInvalidConfigError::InvalidConfig { + config: "lint", + source: err.into(), + } + }) + }) + .collect::, _>>()? + .into_iter() + .filter(|plugin| !excluded_plugins.contains(plugin)) + .collect::>(); + + let (rules, files) = match root_config { + Some(root_config) => ( + LintRulesConfig { + tags: combine_option_vecs( + root_config.options.rules.tags, + member_config.options.rules.tags, + ), + include: combine_option_vecs_with_override( + CombineOptionVecsWithOverride { + root: root_config.options.rules.include, + member: member_config + .options + .rules + .include + .as_ref() + .map(Cow::Borrowed), + member_override_root: member_config + .options + .rules + .exclude + .as_ref(), + }, + ), + exclude: combine_option_vecs_with_override( + CombineOptionVecsWithOverride { + root: root_config.options.rules.exclude, + member: member_config.options.rules.exclude.map(Cow::Owned), + member_override_root: member_config + .options + .rules + .include + .as_ref(), + }, + ), + }, + combine_patterns(root_config.files, member_config.files), + ), + None => (member_config.options.rules, member_config.files), + }; + + Ok(WorkspaceDirLintConfig { + plugins, + rules, + files, + }) + } + + pub fn to_fmt_config( + &self, + cli_args: FilePatterns, + ) -> Result { + let mut config = self.to_fmt_config_inner()?; + self.exclude_includes_with_member_for_base_for_root(&mut config.files); + combine_files_config_with_cli_args(&mut config.files, cli_args); + self.append_workspace_members_to_exclude(&mut config.files); + Ok(config) + } + + fn to_fmt_config_inner(&self) -> Result { + let Some(deno_json) = self.deno_json.as_ref() else { + return Ok(FmtConfig { + files: FilePatterns::new_with_base( + url_to_file_path(&self.dir_url).unwrap(), + ), + options: Default::default(), + }); + }; + let member_config = deno_json.member.to_fmt_config()?; + let root_config = match &deno_json.root { + Some(root) => root.to_fmt_config()?, + None => return Ok(member_config), + }; + + Ok(FmtConfig { + options: FmtOptionsConfig { + use_tabs: member_config + .options + .use_tabs + .or(root_config.options.use_tabs), + line_width: member_config + .options + .line_width + .or(root_config.options.line_width), + indent_width: member_config + .options + .indent_width + .or(root_config.options.indent_width), + single_quote: member_config + .options + .single_quote + .or(root_config.options.single_quote), + prose_wrap: member_config + .options + .prose_wrap + .or(root_config.options.prose_wrap), + semi_colons: member_config + .options + .semi_colons + .or(root_config.options.semi_colons), + quote_props: member_config + .options + .quote_props + .or(root_config.options.quote_props), + new_line_kind: member_config + .options + .new_line_kind + .or(root_config.options.new_line_kind), + use_braces: member_config + .options + .use_braces + .or(root_config.options.use_braces), + 
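// A minimal sketch of the plugin merge described by the numbered comment in
// `to_lint_config_inner()` above, using plain strings in place of the
// resolved plugin Urls: root and member plugin lists are concatenated, a
// member entry prefixed with `!` removes that plugin, and duplicates are
// dropped while preserving order.
fn merge_lint_plugins(root: &[&str], member: &[&str]) -> Vec<String> {
  let negated: std::collections::HashSet<&str> = member
    .iter()
    .filter_map(|plugin| plugin.strip_prefix('!'))
    .collect();
  let mut seen = std::collections::HashSet::new();
  root
    .iter()
    .chain(member.iter())
    .filter(|plugin| !plugin.starts_with('!') && !negated.contains(**plugin))
    .filter(|plugin| seen.insert(**plugin))
    .map(|plugin| plugin.to_string())
    .collect()
}

#[test]
fn lint_plugin_negation_sketch() {
  // mirrors test_root_member_lint_combinations later in this file
  let merged = merge_lint_plugins(
    &["jsr:@deno/test-plugin1", "jsr:@deno/test-plugin3"],
    &[
      "jsr:@deno/test-plugin1",
      "jsr:@deno/test-plugin2",
      "!jsr:@deno/test-plugin3",
    ],
  );
  assert_eq!(merged, ["jsr:@deno/test-plugin1", "jsr:@deno/test-plugin2"]);
}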
brace_position: member_config + .options + .brace_position + .or(root_config.options.brace_position), + single_body_position: member_config + .options + .single_body_position + .or(root_config.options.single_body_position), + next_control_flow_position: member_config + .options + .next_control_flow_position + .or(root_config.options.next_control_flow_position), + trailing_commas: member_config + .options + .trailing_commas + .or(root_config.options.trailing_commas), + operator_position: member_config + .options + .operator_position + .or(root_config.options.operator_position), + jsx_bracket_position: member_config + .options + .jsx_bracket_position + .or(root_config.options.jsx_bracket_position), + jsx_force_new_lines_surrounding_content: member_config + .options + .jsx_force_new_lines_surrounding_content + .or(root_config.options.jsx_force_new_lines_surrounding_content), + jsx_multi_line_parens: member_config + .options + .jsx_multi_line_parens + .or(root_config.options.jsx_multi_line_parens), + type_literal_separator_kind: member_config + .options + .type_literal_separator_kind + .or(root_config.options.type_literal_separator_kind), + space_around: member_config + .options + .space_around + .or(root_config.options.space_around), + space_surrounding_properties: member_config + .options + .space_surrounding_properties + .or(root_config.options.space_surrounding_properties), + }, + files: combine_patterns(root_config.files, member_config.files), + }) + } + + pub fn to_bench_config( + &self, + cli_args: FilePatterns, + ) -> Result { + let mut config = self.to_bench_config_inner()?; + self.exclude_includes_with_member_for_base_for_root(&mut config.files); + combine_files_config_with_cli_args(&mut config.files, cli_args); + self.append_workspace_members_to_exclude(&mut config.files); + Ok(config) + } + + fn to_bench_config_inner(&self) -> Result { + let Some(deno_json) = self.deno_json.as_ref() else { + return Ok(BenchConfig { + files: FilePatterns::new_with_base( + url_to_file_path(&self.dir_url).unwrap(), + ), + }); + }; + let member_config = deno_json.member.to_bench_config()?; + let root_config = match &deno_json.root { + Some(root) => root.to_bench_config()?, + None => return Ok(member_config), + }; + Ok(BenchConfig { + files: combine_patterns(root_config.files, member_config.files), + }) + } + + pub fn to_tasks_config( + &self, + ) -> Result { + fn to_member_tasks_config( + maybe_deno_json: Option<&ConfigFileRc>, + maybe_pkg_json: Option<&PackageJsonRc>, + ) -> Result, ToTasksConfigError> { + let config = WorkspaceMemberTasksConfig { + deno_json: match maybe_deno_json { + Some(deno_json) => deno_json + .to_tasks_config() + .map(|tasks| { + tasks.map(|tasks| WorkspaceMemberTasksConfigFile { + folder_url: url_parent(&deno_json.specifier), + tasks, + }) + }) + .map_err(|error| ToTasksConfigError { + specifier: deno_json.specifier.clone(), + error, + })?, + None => None, + }, + package_json: match maybe_pkg_json { + Some(pkg_json) => pkg_json.scripts.clone().map(|scripts| { + WorkspaceMemberTasksConfigFile { + folder_url: url_parent(&pkg_json.specifier()), + tasks: scripts, + } + }), + None => None, + }, + }; + if config.deno_json.is_none() && config.package_json.is_none() { + return Ok(None); + } + Ok(Some(config)) + } + + Ok(WorkspaceTasksConfig { + root: to_member_tasks_config( + self.deno_json.as_ref().and_then(|d| d.root.as_ref()), + self.pkg_json.as_ref().and_then(|d| d.root.as_ref()), + )?, + member: to_member_tasks_config( + self.deno_json.as_ref().map(|d| &d.member), + 
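// A minimal sketch of the field-wise fallback used for the fmt options above:
// each member option falls back to the root option via `Option::or`. The
// struct below is a two-field stand-in, not the crate's FmtOptionsConfig.
#[derive(Debug, PartialEq)]
struct FmtOptionsSketch {
  use_tabs: Option<bool>,
  line_width: Option<u32>,
}

fn merge_fmt_options(
  member: FmtOptionsSketch,
  root: FmtOptionsSketch,
) -> FmtOptionsSketch {
  FmtOptionsSketch {
    use_tabs: member.use_tabs.or(root.use_tabs),
    line_width: member.line_width.or(root.line_width),
  }
}

#[test]
fn member_fmt_option_wins_sketch() {
  let merged = merge_fmt_options(
    FmtOptionsSketch { use_tabs: Some(false), line_width: None },
    FmtOptionsSketch { use_tabs: Some(true), line_width: Some(80) },
  );
  assert_eq!(
    merged,
    FmtOptionsSketch { use_tabs: Some(false), line_width: Some(80) }
  );
}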
self.pkg_json.as_ref().map(|d| &d.member), + )?, + }) + } + + pub fn to_publish_config( + &self, + ) -> Result { + let mut config = self.to_publish_config_inner()?; + self.exclude_includes_with_member_for_base_for_root(&mut config.files); + self.append_workspace_members_to_exclude(&mut config.files); + Ok(config) + } + + fn to_publish_config_inner( + &self, + ) -> Result { + let Some(deno_json) = self.deno_json.as_ref() else { + return Ok(PublishConfig { + files: FilePatterns::new_with_base( + url_to_file_path(&self.dir_url).unwrap(), + ), + }); + }; + let member_config = deno_json.member.to_publish_config()?; + let root_config = match &deno_json.root { + Some(root) => root.to_publish_config()?, + None => return Ok(member_config), + }; + Ok(PublishConfig { + files: combine_patterns(root_config.files, member_config.files), + }) + } + + pub fn to_test_config( + &self, + cli_args: FilePatterns, + ) -> Result { + let mut config = self.to_test_config_inner()?; + self.exclude_includes_with_member_for_base_for_root(&mut config.files); + combine_files_config_with_cli_args(&mut config.files, cli_args); + self.append_workspace_members_to_exclude(&mut config.files); + Ok(config) + } + + fn to_test_config_inner(&self) -> Result { + let Some(deno_json) = self.deno_json.as_ref() else { + return Ok(TestConfig { + files: FilePatterns::new_with_base( + url_to_file_path(&self.dir_url).unwrap(), + ), + }); + }; + let member_config = deno_json.member.to_test_config()?; + let root_config = match &deno_json.root { + Some(root) => root.to_test_config()?, + None => return Ok(member_config), + }; + + Ok(TestConfig { + files: combine_patterns(root_config.files, member_config.files), + }) + } + + /// Removes any "include" patterns from the root files that have + /// a base in another workspace member. 
+ fn exclude_includes_with_member_for_base_for_root( + &self, + files: &mut FilePatterns, + ) { + let Some(include) = &mut files.include else { + return; + }; + let root_url = self.workspace.root_dir(); + if self.dir_url != *root_url { + return; // only do this for the root config + } + + let root_folder_configs = self.workspace.root_folder_configs(); + let maybe_root_deno_json = root_folder_configs.deno_json.as_ref(); + let non_root_deno_jsons = match maybe_root_deno_json { + Some(root_deno_json) => self + .workspace + .deno_jsons() + .filter(|d| d.specifier != root_deno_json.specifier) + .collect::>(), + None => self.workspace.deno_jsons().collect::>(), + }; + + let include = include.inner_mut(); + for i in (0..include.len()).rev() { + let Some(path) = include[i].base_path() else { + continue; + }; + for deno_json in non_root_deno_jsons.iter() { + if path.starts_with(deno_json.dir_path()) { + include.remove(i); + break; + } + } + } + } + + fn append_workspace_members_to_exclude(&self, files: &mut FilePatterns) { + files.exclude.append( + self + .workspace + .deno_jsons() + .filter(|member_deno_json| { + let member_dir = member_deno_json.dir_path(); + member_dir != files.base && member_dir.starts_with(&files.base) + }) + .map(|d| PathOrPattern::Path(d.dir_path())), + ); + } +} + +/// Reads compilerOptions from tsconfig.json file next to the package.json +/// See https://github.com/denoland/deno/issues/28455#issuecomment-2734956368 +fn compiler_options_from_ts_config_next_to_pkg_json( + sys: &TSys, + dir_path: &Path, +) -> Option { + let path = dir_path.join("tsconfig.json"); + let warn = |err: &dyn std::fmt::Display| { + let path = path.display(); + log::warn!("Failed to read tsconfig.json from {}: {}", path, err); + }; + let text = sys + .fs_read_to_string(&path) + .inspect_err(|e| { + if !matches!(e.kind(), ErrorKind::NotFound | ErrorKind::IsADirectory) { + warn(e) + } + }) + .ok()?; + let url = url_from_file_path(&path).inspect_err(|e| warn(e)).ok()?; + let config = ConfigFile::new(&text, url).inspect_err(|e| warn(e)).ok()?; + config.to_compiler_options().inspect_err(|e| warn(e)).ok()? +} + +pub enum TaskOrScript<'a> { + /// A task from a deno.json. + Task(&'a IndexMap, &'a TaskDefinition), + /// A script from a package.json. 
+ Script(&'a IndexMap, &'a str), +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WorkspaceMemberTasksConfigFile { + pub folder_url: Url, + pub tasks: IndexMap, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WorkspaceMemberTasksConfig { + pub deno_json: Option>, + pub package_json: Option>, +} + +impl WorkspaceMemberTasksConfig { + pub fn with_only_pkg_json(self) -> Self { + WorkspaceMemberTasksConfig { + deno_json: None, + package_json: self.package_json, + } + } + + pub fn is_empty(&self) -> bool { + self + .deno_json + .as_ref() + .map(|d| d.tasks.is_empty()) + .unwrap_or(true) + && self + .package_json + .as_ref() + .map(|d| d.tasks.is_empty()) + .unwrap_or(true) + } + + pub fn task_names(&self) -> impl Iterator { + self + .deno_json + .as_ref() + .into_iter() + .flat_map(|d| d.tasks.keys()) + .chain( + self + .package_json + .as_ref() + .into_iter() + .flat_map(|d| d.tasks.keys()) + .filter(|pkg_json_key| { + self + .deno_json + .as_ref() + .map(|d| !d.tasks.contains_key(pkg_json_key.as_str())) + .unwrap_or(true) + }), + ) + .map(|s| s.as_str()) + } + + pub fn tasks_count(&self) -> usize { + self.deno_json.as_ref().map(|d| d.tasks.len()).unwrap_or(0) + + self + .package_json + .as_ref() + .map(|d| d.tasks.len()) + .unwrap_or(0) + } + + pub fn task(&self, name: &str) -> Option<(&Url, TaskOrScript)> { + self + .deno_json + .as_ref() + .and_then(|config| { + config + .tasks + .get(name) + .map(|t| (&config.folder_url, TaskOrScript::Task(&config.tasks, t))) + }) + .or_else(|| { + self.package_json.as_ref().and_then(|config| { + config.tasks.get(name).map(|task| { + ( + &config.folder_url, + TaskOrScript::Script(&config.tasks, task), + ) + }) + }) + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct WorkspaceTasksConfig { + pub root: Option, + pub member: Option, +} + +impl WorkspaceTasksConfig { + pub fn with_only_pkg_json(self) -> Self { + WorkspaceTasksConfig { + root: self.root.map(|c| c.with_only_pkg_json()), + member: self.member.map(|c| c.with_only_pkg_json()), + } + } + + pub fn task_names(&self) -> impl Iterator { + self + .member + .as_ref() + .into_iter() + .flat_map(|r| r.task_names()) + .chain( + self + .root + .as_ref() + .into_iter() + .flat_map(|m| m.task_names()) + .filter(|root_key| { + self + .member + .as_ref() + .map(|m| m.task(root_key).is_none()) + .unwrap_or(true) + }), + ) + } + + pub fn task(&self, name: &str) -> Option<(&Url, TaskOrScript)> { + self + .member + .as_ref() + .and_then(|m| m.task(name)) + .or_else(|| self.root.as_ref().and_then(|r| r.task(name))) + } + + pub fn is_empty(&self) -> bool { + self.root.as_ref().map(|r| r.is_empty()).unwrap_or(true) + && self.member.as_ref().map(|r| r.is_empty()).unwrap_or(true) + } + + pub fn tasks_count(&self) -> usize { + self.root.as_ref().map(|r| r.tasks_count()).unwrap_or(0) + + self.member.as_ref().map(|r| r.tasks_count()).unwrap_or(0) + } +} + +fn combine_patterns( + root_patterns: FilePatterns, + member_patterns: FilePatterns, +) -> FilePatterns { + FilePatterns { + include: { + match root_patterns.include { + Some(root) => { + let filtered_root = + root.into_path_or_patterns().into_iter().filter(|p| { + match p.base_path() { + Some(base) => base.starts_with(&member_patterns.base), + None => true, + } + }); + match member_patterns.include { + Some(member) => Some( + filtered_root + .chain(member.into_path_or_patterns()) + .collect(), + ), + None => { + let matching_root = filtered_root.collect::>(); + if matching_root.is_empty() { + // member was None and nothing in the root 
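// A minimal sketch of the lookup order implemented by
// `WorkspaceMemberTasksConfig::task()` and `task_names()` above: a deno.json
// task shadows a package.json script with the same name. IndexMap comes from
// the indexmap crate already used in this file; the values here are plain
// strings rather than TaskDefinition entries.
fn lookup_task<'a>(
  deno_json_tasks: &'a IndexMap<String, String>,
  package_json_scripts: &'a IndexMap<String, String>,
  name: &str,
) -> Option<&'a str> {
  deno_json_tasks
    .get(name)
    .or_else(|| package_json_scripts.get(name))
    .map(|task| task.as_str())
}

fn list_task_names<'a>(
  deno_json_tasks: &'a IndexMap<String, String>,
  package_json_scripts: &'a IndexMap<String, String>,
) -> Vec<&'a str> {
  deno_json_tasks
    .keys()
    .map(|name| name.as_str())
    .chain(
      package_json_scripts
        .keys()
        .filter(|name| !deno_json_tasks.contains_key(name.as_str()))
        .map(|name| name.as_str()),
    )
    .collect()
}

#[test]
fn deno_json_task_shadows_package_json_script_sketch() {
  let deno_json =
    IndexMap::from([("build".to_string(), "deno task compile".to_string())]);
  let package_json = IndexMap::from([
    ("build".to_string(), "node build.js".to_string()),
    ("test".to_string(), "node test.js".to_string()),
  ]);
  assert_eq!(
    lookup_task(&deno_json, &package_json, "build"),
    Some("deno task compile")
  );
  assert_eq!(list_task_names(&deno_json, &package_json), ["build", "test"]);
}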
include list + // has a base within this member, so use None to discover + // files in here + None + } else { + Some(matching_root) + } + } + } + .map(PathOrPatternSet::new) + } + None => member_patterns.include, + } + }, + exclude: { + // have the root excludes at the front because they're lower priority + let patterns = root_patterns + .exclude + .into_path_or_patterns() + .into_iter() + .filter(|p| match p { + PathOrPattern::Path(path) | + PathOrPattern::NegatedPath(path) => path.starts_with(&member_patterns.base), + PathOrPattern::RemoteUrl(_) | + // always include patterns because they may be something like ./**/*.ts in the root + PathOrPattern::Pattern(_) => true, + }) + .chain(member_patterns.exclude.into_path_or_patterns()) + .collect::>(); + PathOrPatternSet::new(patterns) + }, + base: member_patterns.base, + } +} + +fn combine_files_config_with_cli_args( + files_config: &mut FilePatterns, + cli_arg_patterns: FilePatterns, +) { + if cli_arg_patterns.base.starts_with(&files_config.base) + || !files_config.base.starts_with(&cli_arg_patterns.base) + { + files_config.base = cli_arg_patterns.base; + } + if let Some(include) = cli_arg_patterns.include { + if !include.inner().is_empty() { + files_config.include = Some(include); + } + } + if !cli_arg_patterns.exclude.inner().is_empty() { + files_config.exclude = cli_arg_patterns.exclude; + } +} + +struct CombineOptionVecsWithOverride<'a, T: Clone> { + root: Option>, + member: Option>>, + member_override_root: Option<&'a Vec>, +} + +fn combine_option_vecs_with_override( + opts: CombineOptionVecsWithOverride, +) -> Option> { + let root = opts.root.map(|r| { + let member_override_root = opts + .member_override_root + .map(|p| p.iter().collect::>()) + .unwrap_or_default(); + r.into_iter() + .filter(|p| !member_override_root.contains(p)) + .collect::>() + }); + match (root, opts.member) { + (Some(root), Some(member)) => { + let capacity = root.len() + member.len(); + Some(match member { + Cow::Owned(m) => { + remove_duplicates_iterator(root.into_iter().chain(m), capacity) + } + Cow::Borrowed(m) => remove_duplicates_iterator( + root.into_iter().chain(m.iter().map(|c| (*c).clone())), + capacity, + ), + }) + } + (Some(root), None) => Some(root), + (None, Some(member)) => Some(match member { + Cow::Owned(m) => m, + Cow::Borrowed(m) => m.iter().map(|c| (*c).clone()).collect(), + }), + (None, None) => None, + } +} + +fn combine_option_vecs( + root_option: Option>, + member_option: Option>, +) -> Option> { + match (root_option, member_option) { + (Some(root), Some(member)) => { + if root.is_empty() { + return Some(member); + } + if member.is_empty() { + return Some(root); + } + let capacity = root.len() + member.len(); + Some(remove_duplicates_iterator( + root.into_iter().chain(member), + capacity, + )) + } + (Some(root), None) => Some(root), + (None, Some(member)) => Some(member), + (None, None) => None, + } +} + +fn remove_duplicates_iterator( + iterator: impl IntoIterator, + capacity: usize, +) -> Vec { + let mut seen = HashSet::with_capacity(capacity); + let mut result = Vec::with_capacity(capacity); + for item in iterator { + if seen.insert(item.clone()) { + result.push(item); + } + } + result +} + +fn parent_specifier_str(specifier: &str) -> Option<&str> { + let specifier = specifier.strip_suffix('/').unwrap_or(specifier); + if let Some(index) = specifier.rfind('/') { + Some(&specifier[..index + 1]) + } else { + None + } +} + +fn is_valid_jsr_pkg_name(name: &str) -> bool { + let jsr = deno_semver::jsr::JsrPackageReqReference::from_str(&format!( 
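// A minimal sketch of `combine_option_vecs_with_override` above as it is used
// for lint rules: entries that the member explicitly moved to the opposite
// list are dropped from the root list, then the root and member lists are
// concatenated and de-duplicated. This mirrors the expectations in
// test_root_member_lint_combinations later in this file.
fn combine_with_override(
  root: Vec<String>,
  member: Vec<String>,
  member_override_root: &[String],
) -> Vec<String> {
  let mut seen = std::collections::HashSet::new();
  root
    .into_iter()
    .filter(|item| !member_override_root.contains(item))
    .chain(member)
    .filter(|item| seen.insert(item.clone()))
    .collect()
}

#[test]
fn member_rules_override_root_rules_sketch() {
  // root: include ["rule1"], exclude ["rule2"]; member: include ["rule2"]
  let include = combine_with_override(
    vec!["rule1".to_string()],
    vec!["rule2".to_string()],
    &[], // the member has no "exclude", so nothing is removed from the root include
  );
  assert_eq!(include, ["rule1", "rule2"]);
  let exclude = combine_with_override(
    vec!["rule2".to_string()],
    Vec::new(),
    &["rule2".to_string()], // the member "include" removes "rule2" from the root exclude
  );
  assert!(exclude.is_empty());
}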
+ "jsr:{}@*", + name + )); + match jsr { + Ok(jsr) => jsr.sub_path().is_none(), + Err(_) => false, + } +} + +#[cfg(test)] +pub mod test { + use std::cell::RefCell; + use std::collections::HashMap; + + use deno_path_util::normalize_path; + use deno_path_util::url_from_directory_path; + use deno_path_util::url_from_file_path; + use pretty_assertions::assert_eq; + use serde_json::json; + use sys_traits::impls::InMemorySys; + + use super::*; + use crate::assert_contains; + use crate::deno_json::BracePosition; + use crate::deno_json::BracketPosition; + use crate::deno_json::DenoJsonCache; + use crate::deno_json::MultiLineParens; + use crate::deno_json::NewLineKind; + use crate::deno_json::NextControlFlowPosition; + use crate::deno_json::OperatorPosition; + use crate::deno_json::ProseWrap; + use crate::deno_json::QuoteProps; + use crate::deno_json::SeparatorKind; + use crate::deno_json::SingleBodyPosition; + use crate::deno_json::TrailingCommas; + use crate::deno_json::UseBraces; + use crate::glob::FileCollector; + use crate::glob::GlobPattern; + use crate::glob::PathKind; + use crate::glob::PathOrPattern; + + pub struct UnreachableSys; + + impl sys_traits::BaseFsMetadata for UnreachableSys { + type Metadata = sys_traits::impls::RealFsMetadata; + + #[doc(hidden)] + fn base_fs_metadata( + &self, + _path: &Path, + ) -> std::io::Result { + unreachable!() + } + + #[doc(hidden)] + fn base_fs_symlink_metadata( + &self, + _path: &Path, + ) -> std::io::Result { + unreachable!() + } + } + + impl sys_traits::BaseFsRead for UnreachableSys { + fn base_fs_read( + &self, + _path: &Path, + ) -> std::io::Result> { + unreachable!() + } + } + + fn root_dir() -> PathBuf { + if cfg!(windows) { + PathBuf::from("C:\\Users\\user") + } else { + PathBuf::from("/home/user") + } + } + + #[test] + fn test_empty_workspaces() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": [] + }), + ); + sys.fs_insert_json( + root_dir().join("sub_dir").join("deno.json"), + json!({ + "workspace": [] + }), + ); + + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir().join("sub_dir")]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .unwrap(); + + assert_eq!( + workspace_dir + .workspace + .deno_jsons() + .map(|d| d.specifier.to_file_path().unwrap()) + .collect::>(), + vec![root_dir().join("sub_dir").join("deno.json")] + ); + } + + #[test] + fn test_duplicate_members() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member/a", "./member/../member/a"], + }), + ); + sys.fs_insert_json(root_dir().join("member/a/deno.json"), json!({})); + + let workspace_config_err = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .err() + .unwrap(); + + assert_contains!( + workspace_config_err.to_string(), + "Cannot specify a workspace member twice ('./member/../member/a')." 
+ ); + } + + #[test] + fn test_workspace_invalid_self_reference() { + for reference in [".", "../sub_dir"] { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("sub_dir").join("deno.json"), + json!({ + "workspace": [reference], + }), + ); + + let workspace_config_err = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir().join("sub_dir")]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .err() + .unwrap(); + + assert_contains!( + workspace_config_err.to_string(), + &format!("Remove the reference to the current config file (\"{reference}\") in \"workspaces\".") + ); + } + } + + #[test] + fn test_workspaces_outside_root_config_dir() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["../a"] + }), + ); + + let workspace_config_err = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .err() + .unwrap(); + + assert_contains!( + workspace_config_err.to_string(), + "Workspace member must be nested in a directory under the workspace." + ); + } + + #[test] + fn test_workspaces_json_jsonc() { + let sys = InMemorySys::default(); + let config_text = json!({ + "workspace": [ + "./a", + "./b", + ], + }); + let config_text_a = json!({ + "name": "a", + "version": "0.1.0" + }); + let config_text_b = json!({ + "name": "b", + "version": "0.2.0" + }); + + sys.fs_insert_json(root_dir().join("deno.json"), config_text); + sys.fs_insert_json(root_dir().join("a/deno.json"), config_text_a); + sys.fs_insert_json(root_dir().join("b/deno.jsonc"), config_text_b); + + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .unwrap(); + assert_eq!(workspace_dir.workspace.config_folders.len(), 3); + } + + #[test] + fn test_tasks() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member", "./pkg_json"], + "tasks": { + "hi": "echo hi", + "overwrite": "echo overwrite" + } + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "tasks": { + "overwrite": "echo overwritten", + "bye": "echo bye" + } + }), + ); + sys.fs_insert_json( + root_dir().join("pkg_json/package.json"), + json!({ + "scripts": { + "script": "echo 1" + } + }), + ); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + // start at root for this test + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + ..Default::default() + }, + ) + .unwrap(); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let root_deno_json = Some(WorkspaceMemberTasksConfigFile { + folder_url: url_from_directory_path(&root_dir()).unwrap(), + tasks: IndexMap::from([ + ("hi".to_string(), "echo hi".into()), + ("overwrite".to_string(), "echo overwrite".into()), + ]), + }); + let root = Some(WorkspaceMemberTasksConfig { + deno_json: root_deno_json.clone(), + package_json: None, + }); + // root + { + let tasks_config = workspace_dir.to_tasks_config().unwrap(); + assert_eq!( + tasks_config, + WorkspaceTasksConfig { + root: None, + // the root context will have the root config as the member config + member: root.clone(), + } + ); + assert_eq!( + tasks_config.task_names().collect::>(), + ["hi", "overwrite"] + ); + } + // member + { + let member_dir = 
workspace_dir.workspace.resolve_member_dir( + &url_from_directory_path(&root_dir().join("member/deno.json")).unwrap(), + ); + let tasks_config = member_dir.to_tasks_config().unwrap(); + assert_eq!( + tasks_config, + WorkspaceTasksConfig { + root: root.clone(), + member: Some(WorkspaceMemberTasksConfig { + deno_json: Some(WorkspaceMemberTasksConfigFile { + folder_url: url_from_directory_path(&root_dir().join("member")) + .unwrap(), + tasks: IndexMap::from([ + ("overwrite".to_string(), "echo overwritten".into()), + ("bye".to_string(), "echo bye".into()), + ]), + }), + package_json: None, + }), + } + ); + assert_eq!( + tasks_config.task_names().collect::>(), + ["overwrite", "bye", "hi"] + ); + } + // pkg json + { + let member_dir = workspace_dir.workspace.resolve_member_dir( + &url_from_directory_path(&root_dir().join("pkg_json/package.json")) + .unwrap(), + ); + let tasks_config = member_dir.to_tasks_config().unwrap(); + assert_eq!( + tasks_config, + WorkspaceTasksConfig { + root: None, + member: Some(WorkspaceMemberTasksConfig { + deno_json: root_deno_json.clone(), + package_json: Some(WorkspaceMemberTasksConfigFile { + folder_url: url_from_directory_path(&root_dir().join("pkg_json")) + .unwrap(), + tasks: IndexMap::from([( + "script".to_string(), + "echo 1".to_string() + )]), + }), + }) + } + ); + assert_eq!( + tasks_config.task_names().collect::>(), + ["hi", "overwrite", "script"] + ); + } + } + + #[test] + fn test_root_member_compiler_options() { + let sys = in_memory_fs_for_root_and_member( + json!({ + "compilerOptions": { + "checkJs": false + }, + }), + json!({ + "compilerOptions": { + "checkJs": true, + "types": ["./types.d.ts"], + "jsx": "react-jsx", + "jsxImportSource": "npm:react", + "jsxImportSourceTypes": "npm:@types/react", + }, + }), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + assert_eq!( + workspace_dir.to_compiler_option_types().unwrap(), + vec![( + Url::from_file_path(root_dir().join("member/deno.json")).unwrap(), + vec!["./types.d.ts".to_string()] + )], + ); + assert_eq!( + workspace_dir + .to_maybe_jsx_import_source_config() + .unwrap() + .unwrap(), + JsxImportSourceConfig { + module: "jsx-runtime".to_string(), + import_source: Some(JsxImportSourceSpecifierConfig { + specifier: "npm:react".to_string(), + base: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap() + }), + import_source_types: Some(JsxImportSourceSpecifierConfig { + specifier: "npm:@types/react".to_string(), + base: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap() + }), + }, + ); + assert_eq!(workspace_dir.check_js(), true); + assert_eq!( + workspace_dir + .to_resolved_ts_config(&sys, TsConfigType::Emit) + .unwrap(), + TsConfigWithIgnoredOptions { + ts_config: TsConfig(json!({ + "allowImportingTsExtensions": true, + "checkJs": true, + "emitDecoratorMetadata": false, + "experimentalDecorators": false, + "importsNotUsedAsValues": "remove", + "inlineSourceMap": true, + "inlineSources": true, + "sourceMap": false, + "jsx": "react-jsx", + "jsxFactory": "React.createElement", + "jsxFragmentFactory": "React.Fragment", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "resolveJsonModule": true, + "jsxImportSource": "npm:react" + })), + ignored_options: Vec::new(), + } + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + } + + #[test] + fn test_compiler_options_deno_json() { + let sys = InMemorySys::default(); + sys.fs_insert_json(root_dir().join("deno.json"), json!({})); + sys.fs_insert_json( + 
root_dir().join("tsconfig.json"), + json!({ + "compilerOptions": { + "lib": ["dom", "esnext"], + }, + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let raw = workspace_dir.to_raw_user_provided_tsconfig(&sys).unwrap(); + assert_eq!( + raw.ts_config.0.get("lib").unwrap().clone(), + json!(["dom", "esnext"]) + ); + } + + #[test] + fn test_compiler_options_deno_json_has_compiler_options() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "compilerOptions": { + "lib": ["dom"] + } + }), + ); + sys.fs_insert_json( + root_dir().join("tsconfig.json"), + json!({ + "compilerOptions": { + "strict": false, + "lib": ["dom", "esnext"], + }, + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let raw = workspace_dir.to_raw_user_provided_tsconfig(&sys).unwrap(); + assert_eq!(raw.ts_config.0, json!({ "lib": ["dom"] })); + } + + #[test] + fn test_compiler_options_not_discovered_member_deno_json() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"], + "compilerOptions": { + "strict": false, + } + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "name": "member", + "exports": ".", + }), + ); + sys.fs_insert_json( + root_dir().join("member/tsconfig.json"), + json!({ + "compilerOptions": { + "lib": ["dom", "esnext"], + }, + }), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + let raw = workspace_dir.to_raw_user_provided_tsconfig(&sys).unwrap(); + // we currently don't discover tsconfigs in member folders because we + // need to decide how this is going to work. For example, what happens + // if the root folder has a tsconfig.json, but also compilerOptions so it + // no longer loads the compilerOptions and then the member just has a + // tsconfig.json? Should it load the tsconfig in the member directory? 
+ assert_eq!(raw.ts_config.0, json!({ "strict": false })); + } + + #[test] + fn test_compiler_options_from_member_ts_config() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"], + "compilerOptions": { + "strict": false, + } + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "name": "member", + "exports": ".", + }), + ); + sys.fs_insert_json( + root_dir().join("member/package.json"), + json!({ + "name": "member", + }), + ); + sys.fs_insert_json( + root_dir().join("member/tsconfig.json"), + json!({ + "compilerOptions": { + "lib": ["dom", "esnext"], + }, + }), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + let raw = workspace_dir.to_raw_user_provided_tsconfig(&sys).unwrap(); + assert_eq!(raw.ts_config.0, json!({ "strict": false })); + } + + #[test] + fn test_compiler_options_from_root_and_member_ts_configs() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member"], + }), + ); + sys.fs_insert_json( + root_dir().join("tsconfig.json"), + json!({ + "compilerOptions": { + "lib": ["dom", "esnext"], + }, + }), + ); + sys.fs_insert_json( + root_dir().join("member/package.json"), + json!({ "name": "member" }), + ); + // we don't currently discover tsconfigs in workspace members + sys.fs_insert_json( + root_dir().join("member/tsconfig.json"), + json!({ + "compilerOptions": { + "jsx": "react-dev", + }, + }), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + let raw = workspace_dir.to_raw_user_provided_tsconfig(&sys).unwrap(); + assert_eq!( + raw.ts_config.0, + json!({ + "lib": ["dom", "esnext"] }) + ); + } + + #[test] + fn test_compiler_options_from_root_ts_config() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "name": "member", + }), + ); + sys.fs_insert_json( + root_dir().join("tsconfig.json"), + json!({ + "compilerOptions": { + "lib": ["dom", "esnext"], + }, + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!( + workspace_dir + .to_resolved_ts_config( + &sys, + TsConfigType::Check { + lib: deno_json::TsTypeLib::DenoWindow + } + ) + .unwrap(), + TsConfigWithIgnoredOptions { + ts_config: TsConfig(json!({ + "allowJs": true, + "allowImportingTsExtensions": true, + "allowSyntheticDefaultImports": true, + "checkJs": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": false, + "incremental": true, + "jsx": "react", + "importsNotUsedAsValues": "remove", + "inlineSourceMap": true, + "inlineSources": true, + "isolatedModules": true, + "lib": ["dom", "esnext"], + "module": "NodeNext", + "moduleResolution": "NodeNext", + "moduleDetection": "force", + "noEmit": true, + "noImplicitOverride": true, + "resolveJsonModule": true, + "sourceMap": false, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "internal:///.tsbuildinfo", + "useDefineForClassFields": true, + })), + ignored_options: Vec::new(), + } + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + } + + #[test] + fn test_root_member_import_map() { + let workspace_dir = workspace_for_root_and_member_with_fs( + json!({ + "importMap": "./other.json", + }), + json!({ + "importMap": "./member.json", + }), + |fs| { + fs.fs_insert_json(root_dir().join("other.json"), json!({})); + fs.fs_insert_json(root_dir().join("member/member.json"), json!({})); + }, + ); + 
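// A minimal sketch of the rule behind the RootOnlyOption diagnostics asserted
// in the surrounding tests: options that only take effect at the workspace
// root (the list below is collected from the tests in this module) produce a
// diagnostic when a member config sets them, and the member value is ignored.
fn root_only_violations<'a>(member_config_keys: &[&'a str]) -> Vec<&'a str> {
  const ROOT_ONLY: &[&str] = &[
    "importMap",
    "links",
    "scopes",
    "lock",
    "nodeModulesDir",
    "unstable",
    "vendor",
    "workspace",
    "lint.report",
  ];
  member_config_keys
    .iter()
    .copied()
    .filter(|key| ROOT_ONLY.contains(key))
    .collect()
}

#[test]
fn root_only_violations_sketch() {
  assert_eq!(
    root_only_violations(&["importMap", "fmt", "tasks"]),
    ["importMap"]
  );
}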
assert_eq!( + workspace_dir + .workspace + .to_import_map_path() + .unwrap() + .unwrap(), + root_dir().join("other.json"), + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("importMap"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }] + ); + } + + #[test] + fn test_root_member_link() { + let workspace_dir = workspace_for_root_and_member_with_fs( + json!({ + "links": ["../dir"], + }), + json!({ + "links": [ + "../../dir" + ], + }), + |fs| { + fs.fs_insert_json(root_dir().join("../dir/deno.json"), json!({})); + }, + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("links"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }] + ); + } + + #[test] + fn test_link_of_link() { + let workspace_dir = workspace_for_root_and_member_with_fs( + json!({ + "links": ["../dir"], + }), + json!({}), + |fs| { + fs.fs_insert_json( + root_dir().join("../dir/deno.json"), + json!({ + "links": ["./subdir"] // will be ignored + }), + ); + }, + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("links"), + config_url: url_from_directory_path(&root_dir()) + .unwrap() + .join("../dir/deno.json") + .unwrap(), + }] + ); + } + + #[test] + fn test_link_not_exists() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "links": ["./member"] + }), + ); + let err = workspace_at_start_dir_err(&sys, &root_dir()); + match err.into_kind() { + WorkspaceDiscoverErrorKind::ResolveLink { link, base, source } => { + assert_eq!(link, "./member"); + assert_eq!(base, url_from_directory_path(&root_dir()).unwrap()); + match source.into_kind() { + ResolveWorkspaceLinkErrorKind::NotFound { dir_url } => { + assert_eq!( + dir_url, + url_from_directory_path(&root_dir().join("member")).unwrap() + ); + } + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + + #[test] + fn test_link_workspace_member() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"], + "links": ["./member"] + }), + ); + sys.fs_insert_json(root_dir().join("member/deno.json"), json!({})); + let err = workspace_at_start_dir_err(&sys, &root_dir()); + match err.into_kind() { + WorkspaceDiscoverErrorKind::ResolveLink { link, base, source } => { + assert_eq!(link, "./member"); + assert_eq!(base, url_from_directory_path(&root_dir()).unwrap()); + assert!(matches!( + source.into_kind(), + ResolveWorkspaceLinkErrorKind::WorkspaceMemberNotAllowed + )); + } + _ => unreachable!(), + } + } + + #[test] + fn test_link_npm_package() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("pkg/deno.json"), + json!({ + "links": ["../dir"] + }), + ); + sys.fs_insert_json(root_dir().join("dir/package.json"), json!({})); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir().join("pkg")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let link_folders = workspace_dir + .workspace + .link_folders() + .values() + .collect::>(); + assert_eq!(link_folders.len(), 1); + assert_eq!( + link_folders[0].pkg_json.as_ref().unwrap().specifier(), + url_from_file_path(&root_dir().join("dir/package.json")).unwrap() + ) + } + + #[test] + fn test_link_absolute_path() { + let root_path = 
root_dir().join("../dir"); + let workspace_dir = workspace_for_root_and_member_with_fs( + json!({ + "links": [root_path.to_string_lossy().to_string()], + }), + json!({}), + |fs| { + fs.fs_insert_json(root_dir().join("../dir/deno.json"), json!({})); + }, + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let link_folders = workspace_dir + .workspace + .link_folders() + .values() + .collect::>(); + assert_eq!(link_folders.len(), 1); + assert_eq!( + link_folders[0].deno_json.as_ref().unwrap().specifier, + url_from_file_path(&root_dir().join("../dir/deno.json")).unwrap() + ) + } + + #[test] + fn test_root_member_imports_and_scopes() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "imports": { + "@scope/pkg": "jsr:@scope/pkg@1" + }, + "scopes": { + "https://deno.land/x/": { + "@scope/pkg": "jsr:@scope/pkg@2" + } + } + }), + json!({ + "imports": { + "@scope/pkg": "jsr:@scope/pkg@3" + }, + // will ignore this scopes because it's not in the root + "scopes": { + "https://deno.land/x/other": { + "@scope/pkg": "jsr:@scope/pkg@4" + } + } + }), + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("scopes"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }] + ); + } + + #[test] + fn test_deprecated_patch() { + let workspace_dir = workspace_for_root_and_member_with_fs( + json!({ + "patch": ["../dir"], + }), + json!({}), + |fs| { + fs.fs_insert_json(root_dir().join("../dir/deno.json"), json!({})); + }, + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::DeprecatedPatch, + config_url: Url::from_file_path(root_dir().join("deno.json")).unwrap(), + }] + ); + assert_eq!(workspace_dir.workspace.link_folders().len(), 1); // should still work though + } + + #[test] + fn test_imports_with_import_map() { + let workspace_dir = workspace_for_root_and_member_with_fs( + json!({ + "imports": {}, + "importMap": "./other.json", + }), + json!({}), + |fs| { + fs.fs_insert_json(root_dir().join("other.json"), json!({})); + }, + ); + assert_eq!( + workspace_dir + .workspace + .to_import_map_path() + .unwrap() + .unwrap(), + root_dir().join("other.json") + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::ImportMapReferencingImportMap, + config_url: Url::from_file_path(root_dir().join("deno.json")).unwrap(), + }] + ); + } + + #[test] + fn test_root_import_map_with_member_imports_and_scopes() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "importMap": "./other.json" + }), + json!({ + "imports": { + "@scope/pkg": "jsr:@scope/pkg@3" + } + }), + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::MemberImportsScopesIgnored, + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }] + ); + } + + #[test] + fn test_root_member_exclude() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "exclude": [ + "./root", + "./member/vendor", + "./**/*.js" + ] + }), + json!({ + "exclude": [ + "./member_exclude", + // unexclude from root + "!./vendor" + ] + }), + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let lint_config = workspace_dir + .to_lint_config(FilePatterns::new_with_base(workspace_dir.dir_path())) + .unwrap(); + assert_eq!( + lint_config.files, + FilePatterns { + base: 
root_dir().join("member"), + include: None, + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir().join("member").join("vendor")), + PathOrPattern::Pattern( + GlobPattern::from_relative(&root_dir(), "./**/*.js").unwrap() + ), + PathOrPattern::Path(root_dir().join("member").join("member_exclude")), + PathOrPattern::NegatedPath(root_dir().join("member").join("vendor")), + ]), + } + ); + + // will match because it was unexcluded in the member + assert!(lint_config + .files + .matches_path(&root_dir().join("member/vendor"), PathKind::Directory)) + } + + #[test] + fn test_root_member_lint_combinations() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "lint": { + "report": "json", + "rules": { + "tags": ["tag1"], + "include": ["rule1"], + "exclude": ["rule2"], + }, + "plugins": ["jsr:@deno/test-plugin1", "jsr:@deno/test-plugin3"] + } + }), + json!({ + "lint": { + "report": "pretty", + "include": ["subdir"], + "rules": { + "tags": ["tag1"], + "include": ["rule2"], + }, + "plugins": [ + "jsr:@deno/test-plugin1", + "jsr:@deno/test-plugin2", + "!jsr:@deno/test-plugin3" + ] + } + }), + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("lint.report"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }] + ); + assert_eq!( + workspace_dir.workspace.to_lint_config().unwrap(), + WorkspaceLintConfig { + report: Some("json".to_string()), + } + ); + let lint_config = workspace_dir + .to_lint_config(FilePatterns::new_with_base(workspace_dir.dir_path())) + .unwrap(); + assert_eq!( + lint_config, + WorkspaceDirLintConfig { + rules: LintRulesConfig { + tags: Some(vec!["tag1".to_string()]), + include: Some(vec!["rule1".to_string(), "rule2".to_string()]), + exclude: Some(vec![]), + }, + plugins: vec![ + Url::parse("jsr:@deno/test-plugin1").unwrap(), + Url::parse("jsr:@deno/test-plugin2").unwrap(), + ], + files: FilePatterns { + base: root_dir().join("member/"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member").join("subdir") + )])), + exclude: Default::default(), + }, + }, + ); + + // check the root context + let root_ctx = workspace_dir + .workspace + .resolve_member_dir(&url_from_directory_path(&root_dir()).unwrap()); + let root_lint_config = root_ctx + .to_lint_config(FilePatterns::new_with_base(root_ctx.dir_path())) + .unwrap(); + assert_eq!( + root_lint_config, + WorkspaceDirLintConfig { + rules: LintRulesConfig { + tags: Some(vec!["tag1".to_string()]), + include: Some(vec!["rule1".to_string()]), + exclude: Some(vec!["rule2".to_string()]), + }, + plugins: vec![ + Url::parse("jsr:@deno/test-plugin1").unwrap(), + Url::parse("jsr:@deno/test-plugin3").unwrap(), + ], + files: FilePatterns { + base: root_dir(), + include: None, + // the workspace member will be excluded because that needs + // to be resolved separately + exclude: PathOrPatternSet::new(Vec::from([PathOrPattern::Path( + root_dir().join("member") + )])), + }, + }, + ); + } + + #[test] + fn test_root_member_fmt_combinations() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "fmt": { + "useTabs": true, + "indentWidth": 4, + "lineWidth": 80, + "proseWrap": "never", + "singleQuote": false, + "semiColons": false, + "quoteProps": "asNeeded", + "newLineKind": "auto", + "useBraces": "preferNone", + "bracePosition": "maintain", + "singleBodyPosition": "sameLine", + "nextControlFlowPosition": "nextLine", + "trailingCommas": "always", + 
"operatorPosition": "sameLine", + "jsx.bracketPosition": "sameLine", + "jsx.forceNewLinesSurroundingContent": false, + "jsx.multiLineParens": "prefer", + "typeLiteral.separatorKind": "comma", + "spaceAround": false, + "spaceSurroundingProperties": false, + } + }), + json!({ + "fmt": { + "exclude": ["subdir"], + "useTabs": false, + "indentWidth": 8, + "lineWidth": 120, + "proseWrap": "always", + "singleQuote": true, + "semiColons": true, + "quoteProps": "consistent", + "newLineKind": "lf", + "useBraces": "always", + "bracePosition": "nextLine", + "singleBodyPosition": "maintain", + "nextControlFlowPosition": "maintain", + "trailingCommas": "onlyMultiLine", + "operatorPosition": "nextLine", + "jsx.bracketPosition": "nextLine", + "jsx.forceNewLinesSurroundingContent": true, + "jsx.multiLineParens": "always", + "typeLiteral.separatorKind": "semiColon", + "spaceAround": true, + "spaceSurroundingProperties": true, + } + }), + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let fmt_config = workspace_dir + .to_fmt_config(FilePatterns::new_with_base(workspace_dir.dir_path())) + .unwrap(); + assert_eq!( + fmt_config, + FmtConfig { + options: FmtOptionsConfig { + use_tabs: Some(false), + line_width: Some(120), + indent_width: Some(8), + prose_wrap: Some(ProseWrap::Always), + single_quote: Some(true), + semi_colons: Some(true), + quote_props: Some(QuoteProps::Consistent), + new_line_kind: Some(NewLineKind::LineFeed), + use_braces: Some(UseBraces::Always), + brace_position: Some(BracePosition::NextLine), + single_body_position: Some(SingleBodyPosition::Maintain), + next_control_flow_position: Some(NextControlFlowPosition::Maintain), + trailing_commas: Some(TrailingCommas::OnlyMultiLine), + operator_position: Some(OperatorPosition::NextLine), + jsx_bracket_position: Some(BracketPosition::NextLine), + jsx_force_new_lines_surrounding_content: Some(true), + jsx_multi_line_parens: Some(MultiLineParens::Always), + type_literal_separator_kind: Some(SeparatorKind::SemiColon), + space_around: Some(true), + space_surrounding_properties: Some(true), + }, + files: FilePatterns { + base: root_dir().join("member"), + include: None, + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member").join("subdir") + )]), + }, + } + ); + + // check the root context + let root_ctx = workspace_dir + .workspace + .resolve_member_dir(&url_from_directory_path(&root_dir()).unwrap()); + let root_fmt_config = root_ctx + .to_fmt_config(FilePatterns::new_with_base(root_ctx.dir_path())) + .unwrap(); + assert_eq!( + root_fmt_config, + FmtConfig { + options: FmtOptionsConfig { + use_tabs: Some(true), + line_width: Some(80), + indent_width: Some(4), + prose_wrap: Some(ProseWrap::Never), + single_quote: Some(false), + semi_colons: Some(false), + quote_props: Some(QuoteProps::AsNeeded), + new_line_kind: Some(NewLineKind::Auto), + use_braces: Some(UseBraces::PreferNone), + brace_position: Some(BracePosition::Maintain), + single_body_position: Some(SingleBodyPosition::SameLine), + next_control_flow_position: Some(NextControlFlowPosition::NextLine), + trailing_commas: Some(TrailingCommas::Always), + operator_position: Some(OperatorPosition::SameLine), + jsx_bracket_position: Some(BracketPosition::SameLine), + jsx_force_new_lines_surrounding_content: Some(false), + jsx_multi_line_parens: Some(MultiLineParens::Prefer), + type_literal_separator_kind: Some(SeparatorKind::Comma), + space_around: Some(false), + space_surrounding_properties: Some(false), + }, + files: FilePatterns { + base: root_dir(), + 
include: None, + // the workspace member will be excluded because that needs + // to be resolved separately + exclude: PathOrPatternSet::new(Vec::from([PathOrPattern::Path( + root_dir().join("member") + )])), + }, + } + ); + } + + #[test] + fn test_root_member_bench_combinations() { + let workspace_dir = workspace_for_root_and_member( + json!({}), + json!({ + "bench": { + "exclude": ["subdir"], + } + }), + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let bench_config = workspace_dir + .to_bench_config(FilePatterns::new_with_base(workspace_dir.dir_path())) + .unwrap(); + assert_eq!( + bench_config, + BenchConfig { + files: FilePatterns { + base: root_dir().join("member"), + include: None, + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member").join("subdir") + )]), + }, + } + ); + + // check the root context + let root_ctx = workspace_dir + .workspace + .resolve_member_dir(&url_from_directory_path(&root_dir()).unwrap()); + let root_bench_config = root_ctx + .to_bench_config(FilePatterns::new_with_base(root_ctx.dir_path())) + .unwrap(); + assert_eq!( + root_bench_config, + BenchConfig { + files: FilePatterns { + base: root_dir(), + include: None, + // the workspace member will be excluded because that needs + // to be resolved separately + exclude: PathOrPatternSet::new(Vec::from([PathOrPattern::Path( + root_dir().join("member") + )])), + }, + } + ); + } + + #[test] + fn test_root_member_test_combinations() { + let workspace_dir = workspace_for_root_and_member( + json!({}), + json!({ + "test": { + "include": ["subdir"], + } + }), + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let config = workspace_dir + .to_test_config(FilePatterns::new_with_base(workspace_dir.dir_path())) + .unwrap(); + assert_eq!( + config, + TestConfig { + files: FilePatterns { + base: root_dir().join("member"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member").join("subdir") + )])), + exclude: Default::default(), + }, + } + ); + + // check the root context + let root_ctx = workspace_dir + .workspace + .resolve_member_dir(&url_from_directory_path(&root_dir()).unwrap()); + let root_test_config = root_ctx + .to_test_config(FilePatterns::new_with_base(root_ctx.dir_path())) + .unwrap(); + assert_eq!( + root_test_config, + TestConfig { + files: FilePatterns { + base: root_dir(), + include: None, + // the workspace member will be excluded because that needs + // to be resolved separately + exclude: PathOrPatternSet::new(Vec::from([PathOrPattern::Path( + root_dir().join("member") + )])), + }, + } + ); + } + + #[test] + fn test_root_member_publish_combinations() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "publish": { + "exclude": ["other"] + } + }), + json!({ + "publish": { + "include": ["subdir"], + }, + "exclude": [ + "./exclude_dir" + ], + }), + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let config = workspace_dir.to_publish_config().unwrap(); + assert_eq!( + config, + PublishConfig { + files: FilePatterns { + base: root_dir().join("member"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member").join("subdir") + )])), + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member").join("exclude_dir") + ),]), + }, + } + ); + + // check the root context + let root_publish_config = workspace_dir + .workspace + .resolve_member_dir(&url_from_directory_path(&root_dir()).unwrap()) + .to_publish_config() + 
.unwrap(); + assert_eq!( + root_publish_config, + PublishConfig { + files: FilePatterns { + base: root_dir(), + include: None, + exclude: PathOrPatternSet::new(Vec::from([ + PathOrPattern::Path(root_dir().join("other")), + // the workspace member will be excluded because that needs + // to be resolved separately + PathOrPattern::Path(root_dir().join("member")), + ])), + }, + } + ); + } + + #[test] + fn test_root_member_empty_config_resolves_excluded_members() { + let workspace_dir = workspace_for_root_and_member(json!({}), json!({})); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let expected_root_files = FilePatterns { + base: root_dir(), + include: None, + // the workspace member will be excluded because that needs + // to be resolved separately + exclude: PathOrPatternSet::new(Vec::from([PathOrPattern::Path( + root_dir().join("member"), + )])), + }; + let root_ctx = workspace_dir + .workspace + .resolve_member_dir(&url_from_directory_path(&root_dir()).unwrap()); + let expected_member_files = FilePatterns { + base: root_dir().join("member"), + include: None, + exclude: Default::default(), + }; + + for (expected_files, ctx) in [ + (expected_root_files, root_ctx), + (expected_member_files, workspace_dir), + ] { + assert_eq!( + ctx + .to_bench_config(FilePatterns::new_with_base(ctx.dir_path())) + .unwrap(), + BenchConfig { + files: expected_files.clone(), + } + ); + assert_eq!( + ctx + .to_fmt_config(FilePatterns::new_with_base(ctx.dir_path())) + .unwrap(), + FmtConfig { + options: Default::default(), + files: expected_files.clone(), + } + ); + assert_eq!( + ctx + .to_lint_config(FilePatterns::new_with_base(ctx.dir_path())) + .unwrap(), + WorkspaceDirLintConfig { + rules: Default::default(), + plugins: Default::default(), + files: expected_files.clone(), + }, + ); + assert_eq!( + ctx + .to_test_config(FilePatterns::new_with_base(ctx.dir_path())) + .unwrap(), + TestConfig { + files: expected_files.clone(), + } + ); + assert_eq!( + ctx.to_publish_config().unwrap(), + PublishConfig { + files: expected_files.clone(), + } + ); + } + } + + #[test] + fn test_root_member_root_only_in_member() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "unstable": ["byonm"], + "lock": false, + "nodeModulesDir": false, + "vendor": true, + }), + json!({ + "unstable": ["sloppy-imports"], + "lock": true, + "nodeModulesDir": "auto", + "vendor": false, + }), + ); + // ignores member config + assert_eq!( + workspace_dir.workspace.unstable_features(), + &["byonm".to_string()] + ); + assert!(workspace_dir.workspace.has_unstable("byonm")); + assert!(!workspace_dir.workspace.has_unstable("sloppy-imports")); + assert_eq!( + workspace_dir.workspace.resolve_lockfile_path().unwrap(), + None + ); + assert_eq!( + workspace_dir.workspace.node_modules_dir().unwrap(), + Some(NodeModulesDirMode::None) + ); + assert_eq!( + workspace_dir.workspace.resolve_lockfile_path().unwrap(), + None + ); + assert_eq!( + workspace_dir.workspace.vendor_dir_path().unwrap(), + &root_dir().join("vendor") + ); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![ + WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::DeprecatedNodeModulesDirOption { + previous: false, + suggestion: NodeModulesDirMode::Manual, + }, + config_url: Url::from_file_path(root_dir().join("deno.json")) + .unwrap(), + }, + WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("lock"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }, + WorkspaceDiagnostic { + kind: 
WorkspaceDiagnosticKind::RootOnlyOption("nodeModulesDir"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }, + WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("unstable"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }, + WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("vendor"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }, + ] + ); + } + + #[test] + fn test_root_member_node_modules_dir_suggestions() { + fn suggest( + previous: bool, + suggestion: NodeModulesDirMode, + ) -> WorkspaceDiagnostic { + WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::DeprecatedNodeModulesDirOption { + previous, + suggestion, + }, + config_url: Url::from_file_path(root_dir().join("deno.json")).unwrap(), + } + } + + let cases = [ + ( + json!({ + "unstable": ["byonm"], + "nodeModulesDir": true, + }), + true, + NodeModulesDirMode::Manual, + ), + ( + json!({ + "unstable": ["byonm"], + "nodeModulesDir": false, + }), + false, + NodeModulesDirMode::Manual, + ), + ( + json!({ + "nodeModulesDir": true, + }), + true, + NodeModulesDirMode::Auto, + ), + ( + json!({ + "nodeModulesDir": false, + }), + false, + NodeModulesDirMode::None, + ), + ]; + + for (config, previous, suggestion) in cases { + let workspace_dir = workspace_for_root_and_member(config, json!({})); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![suggest(previous, suggestion)] + ); + } + } + + #[test] + fn test_root_member_pkg_only_fields_on_workspace_root() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "name": "@scope/name", + "version": "1.0.0", + "exports": "./main.ts" + }), + json!({}), + ); + // this is fine because we can tell it's a package by it having name and exports fields + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + } + + #[test] + fn test_root_member_workspace_on_member() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "workspace": ["./other_dir"] + }), + ); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + // start at root for this test + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .unwrap(); + assert_eq!( + workspace_dir.workspace.diagnostics(), + vec![WorkspaceDiagnostic { + kind: WorkspaceDiagnosticKind::RootOnlyOption("workspace"), + config_url: Url::from_file_path(root_dir().join("member/deno.json")) + .unwrap(), + }] + ); + } + + #[test] + fn test_workspaces_property() { + run_single_json_diagnostics_test( + json!({ + "workspaces": ["./member"] + }), + vec![WorkspaceDiagnosticKind::InvalidWorkspacesOption], + ); + } + + #[test] + fn test_workspaces_missing_exports() { + run_single_json_diagnostics_test( + json!({ + "name": "@scope/name", + }), + vec![WorkspaceDiagnosticKind::MissingExports], + ); + } + + fn run_single_json_diagnostics_test( + json: serde_json::Value, + kinds: Vec, + ) { + let sys = InMemorySys::default(); + sys.fs_insert_json(root_dir().join("deno.json"), json); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .unwrap(); + assert_eq!( + workspace_dir.workspace.diagnostics(), + kinds + .into_iter() + .map(|kind| { + WorkspaceDiagnostic { + 
kind, + config_url: Url::from_file_path(root_dir().join("deno.json")) + .unwrap(), + } + }) + .collect::>() + ); + } + + #[test] + fn test_multiple_pkgs_same_name() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member1", "./member2"] + }), + ); + let pkg = json!({ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./main.ts", + }); + sys.fs_insert_json( + root_dir().join("member1").join("deno.json"), + pkg.clone(), + ); + sys.fs_insert_json( + root_dir().join("member2").join("deno.json"), + pkg.clone(), + ); + let err = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + ..Default::default() + }, + ) + .unwrap_err(); + match err.into_kind() { + WorkspaceDiscoverErrorKind::ResolveMember(err) => match err.into_kind() { + ResolveWorkspaceMemberErrorKind::DuplicatePackageName { + name, + deno_json_url, + other_deno_json_url, + } => { + assert_eq!(name, "@scope/pkg"); + assert_eq!( + deno_json_url, + Url::from_file_path(root_dir().join("member2").join("deno.json")) + .unwrap() + ); + assert_eq!( + other_deno_json_url, + Url::from_file_path(root_dir().join("member1").join("deno.json")) + .unwrap() + ); + } + _ => unreachable!(), + }, + _ => unreachable!(), + } + } + + #[test] + fn test_packages_for_publish_non_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/pkg"]); + } + + #[test] + fn test_packages_for_publish_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./a", "./b", "./c", "./d"] + }), + ); + sys.fs_insert_json( + root_dir().join("a/deno.json"), + json!({ + "name": "@scope/a", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + sys.fs_insert_json( + root_dir().join("b/deno.json"), + json!({ + "name": "@scope/b", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + sys.fs_insert_json( + root_dir().join("c/deno.json"), + // not a package + json!({}), + ); + sys.fs_insert_json( + root_dir().join("d/package.json"), + json!({ + "name": "pkg", + "version": "1.0.0", + }), + ); + // root + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/a", "@scope/b"]); + } + // member + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir().join("a")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/a"]); + } + // member, not a package + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir().join("c")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + assert!(jsr_pkgs.is_empty()); + } + // package.json + { + let workspace_dir 
= workspace_at_start_dir(&sys, &root_dir().join("d")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + assert!(jsr_pkgs.is_empty()); + + // while we're here, test this + assert_eq!( + workspace_dir + .workspace + .package_jsons() + .map(|p| p.dir_path().to_path_buf()) + .collect::>(), + vec![root_dir().join("d")] + ); + assert_eq!( + workspace_dir + .workspace + .npm_packages() + .into_iter() + .map(|p| p.pkg_json.dir_path().to_path_buf()) + .collect::>(), + vec![root_dir().join("d")] + ); + } + } + + #[test] + fn test_packages_for_publish_root_is_package() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "name": "@scope/root", + "version": "1.0.0", + "exports": "./main.ts", + "workspace": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + // in a member + { + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/pkg"]); + } + // at the root + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + // Only returns the root package because it allows for publishing + // this individually. If someone wants the behaviour of publishing + // the entire workspace then they should move each package to a descendant + // directory. 
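+ // so only the root package name is expected here, not the member's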
+ assert_eq!(names, vec!["@scope/root"]); + } + } + + #[test] + fn test_packages_for_publish_root_not_package() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + // the workspace is not a jsr package so publish the members + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/pkg"]); + } + + #[test] + fn test_packages_for_publish_npm_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./a", "./b", "./c", "./d"] + }), + ); + sys.fs_insert_json(root_dir().join("a/package.json"), json!({})); + sys.fs_insert_json( + root_dir().join("a/deno.json"), + json!({ + "name": "@scope/a", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + sys.fs_insert_json(root_dir().join("b/package.json"), json!({})); + sys.fs_insert_json( + root_dir().join("b/deno.json"), + json!({ + "name": "@scope/b", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + sys.fs_insert_json(root_dir().join("c/package.json"), json!({})); + sys.fs_insert_json( + root_dir().join("c/deno.json"), + // not a package + json!({}), + ); + sys.fs_insert_json( + root_dir().join("d/package.json"), + json!({ + "name": "pkg", + "version": "1.0.0", + }), + ); + // root + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/a", "@scope/b"]); + } + // member + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir().join("a")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/a"]); + } + // member, not a package + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir().join("c")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + assert!(jsr_pkgs.is_empty()); + } + // package.json + { + let workspace_dir = workspace_at_start_dir(&sys, &root_dir().join("d")); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + assert!(jsr_pkgs.is_empty()); + assert_eq!( + workspace_dir + .workspace + .npm_packages() + .into_iter() + .map(|p| p.pkg_json.dir_path().to_path_buf()) + .collect::>(), + vec![root_dir().join("d")] + ); + } + } + + #[test] + fn test_no_auto_discovery_node_modules_dir() { + let sys = InMemorySys::default(); + sys.fs_insert_json(root_dir().join("deno.json"), json!({})); + sys.fs_insert_json( + root_dir().join("node_modules/package/package.json"), + json!({ + "name": "@scope/pkg", + "version": "1.0.0" + }), + ); + let workspace_dir = workspace_at_start_dir( + &sys, + &root_dir().join("node_modules/package/sub_dir"), + ); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + 
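// the package.json under node_modules should not be auto-discovered +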
assert_eq!(workspace_dir.workspace.package_jsons().count(), 0); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 1); + } + + #[test] + fn test_deno_workspace_globs() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./packages/*"] + }), + ); + sys.fs_insert_json( + root_dir().join("packages/package-a/deno.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-b/deno.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-c/deno.jsonc"), + json!({}), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("packages")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 4); + } + + #[test] + fn test_deno_workspace_globs_with_package_json() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./packages/*", "./examples/*"] + }), + ); + sys.fs_insert_json( + root_dir().join("packages/package-a/deno.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-b/deno.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-c/deno.jsonc"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("examples/examples1/package.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("examples/examples2/package.json"), + json!({}), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("packages")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 4); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 2); + } + + #[test] + fn test_deno_workspace_negations() { + for negation in ["!ignored/package-c", "!ignored/**"] { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": [ + "**/*", + negation, + ] + }), + ); + sys.fs_insert_json( + root_dir().join("packages/package-a/deno.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-b/deno.jsonc"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("ignored/package-c/deno.jsonc"), + json!({}), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 3); + } + } + + #[test] + fn test_deno_workspace_member_no_config_file_error() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + // no deno.json in this folder, so should error + let err = workspace_at_start_dir_err(&sys, &root_dir().join("package")); + assert_eq!(err.to_string(), normalize_err_text("Could not find config file for workspace member in '[ROOT_DIR_URL]/member/'.")); + } + + #[test] + fn test_deno_workspace_member_deno_json_member_name() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member/deno.json"] + }), + ); + // no deno.json in this folder and the name was deno.json so give an error + let err = workspace_at_start_dir_err(&sys, &root_dir().join("package")); + assert_eq!(err.to_string(), normalize_err_text(concat!( + "Could not find config file for workspace member in '[ROOT_DIR_URL]/member/deno.json/'. 
", + "Ensure you specify the directory and not the configuration file in the workspace member." + ))); + } + + #[test] + fn test_deno_member_not_referenced_in_deno_workspace() { + fn assert_err(err: &WorkspaceDiscoverError, config_file_path: &Path) { + match err.as_kind() { + WorkspaceDiscoverErrorKind::ConfigNotWorkspaceMember { + workspace_url, + config_url, + } => { + assert_eq!( + workspace_url, + &url_from_directory_path(&root_dir()).unwrap() + ); + assert_eq!( + config_url, + &Url::from_file_path(config_file_path).unwrap() + ); + } + _ => unreachable!(), + } + } + + for file_name in ["deno.json", "deno.jsonc"] { + let config_file_path = root_dir().join("member-b").join(file_name); + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member-a"], + }), + ); + sys.fs_insert_json(root_dir().join("member-a/deno.json"), json!({})); + sys.fs_insert_json(config_file_path.clone(), json!({})); + let err = workspace_at_start_dir_err(&sys, &root_dir().join("member-b")); + assert_err(&err, &config_file_path); + + // try for when the config file is specified as well + let err = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::ConfigFile(&config_file_path), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + ..Default::default() + }, + ) + .unwrap_err(); + assert_err(&err, &config_file_path); + } + } + + #[test] + fn test_config_not_deno_workspace_member_non_natural_config_file_name() { + for file_name in ["other-name.json", "deno.jsonc"] { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member-a", "./member-b"], + }), + ); + sys.fs_insert_json(root_dir().join("member-a/deno.json"), json!({})); + // this is the "natural" config file that would be discovered by + // workspace discovery and since the file name specified does not + // match it, the workspace is not discovered and an error does not + // occur + sys.fs_insert_json(root_dir().join("member-b/deno.json"), json!({})); + let config_file_path = root_dir().join("member-b").join(file_name); + sys.fs_insert_json(config_file_path.clone(), json!({})); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::ConfigFile(&config_file_path), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + ..Default::default() + }, + ) + .unwrap(); + assert_eq!( + workspace_dir + .workspace + .deno_jsons() + .map(|c| c.specifier.to_file_path().unwrap()) + .collect::>(), + vec![config_file_path] + ); + } + } + + #[test] + fn test_config_workspace_non_natural_config_file_name() { + let sys = InMemorySys::default(); + let root_config_path = root_dir().join("deno-other.json"); + sys.fs_insert_json( + root_config_path.clone(), + json!({ + "workspace": ["./member-a"], + }), + ); + let member_a_config = root_dir().join("member-a/deno.json"); + sys.fs_insert_json(member_a_config.clone(), json!({})); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::ConfigFile(&root_config_path), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + ..Default::default() + }, + ) + .unwrap(); + assert_eq!( + workspace_dir + .workspace + .deno_jsons() + .map(|c| c.specifier.to_file_path().unwrap()) + .collect::>(), + vec![root_config_path, member_a_config] + ); + } + + #[test] + fn test_npm_package_not_referenced_in_deno_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"] 
+ }), + ); + sys.fs_insert_json(root_dir().join("member/deno.json"), json!({})); + sys.fs_insert_json(root_dir().join("package/package.json"), json!({})); + // npm package needs to be a member of the deno workspace + let err = workspace_at_start_dir_err(&sys, &root_dir().join("package")); + assert_eq!( + err.to_string(), + normalize_err_text( + "Config file must be a member of the workspace. + Config: [ROOT_DIR_URL]/package/package.json + Workspace: [ROOT_DIR_URL]/" + ) + ); + } + + #[test] + fn test_multiple_workspaces_npm_package_referenced_in_package_json_workspace() + { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./package"] + }), + ); + sys.fs_insert_json(root_dir().join("member/deno.json"), json!({})); + sys.fs_insert_json(root_dir().join("package/package.json"), json!({})); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("package")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 2); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 2); + } + + #[test] + fn test_npm_workspace_package_json_and_deno_json_ok() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member"] + }), + ); + sys.fs_insert_json(root_dir().join("member/deno.json"), json!({})); + sys.fs_insert_json(root_dir().join("member/package.json"), json!({})); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("package")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 1); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 2); + } + + #[test] + fn test_npm_workspace_member_deno_json_error() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member"] + }), + ); + // no package.json in this folder, so should error + sys.fs_insert_json(root_dir().join("member/deno.json"), json!({})); + let err = workspace_at_start_dir_err(&sys, &root_dir().join("package")); + assert_eq!(err.to_string(), normalize_err_text("Could not find package.json for workspace member in '[ROOT_DIR_URL]/member/'.")); + } + + #[test] + fn test_npm_workspace_member_no_config_file_error() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member"] + }), + ); + // no package.json in this folder, so should error + let err = workspace_at_start_dir_err(&sys, &root_dir().join("package")); + assert_eq!(err.to_string(), normalize_err_text("Could not find package.json for workspace member in '[ROOT_DIR_URL]/member/'.")); + } + + #[test] + fn test_npm_workspace_globs() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./packages/*"] + }), + ); + sys.fs_insert_json( + root_dir().join("packages/package-a/package.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-b/package.json"), + json!({}), + ); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("packages")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 3); + } + + #[test] + fn 
test_npm_workspace_ignores_vendor_folder() { + for (is_vendor, expected_count) in [(true, 3), (false, 4)] { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "vendor": is_vendor, + }), + ); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./**/*"] + }), + ); + sys.fs_insert_json( + root_dir().join("packages/package-a/package.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-b/package.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("vendor/package-c/package.json"), + json!({}), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!( + workspace_dir.workspace.package_jsons().count(), + expected_count + ); + } + } + + #[test] + fn test_npm_workspace_negations() { + for negation in ["!ignored/package-c", "!ignored/**"] { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": [ + "**/*", + negation, + ] + }), + ); + sys.fs_insert_json( + root_dir().join("packages/package-a/package.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("packages/package-b/package.json"), + json!({}), + ); + sys.fs_insert_json( + root_dir().join("ignored/package-c/package.json"), + json!({}), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 3); + } + } + + #[test] + fn test_npm_workspace_self_reference_and_duplicate_references_ok() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": [ + ".", + "./member", + "./member", + "**/*" + ] + }), + ); + sys.fs_insert_json(root_dir().join("member/package.json"), json!({})); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 2); + } + + #[test] + fn test_npm_workspace_start_deno_json_not_in_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./package"] + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "unstable": ["byonm"], + }), + ); + sys.fs_insert_json(root_dir().join("package/package.json"), json!({})); + // only resolves the member because it's not part of the workspace + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 1); + assert_eq!( + workspace_dir.workspace.root_dir().to_file_path().unwrap(), + root_dir().join("member") + ); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 0); + assert!(workspace_dir.workspace.has_unstable("byonm")); + assert_eq!( + workspace_dir.workspace.resolve_lockfile_path().unwrap(), + Some(root_dir().join("member/deno.lock")) + ); + } + + #[test] + fn test_npm_workspace_start_deno_json_part_of_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "lock": false, + "unstable": ["byonm"], + }), + ); + 
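// the member also needs a package.json to be picked up as an npm workspace member +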
sys.fs_insert_json(root_dir().join("member/package.json"), json!({})); + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + assert_eq!( + workspace_dir + .workspace + .diagnostics() + .into_iter() + .map(|d| d.kind) + .collect::>(), + vec![ + WorkspaceDiagnosticKind::RootOnlyOption("lock"), + WorkspaceDiagnosticKind::RootOnlyOption("unstable") + ] + ); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 1); + assert_eq!( + workspace_dir.workspace.root_dir().to_file_path().unwrap(), + root_dir() + ); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 2); + assert!(!workspace_dir.workspace.has_unstable("byonm")); + assert_eq!( + workspace_dir.workspace.resolve_lockfile_path().unwrap(), + Some(root_dir().join("deno.lock")) + ); + } + + #[test] + fn test_npm_workspace_start_deno_json_part_of_workspace_sub_folder() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ + "unstable": ["byonm"], + }), + ); + sys.fs_insert_json(root_dir().join("member/package.json"), json!({})); + sys.fs_insert("member/sub/sub_folder/sub/file.ts", ""); + let workspace_dir = workspace_at_start_dir( + &sys, + // note how we're starting in a sub folder of the member + &root_dir().join("member/sub/sub_folder/sub/"), + ); + assert_eq!( + workspace_dir + .workspace + .diagnostics() + .into_iter() + .map(|d| d.kind) + .collect::>(), + vec![WorkspaceDiagnosticKind::RootOnlyOption("unstable")] + ); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 1); + assert_eq!( + workspace_dir.workspace.root_dir().to_file_path().unwrap(), + root_dir() + ); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 2); + assert!(!workspace_dir.workspace.has_unstable("byonm")); + } + + #[test] + fn test_npm_workspace_start_deno_json_part_of_workspace_sub_folder_other_deno_json( + ) { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member", "./member/sub"] + }), + ); + sys.fs_insert_json( + root_dir().join("member/deno.json"), + json!({ "unstable": ["sloppy-imports"] }), + ); + sys.fs_insert_json(root_dir().join("member/package.json"), json!({})); + sys.fs_insert_json( + root_dir().join("member/sub/deno.json"), + json!({ "unstable": ["byonm"] }), + ); + sys.fs_insert_json(root_dir().join("member/sub/package.json"), json!({})); + sys.fs_insert("member/sub/sub_folder/sub/file.ts", ""); + let workspace_dir = workspace_at_start_dir( + &sys, + // note how we're starting in a sub folder of the member + &root_dir().join("member/sub/sub_folder/sub/"), + ); + assert_eq!(workspace_dir.workspace.diagnostics().len(), 2); // for each unstable + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 2); + assert_eq!( + workspace_dir.workspace.root_dir.to_file_path().unwrap(), + root_dir() + ); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 3); + assert!(!workspace_dir.workspace.has_unstable("sloppy-imports")); + assert!(!workspace_dir.workspace.has_unstable("byonm")); + } + + #[test] + fn test_npm_workspace_start_package_json_not_in_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./package"] + }), + ); + sys.fs_insert_json(root_dir().join("member/package.json"), json!({})); + sys.fs_insert_json(root_dir().join("package/package.json"), json!({})); + // only 
resolves the member because it's not part of the workspace + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 0); + assert_eq!( + workspace_dir.workspace.root_dir().to_file_path().unwrap(), + root_dir().join("member") + ); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 1); + } + + #[test] + fn test_resolve_multiple_dirs() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("workspace").join("deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys.fs_insert_json( + root_dir().join("workspace").join("member/deno.json"), + json!({ + "name": "@scope/pkg", + "version": "1.0.0", + "exports": "./main.ts", + }), + ); + let workspace_dir = workspace_at_start_dirs( + &sys, + &[ + root_dir().join("workspace/member"), + root_dir().join("other_dir"), // will be ignored because it's not in the workspace + ], + ) + .unwrap(); + assert_eq!(workspace_dir.workspace.diagnostics(), vec![]); + let jsr_pkgs = workspace_dir.jsr_packages_for_publish(); + let names = jsr_pkgs.iter().map(|p| p.name.as_str()).collect::>(); + assert_eq!(names, vec!["@scope/pkg"]); + } + + #[test] + fn test_npm_workspace_ignore_pkg_json_between_member_and_root() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member/nested"] + }), + ); + // will ignore this one + sys.fs_insert_json(root_dir().join("member/package.json"), json!({})); + sys + .fs_insert_json(root_dir().join("member/nested/package.json"), json!({})); + // only resolves the member because it's not part of the workspace + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member/nested")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 0); + assert_eq!( + workspace_dir + .workspace + .package_jsons() + .map(|p| p.path.clone()) + .collect::>(), + vec![ + root_dir().join("package.json"), + root_dir().join("member/nested/package.json"), + ] + ); + } + + #[test] + fn test_npm_workspace_ignore_deno_json_between_member_and_root() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ + "workspaces": ["./member/nested"] + }), + ); + // will ignore this one + sys.fs_insert_json(root_dir().join("member/deno.json"), json!({})); + sys + .fs_insert_json(root_dir().join("member/nested/package.json"), json!({})); + // only resolves the member because it's not part of the workspace + let workspace_dir = + workspace_at_start_dir(&sys, &root_dir().join("member/nested")); + assert_eq!(workspace_dir.workspace.diagnostics(), Vec::new()); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 0); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 2); + } + + #[test] + fn test_resolve_multiple_dirs_outside_config() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("workspace/deno.json"), + json!({ + "workspace": { + "members": ["./member"] + }, + }), + ); + sys + .fs_insert_json(root_dir().join("workspace/member/deno.json"), json!({})); + // this one will cause issues because it's not in the workspace + sys.fs_insert_json(root_dir().join("other_dir/deno.json"), json!({})); + let err = workspace_at_start_dirs( + &sys, + &[ + root_dir().join("workspace/member"), + root_dir().join("other_dir"), + ], + ) + .unwrap_err(); + 
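// the two start directories resolve to unrelated config files, so discovery errors +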
assert_eq!(err.to_string(), normalize_err_text("Command resolved to multiple config files. Ensure all specified paths are within the same workspace. + First: [ROOT_DIR_URL]/workspace/deno.json + Second: [ROOT_DIR_URL]/other_dir/deno.json")); + } + + #[test] + fn test_resolve_multiple_dirs_outside_workspace() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("workspace/deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys + .fs_insert_json(root_dir().join("workspace/member/deno.json"), json!({})); + // this one will cause issues because it's not in the workspace + sys.fs_insert_json( + root_dir().join("other_dir/deno.json"), + json!({ + "workspace": ["./member"] + }), + ); + sys + .fs_insert_json(root_dir().join("other_dir/member/deno.json"), json!({})); + let err = workspace_at_start_dirs( + &sys, + &[ + root_dir().join("workspace/member"), + root_dir().join("other_dir"), + ], + ) + .unwrap_err(); + assert_eq!(err.to_string(), normalize_err_text("Command resolved to multiple config files. Ensure all specified paths are within the same workspace. + First: [ROOT_DIR_URL]/workspace/deno.json + Second: [ROOT_DIR_URL]/other_dir/deno.json")); + } + + #[test] + fn test_specified_config_file_same_dir_discoverable_config_file() { + let sys = InMemorySys::default(); + // should not start discovering this deno.json because it + // should search for a workspace in the parent dir + sys.fs_insert_json(root_dir().join("sub_dir/deno.json"), json!({})); + let other_deno_json = root_dir().join("sub_dir/deno_other_name.json"); + sys.fs_insert_json(&other_deno_json, json!({})); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::ConfigFile(&other_deno_json), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + ..Default::default() + }, + ) + .unwrap(); + assert_eq!( + workspace_dir + .workspace + .deno_jsons() + .map(|d| d.specifier.clone()) + .collect::>(), + vec![Url::from_file_path(&other_deno_json).unwrap()] + ); + } + + #[test] + fn test_config_workspace() { + let sys = InMemorySys::default(); + let root_config_path = root_dir().join("deno.json"); + sys.fs_insert_json( + root_config_path.clone(), + json!({ + "workspace": ["./member-a"], + }), + ); + let member_a_config = root_dir().join("member-a/deno.json"); + sys.fs_insert_json(member_a_config.clone(), json!({})); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::ConfigFile(&root_config_path), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + ..Default::default() + }, + ) + .unwrap(); + assert_eq!( + workspace_dir + .workspace + .deno_jsons() + .map(|c| c.specifier.to_file_path().unwrap()) + .collect::>(), + vec![root_config_path, member_a_config] + ); + } + + #[test] + fn test_split_cli_args_by_deno_json_folder() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member-a", "./member-b"], + }), + ); + sys.fs_insert_json(root_dir().join("member-a/deno.json"), json!({})); + sys.fs_insert_json(root_dir().join("member-b/deno.json"), json!({})); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + // single member + { + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a"), + )])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([( + new_rc( + 
url_from_directory_path(&root_dir().join("member-a")).unwrap() + ), + FilePatterns { + base: root_dir().join("member-a"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a") + )])), + exclude: Default::default(), + } + )]) + ); + } + // root and in single member + { + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir().join("member-a").join("sub")), + PathOrPattern::Path(root_dir().join("file")), + ])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([ + ( + new_rc( + url_from_directory_path(&root_dir().join("member-a")).unwrap() + ), + FilePatterns { + base: root_dir().join("member-a/sub"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a").join("sub") + )])), + exclude: Default::default(), + } + ), + ( + new_rc(url_from_directory_path(&root_dir()).unwrap()), + FilePatterns { + base: root_dir().join("file"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("file") + )])), + exclude: Default::default(), + } + ), + ]) + ); + } + // multiple members (one with glob) and outside folder + { + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir().join("member-a")), + PathOrPattern::Pattern( + GlobPattern::from_relative(&root_dir().join("member-b"), "**/*") + .unwrap(), + ), + PathOrPattern::Path(root_dir().join("other_dir")), + ])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([ + ( + new_rc(url_from_directory_path(&root_dir()).unwrap()), + FilePatterns { + base: root_dir().join("other_dir"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("other_dir") + )])), + exclude: Default::default(), + } + ), + ( + new_rc( + url_from_directory_path(&root_dir().join("member-a")).unwrap() + ), + FilePatterns { + base: root_dir().join("member-a"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a") + )])), + exclude: Default::default(), + } + ), + ( + new_rc( + url_from_directory_path(&root_dir().join("member-b")).unwrap() + ), + FilePatterns { + base: root_dir().join("member-b"), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Pattern( + GlobPattern::from_relative( + &root_dir().join("member-b"), + "**/*" + ) + .unwrap(), + ) + ])), + exclude: Default::default(), + } + ), + ]) + ); + } + // glob at root dir + { + let root_glob = PathOrPattern::Pattern( + GlobPattern::from_relative(&root_dir(), "**/*").unwrap(), + ); + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![root_glob.clone()])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([ + ( + new_rc(url_from_directory_path(&root_dir()).unwrap()), + FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![root_glob.clone()])), + exclude: Default::default(), + } + ), + ( + new_rc( + url_from_directory_path(&root_dir().join("member-a")).unwrap() + ), + FilePatterns { + base: root_dir().join("member-a"), + include: Some(PathOrPatternSet::new(vec![root_glob.clone()])), + exclude: Default::default(), + } + ), + ( + new_rc( + 
url_from_directory_path(&root_dir().join("member-b")).unwrap() + ), + FilePatterns { + base: root_dir().join("member-b"), + include: Some(PathOrPatternSet::new(vec![root_glob])), + exclude: Default::default(), + } + ), + ]) + ); + } + // single path in descendant of member + { + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a/sub-dir/descendant/further"), + )])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([( + new_rc( + url_from_directory_path(&root_dir().join("member-a")).unwrap() + ), + FilePatterns { + base: root_dir().join("member-a/sub-dir/descendant/further"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a/sub-dir/descendant/further"), + )])), + exclude: Default::default(), + } + ),]) + ); + } + // path in descendant of member then second path that goes to a parent folder + { + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path( + root_dir().join("member-a/sub-dir/descendant/further"), + ), + PathOrPattern::Path(root_dir().join("member-a/sub-dir/other")), + ])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([( + new_rc( + url_from_directory_path(&root_dir().join("member-a")).unwrap() + ), + FilePatterns { + // should use common base here + base: root_dir().join("member-a/sub-dir"), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path( + root_dir().join("member-a/sub-dir/descendant/further"), + ), + PathOrPattern::Path(root_dir().join("member-a/sub-dir/other"),) + ])), + exclude: Default::default(), + } + )]) + ); + } + // path outside the root directory + { + let dir_outside = normalize_path(root_dir().join("../dir_outside")); + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + dir_outside.clone(), + )])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([( + new_rc(url_from_directory_path(&root_dir()).unwrap()), + FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + dir_outside.clone(), + ),])), + exclude: Default::default(), + } + )]) + ); + } + // multiple paths outside the root directory + { + let dir_outside_1 = normalize_path(root_dir().join("../dir_outside_1")); + let dir_outside_2 = normalize_path(root_dir().join("../dir_outside_2")); + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(dir_outside_1.clone()), + PathOrPattern::Path(dir_outside_2.clone()), + ])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([( + new_rc(url_from_directory_path(&root_dir()).unwrap()), + FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(dir_outside_1.clone()), + PathOrPattern::Path(dir_outside_2.clone()), + ])), + exclude: Default::default(), + } + )]) + ); + } + } + + #[test] + fn test_split_cli_args_by_deno_json_folder_no_config() { + let sys = InMemorySys::default(); + sys.fs_insert(root_dir().join("path"), ""); // create the root directory + let workspace_dir = 
workspace_at_start_dir(&sys, &root_dir()); + // two paths, looped to ensure that the order is maintained on + // the output and not sorted + let path1 = normalize_path(root_dir().join("./path-longer")); + let path2 = normalize_path(root_dir().join("./path")); + for (path1, path2) in [(&path1, &path2), (&path2, &path1)] { + let split = workspace_dir.workspace.split_cli_args_by_deno_json_folder( + &FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(path1.clone()), + PathOrPattern::Path(path2.clone()), + ])), + exclude: Default::default(), + }, + ); + assert_eq!( + split, + IndexMap::from([( + new_rc(url_from_directory_path(&root_dir()).unwrap()), + FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![ + PathOrPattern::Path(path1.clone()), + PathOrPattern::Path(path2.clone()), + ])), + exclude: Default::default(), + } + )]) + ); + } + } + + #[test] + fn test_resolve_config_for_members_include_root_and_sub_member() { + fn run_test( + config_key: &str, + workspace_to_file_patterns: impl Fn(&WorkspaceDirectory) -> Vec, + ) { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member-a", "./member-b", "member-c"], + config_key: { + "include": ["./file.ts", "./member-c/file.ts"] + } + }), + ); + sys.fs_insert_json( + root_dir().join("member-a/deno.json"), + json!({ + config_key: { + "include": ["./member-a-file.ts"] + } + }), + ); + sys.fs_insert_json(root_dir().join("member-b/deno.json"), json!({})); + sys.fs_insert_json(root_dir().join("member-c/deno.json"), json!({})); + let workspace = workspace_at_start_dir(&sys, &root_dir()); + assert_eq!( + workspace_to_file_patterns(&workspace), + vec![ + FilePatterns { + base: root_dir(), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("file.ts") + )])), + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir().join("member-a")), + PathOrPattern::Path(root_dir().join("member-b")), + PathOrPattern::Path(root_dir().join("member-c")), + ]) + }, + FilePatterns { + base: root_dir().join("member-a"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a").join("member-a-file.ts") + )])), + exclude: Default::default(), + }, + FilePatterns { + base: root_dir().join("member-b"), + include: None, + exclude: Default::default(), + }, + FilePatterns { + base: root_dir().join("member-c"), + include: Some(PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-c").join("file.ts") + )])), + exclude: Default::default(), + } + ] + ); + } + + run_test("bench", |workspace_dir| { + let config_for_members = workspace_dir + .workspace + .resolve_bench_config_for_members(&FilePatterns::new_with_base( + root_dir(), + )) + .unwrap(); + config_for_members + .into_iter() + .map(|(_ctx, config)| config.files) + .collect::>() + }); + + run_test("fmt", |workspace_dir| { + let config_for_members = workspace_dir + .workspace + .resolve_fmt_config_for_members( + &FilePatterns::new_with_base(root_dir()), + ) + .unwrap(); + config_for_members + .into_iter() + .map(|(_ctx, config)| config.files) + .collect::>() + }); + + run_test("lint", |workspace_dir| { + let config_for_members = workspace_dir + .workspace + .resolve_lint_config_for_members(&FilePatterns::new_with_base( + root_dir(), + )) + .unwrap(); + config_for_members + .into_iter() + .map(|(_ctx, config)| config.files) + .collect::>() + }); + + run_test("test", |workspace_dir| { + let 
config_for_members = workspace_dir + .workspace + .resolve_test_config_for_members(&FilePatterns::new_with_base( + root_dir(), + )) + .unwrap(); + config_for_members + .into_iter() + .map(|(_ctx, config)| config.files) + .collect::>() + }); + } + + #[test] + fn test_resolve_config_for_members_excluded_member() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member-a", "./member-b"], + "lint": { + "exclude": ["./member-a"] + } + }), + ); + sys.fs_insert_json(root_dir().join("member-a/deno.json"), json!({})); + sys.fs_insert_json(root_dir().join("member-b/deno.json"), json!({})); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let config_for_members = workspace_dir + .workspace + .resolve_lint_config_for_members(&FilePatterns::new_with_base(root_dir())) + .unwrap(); + let mut file_patterns = config_for_members + .into_iter() + .map(|(_ctx, config)| config.files) + .collect::>(); + assert_eq!( + file_patterns, + vec![ + FilePatterns { + base: root_dir(), + include: None, + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir().join("member-a")), + // It will be in here twice because it's excluded from being + // traversed for this set of FilePatterns and also it's excluded + // in the "exclude". This is not a big deal because it's an edge + // case and the end behaviour is the same. It's probably not worth + // the complexity and perf to ensure only unique items are in here + PathOrPattern::Path(root_dir().join("member-a")), + PathOrPattern::Path(root_dir().join("member-b")), + ]) + }, + // This item is effectively a no-op as it excludes itself. + // It would be nice to have this not even included as a member, + // but doing that in a maintainable way would require a bit of + // refactoring to get resolve_config_for_members to understand + // that configs return FilePatterns. 
+ FilePatterns { + base: root_dir().join("member-a"), + include: None, + exclude: PathOrPatternSet::new(vec![PathOrPattern::Path( + root_dir().join("member-a") + ),]), + }, + FilePatterns { + base: root_dir().join("member-b"), + include: None, + exclude: Default::default(), + }, + ] + ); + + // ensure the second file patterns is a no-op + sys.fs_insert(root_dir().join("member-a/file.ts"), ""); + sys.fs_insert(root_dir().join("member-a/sub-dir/file.ts"), ""); + let files = FileCollector::new(|_| true) + .collect_file_patterns(&sys, file_patterns.remove(1)); + assert!(files.is_empty()); + } + + #[test] + fn test_resolve_config_for_members_excluded_member_unexcluded_sub_dir() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["./member-a"], + "lint": { + "exclude": ["./member-a"] + } + }), + ); + sys.fs_insert_json( + root_dir().join("member-a/deno.json"), + json!({ + "lint": { + // unexclude this sub dir so it's linted + "exclude": ["!./sub-dir"] + } + }), + ); + let workspace_dir = workspace_at_start_dir(&sys, &root_dir()); + let config_for_members = workspace_dir + .workspace + .resolve_lint_config_for_members(&FilePatterns::new_with_base(root_dir())) + .unwrap(); + let mut file_patterns = config_for_members + .into_iter() + .map(|(_ctx, config)| config.files) + .collect::<Vec<_>>(); + assert_eq!( + file_patterns, + vec![ + FilePatterns { + base: root_dir(), + include: None, + exclude: PathOrPatternSet::new(vec![ + PathOrPattern::Path(root_dir().join("member-a")), + // see note in previous test about this being here twice + PathOrPattern::Path(root_dir().join("member-a")), + ]) + }, + FilePatterns { + base: root_dir().join("member-a"), + include: None, + exclude: PathOrPatternSet::new(vec![ + // self will be excluded, but then sub dir will be unexcluded + PathOrPattern::Path(root_dir().join("member-a")), + PathOrPattern::NegatedPath( + root_dir().join("member-a").join("sub-dir") + ), + ]), + }, + ] + ); + sys.fs_insert(root_dir().join("member-a/file.ts"), ""); + sys.fs_insert(root_dir().join("member-a/sub-dir/file.ts"), ""); + let files = FileCollector::new(|_| true) + .collect_file_patterns(&sys, file_patterns.remove(1)); + // should only have member-a/sub-dir/file.ts and not member-a/file.ts + assert_eq!(files, vec![root_dir().join("member-a/sub-dir/file.ts")]); + } + + #[test] + fn test_lock_path() { + let workspace_dir = workspace_for_root_and_member( + json!({ + "lock": "other.lock", + }), + json!({}), + ); + assert_eq!( + workspace_dir.workspace.resolve_lockfile_path().unwrap(), + Some(root_dir().join("other.lock")) + ); + } + + #[derive(Default)] + struct DenoJsonMemCache(RefCell<HashMap<PathBuf, ConfigFileRc>>); + + impl DenoJsonCache for DenoJsonMemCache { + fn get(&self, path: &Path) -> Option<ConfigFileRc> { + self.0.borrow().get(path).cloned() + } + + fn set(&self, path: PathBuf, deno_json: ConfigFileRc) { + self.0.borrow_mut().insert(path, deno_json); + } + } + + #[derive(Default)] + struct PkgJsonMemCache(RefCell<HashMap<PathBuf, PackageJsonRc>>); + + impl deno_package_json::PackageJsonCache for PkgJsonMemCache { + fn get(&self, path: &Path) -> Option<PackageJsonRc> { + self.0.borrow().get(path).cloned() + } + + fn set(&self, path: PathBuf, value: PackageJsonRc) { + self.0.borrow_mut().insert(path, value); + } + } + + #[derive(Default)] + struct WorkspaceMemCache(RefCell<HashMap<PathBuf, WorkspaceRc>>); + + impl WorkspaceCache for WorkspaceMemCache { + fn get(&self, dir_path: &Path) -> Option<WorkspaceRc> { + self.0.borrow().get(dir_path).cloned() + } + + fn set(&self, dir_path: PathBuf, workspace: WorkspaceRc) { +
self.0.borrow_mut().insert(dir_path, workspace); + } + } + + #[test] + fn workspace_discovery_deno_json_cache() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ "nodeModulesDir": true }), + ); + let cache = DenoJsonMemCache::default(); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + deno_json_cache: Some(&cache), + ..Default::default() + }, + ) + .unwrap(); + assert_eq!(cache.0.borrow().len(), 1); // writes to the cache + assert_eq!( + workspace_dir.workspace.node_modules_dir().unwrap(), + Some(NodeModulesDirMode::Auto) + ); + let new_config_file = ConfigFile::new( + r#"{ "nodeModulesDir": false }"#, + Url::from_file_path(root_dir().join("deno.json")).unwrap(), + ) + .unwrap(); + cache + .0 + .borrow_mut() + .insert(root_dir().join("deno.json"), new_rc(new_config_file)); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + deno_json_cache: Some(&cache), + ..Default::default() + }, + ) + .unwrap(); + assert_eq!( + workspace_dir.workspace.node_modules_dir().unwrap(), + Some(NodeModulesDirMode::None) // reads from the cache + ); + } + + #[test] + fn workspace_discovery_pkg_json_cache() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ "name": "member" }), + ); + let cache = PkgJsonMemCache::default(); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + pkg_json_cache: Some(&cache), + ..Default::default() + }, + ) + .unwrap(); + assert_eq!(cache.0.borrow().len(), 1); // writes to the cache + assert_eq!(workspace_dir.workspace.package_jsons().count(), 1); + let new_pkg_json = PackageJson::load_from_string( + root_dir().join("package.json"), + r#"{ "name": "cached-name" }"#, + ) + .unwrap(); + cache + .0 + .borrow_mut() + .insert(root_dir().join("package.json"), new_rc(new_pkg_json)); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + pkg_json_cache: Some(&cache), + ..Default::default() + }, + ) + .unwrap(); + // reads from the cache + assert_eq!( + workspace_dir + .workspace + .package_jsons() + .map(|p| p.name.as_deref().unwrap()) + .collect::>(), + vec!["cached-name"] + ); + } + + #[test] + fn workspace_discovery_workspace_cache() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("member/package-a/package.json"), + json!({ + "name": "member-a" + }), + ); + sys.fs_insert_json( + root_dir().join("member/package-b/deno.json"), + json!({ + "name": "member-b" + }), + ); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ + "workspace": ["member/package-a", "member/package-b"] + }), + ); + let deno_json_cache = DenoJsonMemCache::default(); + let pkg_json_cache = PkgJsonMemCache::default(); + let workspace_cache = WorkspaceMemCache::default(); + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + deno_json_cache: Some(&deno_json_cache), + pkg_json_cache: Some(&pkg_json_cache), + workspace_cache: Some(&workspace_cache), + ..Default::default() + }, + ) + .unwrap(); + 
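// only package-a has a package.json, so a single npm package is discovered +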
assert_eq!(workspace_dir.workspace.package_jsons().count(), 1); + // writes to the caches + assert_eq!(pkg_json_cache.0.borrow().len(), 1); + assert_eq!(deno_json_cache.0.borrow().len(), 2); + assert_eq!(workspace_cache.0.borrow().len(), 1); + // now delete from the deno json and pkg json caches + deno_json_cache.0.borrow_mut().clear(); + pkg_json_cache.0.borrow_mut().clear(); + // should load and not write to the caches + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[root_dir()]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + deno_json_cache: Some(&deno_json_cache), + pkg_json_cache: Some(&pkg_json_cache), + workspace_cache: Some(&workspace_cache), + ..Default::default() + }, + ) + .unwrap(); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 1); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 2); + // it wouldn't have written to these because it just + // loads from the workspace cache + assert_eq!(pkg_json_cache.0.borrow().len(), 0); + assert_eq!(deno_json_cache.0.borrow().len(), 0); + } + + #[test] + fn deno_workspace_discovery_workspace_cache() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("member/package-a/deno.json"), + json!({ "name": "member-a" }), + ); + sys.fs_insert_json( + root_dir().join("member/package-b/deno.json"), + json!({ "name": "member-b" }), + ); + sys.fs_insert_json( + root_dir().join("deno.json"), + json!({ "workspace": ["member/package-a", "member/package-b"] }), + ); + let deno_json_cache = DenoJsonMemCache::default(); + let pkg_json_cache = PkgJsonMemCache::default(); + let workspace_cache = WorkspaceMemCache::default(); + for start_dir in [ + root_dir(), + root_dir().join("member/package-a"), + root_dir().join("member/package-b"), + ] { + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[start_dir]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + deno_json_cache: Some(&deno_json_cache), + pkg_json_cache: Some(&pkg_json_cache), + workspace_cache: Some(&workspace_cache), + ..Default::default() + }, + ) + .unwrap(); + assert_eq!(workspace_dir.workspace.deno_jsons().count(), 3); + } + } + + #[test] + fn npm_workspace_discovery_workspace_cache() { + let sys = InMemorySys::default(); + sys.fs_insert_json( + root_dir().join("member/package-a/package.json"), + json!({ "name": "member-a" }), + ); + sys.fs_insert_json( + root_dir().join("member/package-b/package.json"), + json!({ "name": "member-b" }), + ); + sys.fs_insert_json( + root_dir().join("package.json"), + json!({ "workspaces": ["member/*"] }), + ); + let deno_json_cache = DenoJsonMemCache::default(); + let pkg_json_cache = PkgJsonMemCache::default(); + let workspace_cache = WorkspaceMemCache::default(); + for start_dir in [ + root_dir(), + root_dir().join("member/package-a"), + root_dir().join("member/package-b"), + ] { + let workspace_dir = WorkspaceDirectory::discover( + &sys, + WorkspaceDiscoverStart::Paths(&[start_dir]), + &WorkspaceDiscoverOptions { + discover_pkg_json: true, + deno_json_cache: Some(&deno_json_cache), + pkg_json_cache: Some(&pkg_json_cache), + workspace_cache: Some(&workspace_cache), + ..Default::default() + }, + ) + .unwrap(); + assert_eq!(workspace_dir.workspace.package_jsons().count(), 3); + } + } + + #[test] + fn test_jsx_invalid_setting() { + let member = workspace_for_root_and_member( + json!({ + "compilerOptions": { "jsx": "preserve" } + }), + json!({}), + ); + let deno_json = 
+    assert_eq!(
+      member.to_maybe_jsx_import_source_config().err().unwrap().to_string(),
+      format!(concat!(
+        "Unsupported 'jsx' compiler option value 'preserve'. Supported: 'react-jsx', 'react-jsxdev', 'react', 'precompile'\n",
+        " at {}",
+      ), deno_json.specifier),
+    );
+  }
+
+  #[test]
+  fn test_jsx_import_source_only() {
+    {
+      let member = workspace_for_root_and_member(
+        json!({
+          "compilerOptions": { "jsxImportSource": "test" }
+        }),
+        json!({}),
+      );
+      let deno_json = member.workspace.root_deno_json().unwrap();
+      assert_eq!(
+        member.to_maybe_jsx_import_source_config().err().unwrap().to_string(),
+        format!(concat!(
+          "'jsxImportSource' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n",
+          " at {}",
+        ), deno_json.specifier),
+      );
+    }
+    {
+      let member = workspace_for_root_and_member(
+        json!({
+          "compilerOptions": { "jsx": "react", "jsxImportSource": "test" }
+        }),
+        json!({}),
+      );
+      let deno_json = member.workspace.root_deno_json().unwrap();
+      assert_eq!(
+        member.to_maybe_jsx_import_source_config().err().unwrap().to_string(),
+        format!(concat!(
+          "'jsxImportSource' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n",
+          " at {}",
+        ), deno_json.specifier),
+      );
+    }
+  }
+
+  #[test]
+  fn test_jsx_import_source_types_only() {
+    {
+      let member = workspace_for_root_and_member(
+        json!({
+          "compilerOptions": { "jsxImportSourceTypes": "test" }
+        }),
+        json!({}),
+      );
+      let deno_json = member.workspace.root_deno_json().unwrap();
+      assert_eq!(
+        member.to_maybe_jsx_import_source_config().err().unwrap().to_string(),
+        format!(concat!(
+          "'jsxImportSourceTypes' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n",
+          " at {}",
+        ), deno_json.specifier),
+      );
+    }
+    {
+      let member = workspace_for_root_and_member(
+        json!({
+          "compilerOptions": { "jsx": "react", "jsxImportSourceTypes": "test" }
+        }),
+        json!({}),
+      );
+      let deno_json = member.workspace.root_deno_json().unwrap();
+      assert_eq!(
+        member.to_maybe_jsx_import_source_config().err().unwrap().to_string(),
+        format!(concat!(
+          "'jsxImportSourceTypes' is only supported when 'jsx' is set to 'react-jsx' or 'react-jsxdev'.\n",
+          " at {}",
+        ), deno_json.specifier),
+      );
+    }
+  }
+
+  #[test]
+  fn test_jsx_import_source_valid() {
+    let member = workspace_for_root_and_member(
+      json!({
+        "compilerOptions": { "jsx": "react" }
+      }),
+      json!({}),
+    );
+    assert!(member.to_maybe_jsx_import_source_config().is_ok());
+  }
+
+  #[test]
+  fn test_jsx_import_source_defaults() {
+    let member = workspace_for_root_and_member(
+      json!({
+        "compilerOptions": { "jsx": "react-jsx" }
+      }),
+      json!({}),
+    );
+    let config = member.to_maybe_jsx_import_source_config().unwrap().unwrap();
+    assert_eq!(config.import_source.unwrap().specifier, "react");
+    assert_eq!(config.import_source_types.unwrap().specifier, "react");
+  }
+
+  #[test]
+  fn test_jsx_import_source_types_defaults_import_source() {
+    let member = workspace_for_root_and_member(
+      json!({
+        "compilerOptions": { "jsx": "react-jsx", "jsxImportSource": "jsx" }
+      }),
+      json!({}),
+    );
+    let config = member.to_maybe_jsx_import_source_config().unwrap().unwrap();
+    assert_eq!(config.import_source.unwrap().specifier, "jsx");
+    assert_eq!(config.import_source_types.unwrap().specifier, "jsx");
+  }
+
+  #[test]
+  fn test_jsx_precompile_skip_setting() {
+    let member = workspace_for_root_and_member(
+      json!({
+        "compilerOptions": { "jsx": "react-jsx", "jsxImportSource": "npm:react", "jsxImportSourceTypes": "npm:@types/react" }
+      }),
+      json!({
+        "compilerOptions": { "jsxImportSource": "npm:preact/compat" }
+      }),
+    );
+    let config = member.to_maybe_jsx_import_source_config().unwrap().unwrap();
+    assert_eq!(
+      config,
+      JsxImportSourceConfig {
+        module: "jsx-runtime".to_string(),
+        import_source: Some(JsxImportSourceSpecifierConfig {
+          specifier: "npm:preact/compat".to_string(),
+          base: Url::from_file_path(root_dir().join("member/deno.json"))
+            .unwrap()
+        }),
+        import_source_types: Some(JsxImportSourceSpecifierConfig {
+          specifier: "npm:@types/react".to_string(),
+          base: Url::from_file_path(root_dir().join("deno.json")).unwrap()
+        }),
+      }
+    );
+  }
+
+  #[test]
+  fn test_override_member() {
+    let member = workspace_for_root_and_member(
+      json!({
+        "compilerOptions": { "jsx": "precompile", "jsxPrecompileSkipElements": ["a", "p"] }
+      }),
+      json!({}),
+    );
+    assert!(member.to_maybe_jsx_import_source_config().is_ok());
+  }
+
+  fn workspace_for_root_and_member(
+    root: serde_json::Value,
+    member: serde_json::Value,
+  ) -> WorkspaceDirectory {
+    workspace_for_root_and_member_with_fs(root, member, |_| {})
+  }
+
+  fn workspace_for_root_and_member_with_fs(
+    root: serde_json::Value,
+    member: serde_json::Value,
+    with_sys: impl FnOnce(&InMemorySys),
+  ) -> WorkspaceDirectory {
+    let sys = in_memory_fs_for_root_and_member(root, member);
+    with_sys(&sys);
+    // start in the member
+    workspace_at_start_dir(&sys, &root_dir().join("member"))
+  }
+
+  fn in_memory_fs_for_root_and_member(
+    mut root: serde_json::Value,
+    member: serde_json::Value,
+  ) -> InMemorySys {
+    root
+      .as_object_mut()
+      .unwrap()
+      .insert("workspace".to_string(), json!(["./member"]));
+    let sys = InMemorySys::default();
+    sys.fs_insert_json(root_dir().join("deno.json"), root);
+    sys.fs_insert_json(root_dir().join("member/deno.json"), member);
+    sys
+  }
+
+  fn workspace_at_start_dir(
+    sys: &InMemorySys,
+    start_dir: &Path,
+  ) -> WorkspaceDirectory {
+    workspace_at_start_dir_result(sys, start_dir).unwrap()
+  }
+
+  fn workspace_at_start_dir_err(
+    sys: &InMemorySys,
+    start_dir: &Path,
+  ) -> WorkspaceDiscoverError {
+    workspace_at_start_dir_result(sys, start_dir).unwrap_err()
+  }
+
+  fn workspace_at_start_dir_result(
+    sys: &InMemorySys,
+    start_dir: &Path,
+  ) -> Result<WorkspaceDirectory, WorkspaceDiscoverError> {
+    workspace_at_start_dirs(sys, &[start_dir.to_path_buf()])
+  }
+
+  fn workspace_at_start_dirs(
+    sys: &InMemorySys,
+    start_dirs: &[PathBuf],
+  ) -> Result<WorkspaceDirectory, WorkspaceDiscoverError> {
+    WorkspaceDirectory::discover(
+      sys,
+      WorkspaceDiscoverStart::Paths(start_dirs),
+      &WorkspaceDiscoverOptions {
+        discover_pkg_json: true,
+        ..Default::default()
+      },
+    )
+  }
+
+  fn normalize_err_text(text: &str) -> String {
+    text.replace(
+      "[ROOT_DIR_URL]",
+      url_from_directory_path(&root_dir())
+        .unwrap()
+        .to_string()
+        .trim_end_matches('/'),
+    )
+  }
+}
diff --git a/tests/specs/run/webtransport/main.ts b/tests/specs/run/webtransport/main.ts
index 78ad1b7f0b..2e270298c9 100644
--- a/tests/specs/run/webtransport/main.ts
+++ b/tests/specs/run/webtransport/main.ts
@@ -1,4 +1,4 @@
-// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
+// Copyright 2018-2025 the Deno authors. All rights reserved. MIT license.
 import { decodeBase64 } from "@std/encoding/base64";
 import { assertEquals } from "@std/assert";
diff --git a/tools/copyright_checker.js b/tools/copyright_checker.js
index 9ac84e3ec5..727be10f7c 100755
--- a/tools/copyright_checker.js
+++ b/tools/copyright_checker.js
@@ -43,6 +43,7 @@ export async function checkCopyright() {
     ":!:tests/testdata/**",
     ":!:tests/unit_node/testdata/**",
     ":!:tests/wpt/suite/**",
+    ":!:libs/config/testdata/**",

     // rust
     "*.rs",
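
Reviewer note: a minimal sketch (not part of the diff) of how the caches exercised by the tests above fit together, assuming the test-local helpers from libs/config/workspace/mod.rs (InMemorySys, root_dir(), DenoJsonMemCache, PkgJsonMemCache, WorkspaceMemCache); an embedder would substitute its own implementations of the corresponding cache traits.

  // populate a minimal in-memory file system, mirroring the tests
  let sys = InMemorySys::default();
  sys.fs_insert_json(root_dir().join("deno.json"), json!({}));
  let deno_json_cache = DenoJsonMemCache::default();
  let pkg_json_cache = PkgJsonMemCache::default();
  let workspace_cache = WorkspaceMemCache::default();
  // sharing the same cache instances across calls lets later discoveries
  // resolve from the workspace cache instead of re-reading config files
  for _ in 0..2 {
    let _workspace_dir = WorkspaceDirectory::discover(
      &sys,
      WorkspaceDiscoverStart::Paths(&[root_dir()]),
      &WorkspaceDiscoverOptions {
        discover_pkg_json: true,
        deno_json_cache: Some(&deno_json_cache),
        pkg_json_cache: Some(&pkg_json_cache),
        workspace_cache: Some(&workspace_cache),
        ..Default::default()
      },
    )
    .unwrap();
  }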