refactor: add deno_config crate back to cli (#29740)

Co-authored-by: nayeemrmn <nayeemrmn@users.noreply.github.com>
Author: David Sherret, 2025-06-13 13:01:26 -04:00 (committed by GitHub)
parent fa3208bf27
commit d15a302d41
29 changed files with 13138 additions and 9 deletions

@@ -35,6 +35,7 @@
"cli/tools/doc/prism.js",
"ext/websocket/autobahn/reports",
"gh-pages",
"libs/config/testdata",
"target",
"tests/ffi/tests/test.js",
"tests/node_compat/runner/suite",

@@ -1230,8 +1230,8 @@ const ci = {
},
]),
},
wasm: {
name: "build wasm32",
libs: {
name: "build libs",
needs: ["pre_build"],
if: "${{ needs.pre_build.outputs.skip_build != 'true' }}",
"runs-on": ubuntuX86Runner,
@@ -1254,6 +1254,16 @@ const ci = {
run:
"cargo check --target wasm32-unknown-unknown -p deno_npm_installer",
},
{
name: "Cargo check (deno_config)",
run: [
"cargo check --no-default-features -p deno_config",
"cargo check --no-default-features --features workspace -p deno_config",
"cargo check --no-default-features --features package_json -p deno_config",
"cargo check --no-default-features --features workspace --features sync -p deno_config",
"cargo check --target wasm32-unknown-unknown --all-features -p deno_config",
].join("\n"),
},
]),
},
"publish-canary": {

@@ -781,8 +781,8 @@ jobs:
!./target/*/*.zip
!./target/*/*.tar.gz
key: '60-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
wasm:
name: build wasm32
libs:
name: build libs
needs:
- pre_build
if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}'
@@ -811,6 +811,14 @@ jobs:
- name: Cargo check (deno_npm_installer)
run: cargo check --target wasm32-unknown-unknown -p deno_npm_installer
if: '!(matrix.skip)'
- name: Cargo check (deno_config)
run: |-
cargo check --no-default-features -p deno_config
cargo check --no-default-features --features workspace -p deno_config
cargo check --no-default-features --features package_json -p deno_config
cargo check --no-default-features --features workspace --features sync -p deno_config
cargo check --target wasm32-unknown-unknown --all-features -p deno_config
if: '!(matrix.skip)'
publish-canary:
name: publish canary
runs-on: ubuntu-24.04

Cargo.lock (generated; 4 lines changed)

@@ -1722,8 +1722,6 @@ dependencies = [
[[package]]
name = "deno_config"
version = "0.57.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cede8eaf636037d9f3d9206eeecb0bdcea042810da5eb00be74ecb0d17b95acc"
dependencies = [
"boxed_error",
"capacity_builder",
@@ -1739,9 +1737,11 @@ dependencies = [
"log",
"percent-encoding",
"phf",
"pretty_assertions",
"serde",
"serde_json",
"sys_traits",
"tempfile",
"thiserror 2.0.12",
"url",
]

@@ -32,6 +32,7 @@ members = [
"ext/webidl",
"ext/websocket",
"ext/webstorage",
"libs/config",
"resolvers/deno",
"resolvers/node",
"resolvers/npm_cache",
@@ -59,7 +60,7 @@ deno_ast = { version = "=0.48.0", features = ["transpiling"] }
deno_core = { version = "0.350.0" }
deno_cache_dir = "=0.22.2"
deno_config = { version = "=0.57.0", features = ["workspace"] }
deno_config = { version = "=0.57.0", features = ["workspace"], path = "./libs/config" }
deno_doc = "=0.178.0"
deno_error = "=0.6.1"
deno_graph = { version = "=0.95.1", default-features = false }
@@ -179,6 +180,7 @@ hyper-rustls = { version = "0.27.2", default-features = false, features = ["http
hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] }
hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] }
idna = "1.0.3"
ignore = "0.4"
import_map = { version = "0.22.0", features = ["ext"] }
indexmap = { version = "2", features = ["serde"] }
ipnet = "2.3"

@@ -1,6 +1,6 @@
MIT License
Copyright 2018-2024 the Deno authors
Copyright 2018-2025 the Deno authors
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in

libs/config/Cargo.toml (new file, 45 lines)

@@ -0,0 +1,45 @@
# Copyright 2018-2025 the Deno authors. MIT license.
[package]
name = "deno_config"
version = "0.57.0"
authors = ["the Deno authors"]
edition = "2021"
license = "MIT"
repository = "https://github.com/denoland/deno_config"
description = "Config file implementation for the Deno CLI"
[lib]
path = "lib.rs"
[features]
default = ["workspace"]
deno_json = ["jsonc-parser", "glob", "ignore", "import_map", "phf"]
package_json = ["deno_package_json"]
sync = ["deno_package_json/sync"]
workspace = ["deno_json", "deno_semver", "package_json"]
[dependencies]
boxed_error.workspace = true
capacity_builder = { workspace = true }
deno_error = { workspace = true, features = ["url"] }
deno_package_json = { workspace = true, optional = true }
deno_path_util.workspace = true
deno_semver = { workspace = true, optional = true }
glob = { workspace = true, optional = true }
ignore = { workspace = true, optional = true }
import_map = { workspace = true, features = ["ext"], optional = true }
indexmap = { workspace = true, features = ["serde"] }
jsonc-parser = { workspace = true, features = ["serde"], optional = true }
log.workspace = true
percent-encoding.workspace = true
phf = { workspace = true, features = ["macros"], optional = true }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sys_traits.workspace = true
thiserror.workspace = true
url = { workspace = true }
[dev-dependencies]
pretty_assertions.workspace = true
sys_traits = { workspace = true, features = ["memory", "real", "serde_json"] }
tempfile.workspace = true

libs/config/README.md (new file, 5 lines)

@@ -0,0 +1,5 @@
# `deno_config`
An implementation of the
[Deno configuration file](https://docs.deno.com/runtime/manual/getting_started/configuration_file/)
in Rust.

libs/config/clippy.toml (new file, 43 lines)

@@ -0,0 +1,43 @@
disallowed-methods = [
{ path = "std::env::current_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::exists", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::is_file", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::metadata", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::read_link", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::canonicalize", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::copy", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::create_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::hard_link", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::metadata", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::read", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::read_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::read_link", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::read_to_string", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::remove_dir", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::remove_file", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::rename", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::set_permissions", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using the sys_traits crate" },
{ path = "std::fs::write", reason = "File system operations should be done using the sys_traits crate" },
]
disallowed-types = [
{ path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" },
]
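For illustration (not part of this commit), a minimal standalone sketch of the pattern these rules enforce: file system access goes through a sys_traits generic instead of std::fs, so the same code runs against the in-memory implementation in tests (sys_traits' "memory" feature, enabled above for dev-dependencies) and a real file system elsewhere. The helper name and paths below are hypothetical.

// Hypothetical standalone example; the sys_traits calls mirror the ones used by
// the crate's own code further below.
use std::path::Path;

use sys_traits::impls::InMemorySys;
use sys_traits::FsCreateDirAll;
use sys_traits::FsRead;
use sys_traits::FsWrite;

fn count_lines<TSys: FsRead>(sys: &TSys, path: &Path) -> usize {
  // instead of std::fs::read_to_string
  sys
    .fs_read_to_string_lossy(path)
    .map(|text| text.lines().count())
    .unwrap_or(0)
}

fn main() {
  let sys = InMemorySys::default();
  sys.fs_create_dir_all("/project").unwrap();
  sys.fs_write("/project/deno.json", "{\n  \"fmt\": {}\n}").unwrap();
  assert_eq!(count_lines(&sys, Path::new("/project/deno.json")), 3);
  // swapping in sys_traits::impls::RealSys runs the same logic on the real file system
}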

libs/config/deno_json/mod.rs (new file, 2952 lines; diff not shown — file too large)

libs/config/deno_json/ts.rs (new file, 229 lines)

@@ -0,0 +1,229 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::fmt;
use serde::Deserialize;
use serde::Serialize;
use serde::Serializer;
use serde_json::Value;
use url::Url;
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawJsxCompilerOptions {
pub jsx: Option<String>,
pub jsx_import_source: Option<String>,
pub jsx_import_source_types: Option<String>,
}
/// The transpile options that are significant out of a user provided tsconfig
/// file, that we want to deserialize out of the final config for a transpile.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EmitConfigOptions {
pub check_js: bool,
pub experimental_decorators: bool,
pub emit_decorator_metadata: bool,
pub imports_not_used_as_values: String,
pub inline_source_map: bool,
pub inline_sources: bool,
pub source_map: bool,
pub jsx: String,
pub jsx_factory: String,
pub jsx_fragment_factory: String,
pub jsx_import_source: Option<String>,
pub jsx_precompile_skip_elements: Option<Vec<String>>,
}
/// A structure that represents a set of options that were ignored and the
/// path those options came from.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct IgnoredCompilerOptions {
pub items: Vec<String>,
pub maybe_specifier: Option<Url>,
}
impl fmt::Display for IgnoredCompilerOptions {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut codes = self.items.clone();
codes.sort_unstable();
if let Some(specifier) = &self.maybe_specifier {
write!(f, "Unsupported compiler options in \"{}\".\n The following options were ignored:\n {}", specifier, codes.join(", "))
} else {
write!(f, "Unsupported compiler options provided.\n The following options were ignored:\n {}", codes.join(", "))
}
}
}
impl Serialize for IgnoredCompilerOptions {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(&self.items, serializer)
}
}
/// A set of all the compiler options that should be allowed;
static ALLOWED_COMPILER_OPTIONS: phf::Set<&'static str> = phf::phf_set! {
"allowUnreachableCode",
"allowUnusedLabels",
"checkJs",
"erasableSyntaxOnly",
"emitDecoratorMetadata",
"exactOptionalPropertyTypes",
"experimentalDecorators",
"isolatedDeclarations",
"jsx",
"jsxFactory",
"jsxFragmentFactory",
"jsxImportSource",
"jsxPrecompileSkipElements",
"lib",
"noErrorTruncation",
"noFallthroughCasesInSwitch",
"noImplicitAny",
"noImplicitOverride",
"noImplicitReturns",
"noImplicitThis",
"noPropertyAccessFromIndexSignature",
"noUncheckedIndexedAccess",
"noUnusedLocals",
"noUnusedParameters",
"rootDirs",
"strict",
"strictBindCallApply",
"strictBuiltinIteratorReturn",
"strictFunctionTypes",
"strictNullChecks",
"strictPropertyInitialization",
"types",
"useUnknownInCatchVariables",
"verbatimModuleSyntax",
};
#[derive(Debug, Default, Clone)]
pub struct ParsedTsConfigOptions {
pub options: serde_json::Map<String, serde_json::Value>,
pub maybe_ignored: Option<IgnoredCompilerOptions>,
}
pub fn parse_compiler_options(
compiler_options: serde_json::Map<String, Value>,
maybe_specifier: Option<&Url>,
) -> ParsedTsConfigOptions {
let mut allowed: serde_json::Map<String, Value> =
serde_json::Map::with_capacity(compiler_options.len());
let mut ignored: Vec<String> = Vec::new(); // don't pre-allocate because it's rare
for (key, value) in compiler_options {
// We don't pass "types" entries to typescript via the compiler
// options and instead provide those to tsc as "roots". This is
// because our "types" behavior is at odds with how TypeScript's
// "types" works.
// We also don't pass "jsxImportSourceTypes" to TypeScript as it doesn't
// know about this option. It will still take this option into account
// because the graph resolves the JSX import source to the types for TSC.
if key != "types" && key != "jsxImportSourceTypes" {
if ALLOWED_COMPILER_OPTIONS.contains(key.as_str()) {
allowed.insert(key, value.to_owned());
} else {
ignored.push(key);
}
}
}
let maybe_ignored = if !ignored.is_empty() {
Some(IgnoredCompilerOptions {
items: ignored,
maybe_specifier: maybe_specifier.cloned(),
})
} else {
None
};
ParsedTsConfigOptions {
options: allowed,
maybe_ignored,
}
}
/// A structure for managing the configuration of TypeScript
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TsConfig(pub Value);
impl Default for TsConfig {
fn default() -> Self {
Self(serde_json::Value::Object(Default::default()))
}
}
impl TsConfig {
/// Create a new `TsConfig` with the base being the `value` supplied.
pub fn new(value: Value) -> Self {
TsConfig(value)
}
pub fn merge_mut(&mut self, value: TsConfig) {
json_merge(&mut self.0, value.0);
}
/// Merge a serde_json value into the configuration.
pub fn merge_object_mut(
&mut self,
value: serde_json::Map<String, serde_json::Value>,
) {
json_merge(&mut self.0, serde_json::Value::Object(value));
}
}
impl Serialize for TsConfig {
/// Serializes inner hash map which is ordered by the key
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
Serialize::serialize(&self.0, serializer)
}
}
/// A function that works like JavaScript's `Object.assign()`.
fn json_merge(a: &mut Value, b: Value) {
match (a, b) {
(&mut Value::Object(ref mut a), Value::Object(b)) => {
for (k, v) in b {
json_merge(a.entry(k).or_insert(Value::Null), v);
}
}
(a, b) => {
*a = b;
}
}
}
#[cfg(test)]
mod tests {
use serde_json::json;
use super::*;
#[test]
fn test_json_merge() {
let mut value_a = json!({
"a": true,
"b": "c"
});
let value_b = json!({
"b": "d",
"e": false,
});
json_merge(&mut value_a, value_b);
assert_eq!(
value_a,
json!({
"a": true,
"b": "d",
"e": false,
})
);
}
}
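For illustration (not part of this commit), a hedged sketch written as if added to the tests module above: it exercises the filtering that parse_compiler_options performs and the Object.assign-style merge that TsConfig provides. The input map is hand-built here; in the CLI it would come from a parsed config file.

  // Sketch only: "checkJs" survives, "module" is reported as ignored, and
  // "types" is stripped up front (it is passed to tsc as "roots" instead).
  #[test]
  fn parse_and_merge_sketch() {
    let raw = json!({
      "checkJs": true,
      "module": "esnext",
      "types": ["./types.d.ts"]
    });
    let serde_json::Value::Object(map) = raw else {
      unreachable!()
    };
    let parsed = parse_compiler_options(map, None);
    assert!(parsed.options.contains_key("checkJs"));
    assert_eq!(parsed.maybe_ignored.unwrap().items, vec!["module".to_string()]);

    // layer the surviving options onto a base config, Object.assign-style
    let mut ts_config = TsConfig::default();
    ts_config.merge_object_mut(parsed.options);
    assert_eq!(ts_config.0["checkJs"], json!(true));
  }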

@@ -0,0 +1,374 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::Path;
use std::path::PathBuf;
use deno_path_util::normalize_path;
use sys_traits::FsDirEntry;
use sys_traits::FsMetadata;
use sys_traits::FsMetadataValue;
use sys_traits::FsRead;
use sys_traits::FsReadDir;
use super::FilePatterns;
use crate::glob::gitignore::DirGitIgnores;
use crate::glob::gitignore::GitIgnoreTree;
use crate::glob::FilePatternsMatch;
use crate::glob::PathKind;
use crate::glob::PathOrPattern;
#[derive(Debug, Clone)]
pub struct WalkEntry<'a> {
pub path: &'a Path,
pub metadata: &'a dyn FsMetadataValue,
pub patterns: &'a FilePatterns,
}
/// Collects file paths that satisfy the given predicate, by recursively walking `files`.
/// If the walker visits a path that is listed in `ignore`, it skips descending into the directory.
pub struct FileCollector<TFilter: Fn(WalkEntry) -> bool> {
file_filter: TFilter,
ignore_git_folder: bool,
ignore_node_modules: bool,
vendor_folder: Option<PathBuf>,
use_gitignore: bool,
}
impl<TFilter: Fn(WalkEntry) -> bool> FileCollector<TFilter> {
pub fn new(file_filter: TFilter) -> Self {
Self {
file_filter,
ignore_git_folder: false,
ignore_node_modules: false,
vendor_folder: None,
use_gitignore: false,
}
}
pub fn ignore_node_modules(mut self) -> Self {
self.ignore_node_modules = true;
self
}
pub fn set_vendor_folder(mut self, vendor_folder: Option<PathBuf>) -> Self {
self.vendor_folder = vendor_folder;
self
}
pub fn ignore_git_folder(mut self) -> Self {
self.ignore_git_folder = true;
self
}
pub fn use_gitignore(mut self) -> Self {
self.use_gitignore = true;
self
}
pub fn collect_file_patterns<TSys: FsRead + FsMetadata + FsReadDir>(
&self,
sys: &TSys,
file_patterns: FilePatterns,
) -> Vec<PathBuf> {
fn is_pattern_matched(
maybe_git_ignore: Option<&DirGitIgnores>,
path: &Path,
is_dir: bool,
file_patterns: &FilePatterns,
) -> bool {
let path_kind = match is_dir {
true => PathKind::Directory,
false => PathKind::File,
};
match file_patterns.matches_path_detail(path, path_kind) {
FilePatternsMatch::Passed => {
// check gitignore
let is_gitignored = maybe_git_ignore
.as_ref()
.map(|git_ignore| git_ignore.is_ignored(path, is_dir))
.unwrap_or(false);
!is_gitignored
}
FilePatternsMatch::PassedOptedOutExclude => true,
FilePatternsMatch::Excluded => false,
}
}
let mut maybe_git_ignores = if self.use_gitignore {
// Override explicitly specified include paths in the
// .gitignore file. This does not apply to globs because
// that is way too complicated to reason about.
let include_paths = file_patterns
.include
.as_ref()
.map(|include| {
include
.inner()
.iter()
.filter_map(|path_or_pattern| {
if let PathOrPattern::Path(p) = path_or_pattern {
Some(p.clone())
} else {
None
}
})
.collect::<Vec<_>>()
})
.unwrap_or_default();
Some(GitIgnoreTree::new(sys, include_paths))
} else {
None
};
let mut target_files = Vec::new();
let mut visited_paths: HashSet<PathBuf> = HashSet::default();
let file_patterns_by_base = file_patterns.split_by_base();
for file_patterns in file_patterns_by_base {
let specified_path = normalize_path(&file_patterns.base);
let mut pending_dirs = VecDeque::new();
let mut handle_entry =
|path: PathBuf,
metadata: &dyn FsMetadataValue,
pending_dirs: &mut VecDeque<PathBuf>| {
let maybe_gitignore =
maybe_git_ignores.as_mut().and_then(|git_ignores| {
if metadata.file_type().is_dir() {
git_ignores.get_resolved_git_ignore_for_dir(&path)
} else {
git_ignores.get_resolved_git_ignore_for_file(&path)
}
});
if !is_pattern_matched(
maybe_gitignore.as_deref(),
&path,
metadata.file_type().is_dir(),
&file_patterns,
) {
// ignore
} else if metadata.file_type().is_dir() {
// allow the user to opt out of ignoring by explicitly specifying the dir
let opt_out_ignore = specified_path == path;
let should_ignore_dir =
!opt_out_ignore && self.is_ignored_dir(&path);
if !should_ignore_dir && visited_paths.insert(path.clone()) {
pending_dirs.push_back(path);
}
} else if (self.file_filter)(WalkEntry {
path: &path,
metadata,
patterns: &file_patterns,
}) && visited_paths.insert(path.clone())
{
target_files.push(path);
}
};
if let Ok(metadata) = sys.fs_metadata(&specified_path) {
handle_entry(specified_path.clone(), &metadata, &mut pending_dirs);
}
// use an iterator in order to minimize the number of file system operations
while let Some(next_dir) = pending_dirs.pop_front() {
let Ok(entries) = sys.fs_read_dir(&next_dir) else {
continue;
};
for entry in entries {
let Ok(entry) = entry else {
continue;
};
let Ok(metadata) = entry.metadata() else {
continue;
};
handle_entry(entry.path().into_owned(), &metadata, &mut pending_dirs)
}
}
}
target_files
}
fn is_ignored_dir(&self, path: &Path) -> bool {
path
.file_name()
.map(|dir_name| {
let dir_name = dir_name.to_string_lossy().to_lowercase();
let is_ignored_file = match dir_name.as_str() {
"node_modules" => self.ignore_node_modules,
".git" => self.ignore_git_folder,
_ => false,
};
is_ignored_file
})
.unwrap_or(false)
|| self.is_vendor_folder(path)
}
fn is_vendor_folder(&self, path: &Path) -> bool {
self
.vendor_folder
.as_ref()
.map(|vendor_folder| path == *vendor_folder)
.unwrap_or(false)
}
}
#[cfg(test)]
mod test {
use std::path::PathBuf;
use sys_traits::impls::RealSys;
use tempfile::TempDir;
use super::*;
use crate::glob::FilePatterns;
use crate::glob::PathOrPattern;
use crate::glob::PathOrPatternSet;
#[allow(clippy::disallowed_methods)] // allow fs methods
#[test]
fn test_collect_files() {
fn create_files(dir_path: &PathBuf, files: &[&str]) {
std::fs::create_dir_all(dir_path).unwrap();
for f in files {
std::fs::write(dir_path.join(f), "").unwrap();
}
}
// dir.ts
// ├── a.ts
// ├── b.js
// ├── child
// | ├── git
// | | └── git.js
// | ├── node_modules
// | | └── node_modules.js
// | ├── vendor
// | | └── vendor.js
// │ ├── e.mjs
// │ ├── f.mjsx
// │ ├── .foo.TS
// │ └── README.md
// ├── c.tsx
// ├── d.jsx
// └── ignore
// ├── g.d.ts
// └── .gitignore
let t = TempDir::new().unwrap();
let root_dir_path = t.path().join("dir.ts");
let root_dir_files = ["a.ts", "b.js", "c.tsx", "d.jsx"];
create_files(&root_dir_path, &root_dir_files);
let child_dir_path = root_dir_path.join("child");
let child_dir_files = ["e.mjs", "f.mjsx", ".foo.TS", "README.md"];
create_files(&child_dir_path, &child_dir_files);
std::fs::create_dir_all(t.path().join("dir.ts/child/node_modules"))
.unwrap();
std::fs::write(
t.path().join("dir.ts/child/node_modules/node_modules.js"),
"",
)
.unwrap();
std::fs::create_dir_all(t.path().join("dir.ts/child/.git")).unwrap();
std::fs::write(t.path().join("dir.ts/child/.git/git.js"), "").unwrap();
std::fs::create_dir_all(t.path().join("dir.ts/child/vendor")).unwrap();
std::fs::write(t.path().join("dir.ts/child/vendor/vendor.js"), "").unwrap();
let ignore_dir_path = root_dir_path.join("ignore");
let ignore_dir_files = ["g.d.ts", ".gitignore"];
create_files(&ignore_dir_path, &ignore_dir_files);
let file_patterns = FilePatterns {
base: root_dir_path.to_path_buf(),
include: None,
exclude: PathOrPatternSet::new(vec![PathOrPattern::Path(
ignore_dir_path.to_path_buf(),
)]),
};
let file_collector = FileCollector::new(|e| {
// exclude dotfiles
e.path
.file_name()
.and_then(|f| f.to_str())
.map(|f| !f.starts_with('.'))
.unwrap_or(false)
});
let result =
file_collector.collect_file_patterns(&RealSys, file_patterns.clone());
let expected = [
"README.md",
"a.ts",
"b.js",
"c.tsx",
"d.jsx",
"e.mjs",
"f.mjsx",
"git.js",
"node_modules.js",
"vendor.js",
];
let mut file_names = result
.into_iter()
.map(|r| r.file_name().unwrap().to_string_lossy().to_string())
.collect::<Vec<_>>();
file_names.sort();
assert_eq!(file_names, expected);
// test ignoring the .git and node_modules folder
let file_collector = file_collector
.ignore_git_folder()
.ignore_node_modules()
.set_vendor_folder(Some(child_dir_path.join("vendor").to_path_buf()));
let result =
file_collector.collect_file_patterns(&RealSys, file_patterns.clone());
let expected = [
"README.md",
"a.ts",
"b.js",
"c.tsx",
"d.jsx",
"e.mjs",
"f.mjsx",
];
let mut file_names = result
.into_iter()
.map(|r| r.file_name().unwrap().to_string_lossy().to_string())
.collect::<Vec<_>>();
file_names.sort();
assert_eq!(file_names, expected);
// test opting out of ignoring by specifying the dir
let file_patterns = FilePatterns {
base: root_dir_path.to_path_buf(),
include: Some(PathOrPatternSet::new(vec![
PathOrPattern::Path(root_dir_path.to_path_buf()),
PathOrPattern::Path(
root_dir_path.to_path_buf().join("child/node_modules/"),
),
])),
exclude: PathOrPatternSet::new(vec![PathOrPattern::Path(
ignore_dir_path.to_path_buf(),
)]),
};
let result = file_collector.collect_file_patterns(&RealSys, file_patterns);
let expected = [
"README.md",
"a.ts",
"b.js",
"c.tsx",
"d.jsx",
"e.mjs",
"f.mjsx",
"node_modules.js",
];
let mut file_names = result
.into_iter()
.map(|r| r.file_name().unwrap().to_string_lossy().to_string())
.collect::<Vec<_>>();
file_names.sort();
assert_eq!(file_names, expected);
}
}
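For illustration (not part of this commit), a hedged in-crate-style sketch of the builder with the .gitignore handling that the test above does not exercise. The helper name and directory argument are hypothetical, and RealSys assumes sys_traits' "real" feature, as in the test imports.

// Sketch only: collect .ts files under `dir`, honoring .gitignore files and
// skipping .git and node_modules directories.
use std::path::Path;
use std::path::PathBuf;

use sys_traits::impls::RealSys;

use crate::glob::FilePatterns;
use crate::glob::PathOrPatternSet;

fn collect_ts_files(dir: &Path) -> Vec<PathBuf> {
  let patterns = FilePatterns {
    base: dir.to_path_buf(),
    include: None,
    exclude: PathOrPatternSet::new(Vec::new()),
  };
  FileCollector::new(|entry| {
    entry
      .path
      .file_name()
      .and_then(|name| name.to_str())
      .map(|name| name.ends_with(".ts"))
      .unwrap_or(false)
  })
  .ignore_git_folder()
  .ignore_node_modules()
  .use_gitignore()
  .collect_file_patterns(&RealSys, patterns)
}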

@@ -0,0 +1,180 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use std::rc::Rc;
use sys_traits::FsMetadata;
use sys_traits::FsRead;
/// Resolved gitignore for a directory.
pub struct DirGitIgnores {
current: Option<Rc<ignore::gitignore::Gitignore>>,
parent: Option<Rc<DirGitIgnores>>,
}
impl DirGitIgnores {
pub fn is_ignored(&self, path: &Path, is_dir: bool) -> bool {
let mut is_ignored = false;
if let Some(parent) = &self.parent {
is_ignored = parent.is_ignored(path, is_dir);
}
if let Some(current) = &self.current {
match current.matched(path, is_dir) {
ignore::Match::None => {}
ignore::Match::Ignore(_) => {
is_ignored = true;
}
ignore::Match::Whitelist(_) => {
is_ignored = false;
}
}
}
is_ignored
}
}
/// Resolves gitignores in a directory tree taking into account
/// ancestor gitignores that may be found in a directory.
pub struct GitIgnoreTree<'a, Sys: FsRead + FsMetadata> {
sys: &'a Sys,
ignores: HashMap<PathBuf, Option<Rc<DirGitIgnores>>>,
include_paths: Vec<PathBuf>,
}
impl<'a, Sys: FsRead + FsMetadata> GitIgnoreTree<'a, Sys> {
pub fn new(
sys: &'a Sys,
// paths that should override what's in the gitignore
include_paths: Vec<PathBuf>,
) -> Self {
Self {
sys,
ignores: Default::default(),
include_paths,
}
}
pub fn get_resolved_git_ignore_for_dir(
&mut self,
dir_path: &Path,
) -> Option<Rc<DirGitIgnores>> {
// for directories, provide itself in order to tell
// if it should stop searching for gitignores because
// maybe this dir_path is a .git directory
let parent = dir_path.parent()?;
self.get_resolved_git_ignore_inner(parent, Some(dir_path))
}
pub fn get_resolved_git_ignore_for_file(
&mut self,
file_path: &Path,
) -> Option<Rc<DirGitIgnores>> {
let dir_path = file_path.parent()?;
self.get_resolved_git_ignore_inner(dir_path, None)
}
fn get_resolved_git_ignore_inner(
&mut self,
dir_path: &Path,
maybe_parent: Option<&Path>,
) -> Option<Rc<DirGitIgnores>> {
let maybe_resolved = self.ignores.get(dir_path).cloned();
if let Some(resolved) = maybe_resolved {
resolved
} else {
let resolved = self.resolve_gitignore_in_dir(dir_path, maybe_parent);
self.ignores.insert(dir_path.to_owned(), resolved.clone());
resolved
}
}
fn resolve_gitignore_in_dir(
&mut self,
dir_path: &Path,
maybe_parent: Option<&Path>,
) -> Option<Rc<DirGitIgnores>> {
if let Some(parent) = maybe_parent {
// stop searching if the parent dir had a .git directory in it
if self.sys.fs_exists_no_err(parent.join(".git")) {
return None;
}
}
let parent = dir_path.parent().and_then(|parent| {
self.get_resolved_git_ignore_inner(parent, Some(dir_path))
});
let current = self
.sys
.fs_read_to_string_lossy(dir_path.join(".gitignore"))
.ok()
.and_then(|text| {
let mut builder = ignore::gitignore::GitignoreBuilder::new(dir_path);
for line in text.lines() {
builder.add_line(None, line).ok()?;
}
// override the gitignore contents to include these paths
for path in &self.include_paths {
if let Ok(suffix) = path.strip_prefix(dir_path) {
let suffix = suffix.to_string_lossy().replace('\\', "/");
let _ignore = builder.add_line(None, &format!("!/{}", suffix));
if !suffix.ends_with('/') {
let _ignore = builder.add_line(None, &format!("!/{}/", suffix));
}
}
}
let gitignore = builder.build().ok()?;
Some(Rc::new(gitignore))
});
if parent.is_none() && current.is_none() {
None
} else {
Some(Rc::new(DirGitIgnores { current, parent }))
}
}
}
#[cfg(test)]
mod test {
use sys_traits::impls::InMemorySys;
use sys_traits::FsCreateDirAll;
use sys_traits::FsWrite;
use super::*;
#[test]
fn git_ignore_tree() {
let sys = InMemorySys::default();
sys.fs_create_dir_all("/sub_dir/sub_dir").unwrap();
sys.fs_write("/.gitignore", "file.txt").unwrap();
sys.fs_write("/sub_dir/.gitignore", "data.txt").unwrap();
sys
.fs_write("/sub_dir/sub_dir/.gitignore", "!file.txt\nignore.txt")
.unwrap();
let mut ignore_tree = GitIgnoreTree::new(&sys, Vec::new());
let mut run_test = |path: &str, expected: bool| {
let path = PathBuf::from(path);
let gitignore =
ignore_tree.get_resolved_git_ignore_for_file(&path).unwrap();
assert_eq!(
gitignore.is_ignored(&path, /* is_dir */ false),
expected,
"Path: {}",
path.display()
);
};
run_test("/file.txt", true);
run_test("/other.txt", false);
run_test("/data.txt", false);
run_test("/sub_dir/file.txt", true);
run_test("/sub_dir/other.txt", false);
run_test("/sub_dir/data.txt", true);
run_test("/sub_dir/sub_dir/file.txt", false); // unignored up here
run_test("/sub_dir/sub_dir/sub_dir/file.txt", false);
run_test("/sub_dir/sub_dir/sub_dir/ignore.txt", true);
run_test("/sub_dir/sub_dir/ignore.txt", true);
run_test("/sub_dir/ignore.txt", false);
run_test("/ignore.txt", false);
}
}
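For illustration (not part of this commit), a hedged sketch of the directory-level lookup that the test above does not cover, written against the same import path the file collector uses (crate::glob::gitignore); the helper name is hypothetical.

// Sketch only: is `dir` ignored by the combined .gitignore chain above it?
use std::path::Path;

use sys_traits::FsMetadata;
use sys_traits::FsRead;

use crate::glob::gitignore::GitIgnoreTree;

fn dir_is_gitignored<TSys: FsRead + FsMetadata>(sys: &TSys, dir: &Path) -> bool {
  let mut tree = GitIgnoreTree::new(sys, Vec::new());
  tree
    .get_resolved_git_ignore_for_dir(dir)
    .map(|ignores| ignores.is_ignored(dir, /* is_dir */ true))
    .unwrap_or(false)
}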

libs/config/glob/mod.rs (new file, 1626 lines; diff not shown — file too large)

libs/config/lib.rs (new file, 20 lines)

@@ -0,0 +1,20 @@
// Copyright 2018-2025 the Deno authors. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]
#![deny(clippy::unused_async)]
#![deny(clippy::unnecessary_wraps)]
#[cfg(feature = "deno_json")]
pub mod deno_json;
#[cfg(feature = "deno_json")]
pub mod glob;
#[cfg(feature = "deno_json")]
mod sync;
#[cfg(feature = "deno_json")]
mod util;
#[cfg(feature = "workspace")]
pub mod workspace;
#[cfg(feature = "deno_json")]
pub use deno_path_util::UrlToFilePathError;

libs/config/sync.rs (new file, 20 lines)

@@ -0,0 +1,20 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub use inner::*;
#[cfg(feature = "sync")]
mod inner {
#![allow(clippy::disallowed_types)]
pub use std::sync::Arc as MaybeArc;
}
#[cfg(not(feature = "sync"))]
mod inner {
pub use std::rc::Rc as MaybeArc;
}
// ok for constructing
#[allow(clippy::disallowed_types)]
pub fn new_rc<T>(value: T) -> MaybeArc<T> {
MaybeArc::new(value)
}

@@ -0,0 +1,3 @@
{
"name": "@foo/bar"
}

libs/config/testdata/deno.json (new vendored file, 1 line)

@@ -0,0 +1 @@
not a json file

@@ -0,0 +1,20 @@
{
"fmt": {
"files": {
"include": [
"./subdir/"
],
"exclude": [
"./subdir/b.ts"
]
},
"options": {
"useTabs": true,
"lineWidth": 40,
"indentWidth": 8,
"singleQuote": true,
"proseWrap": "always",
"semiColons": false
}
}
}

@@ -0,0 +1,16 @@
{
"fmt": {
"include": [
"./subdir/"
],
"exclude": [
"./subdir/b.ts"
],
"useTabs": true,
"lineWidth": 40,
"indentWidth": 8,
"singleQuote": true,
"proseWrap": "always",
"semiColons": false
}
}

@@ -0,0 +1,46 @@
Deno.test(
{ perms: { net: true } },
async function responseClone() {
const response =
await fetch(
'http://localhost:4545/assets/fixture.json',
)
const response1 =
response.clone()
assert(
response !==
response1,
)
assertEquals(
response.status,
response1
.status,
)
assertEquals(
response.statusText,
response1
.statusText,
)
const u8a =
new Uint8Array(
await response
.arrayBuffer(),
)
const u8a1 =
new Uint8Array(
await response1
.arrayBuffer(),
)
for (
let i = 0;
i <
u8a.byteLength;
i++
) {
assertEquals(
u8a[i],
u8a1[i],
)
}
},
)

@@ -0,0 +1,15 @@
// This file should be excluded from formatting
Deno.test(
{ perms: { net: true } },
async function fetchBodyUsedCancelStream() {
const response = await fetch(
"http://localhost:4545/assets/fixture.json",
);
assert(response.body !== null);
assertEquals(response.bodyUsed, false);
const promise = response.body.cancel();
assertEquals(response.bodyUsed, true);
await promise;
},
);

@@ -0,0 +1,17 @@
## Permissions
Deno is secure by default. Therefore,
unless you specifically enable it, a
program run with Deno has no file,
network, or environment access. Access
to security sensitive functionality
requires that permissions have been
granted to an executing script through
command line flags, or a runtime
permission prompt.
For the following example `mod.ts` has
been granted read-only access to the
file system. It cannot write to the file
system, or perform any other security
sensitive functions.

@@ -0,0 +1,6 @@
{
"compilerOptions": {
"target": "ES5",
"jsx": "preserve"
}
}

libs/config/util.rs (new file, 32 lines)

@@ -0,0 +1,32 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub fn is_skippable_io_error(e: &std::io::Error) -> bool {
use std::io::ErrorKind::*;
// skip over invalid filenames on windows
const ERROR_INVALID_NAME: i32 = 123;
if cfg!(windows) && e.raw_os_error() == Some(ERROR_INVALID_NAME) {
return true;
}
match e.kind() {
InvalidInput | PermissionDenied | NotFound => {
// ok keep going
true
}
_ => {
const NOT_A_DIRECTORY: i32 = 20;
cfg!(unix) && e.raw_os_error() == Some(NOT_A_DIRECTORY)
}
}
}
#[cfg(test)]
mod tests {
#[cfg(windows)]
#[test]
fn is_skippable_io_error_win_invalid_filename() {
let error = std::io::Error::from_raw_os_error(123);
assert!(super::is_skippable_io_error(&error));
}
}
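For illustration (not part of this commit), a test-style sketch, written as if added to the tests module above, of which error kinds the helper treats as skippable on any platform.

  // Sketch only: NotFound, PermissionDenied, and InvalidInput let a directory
  // traversal keep going; most other error kinds do not.
  #[test]
  fn is_skippable_io_error_common_kinds() {
    use std::io::Error;
    use std::io::ErrorKind;

    assert!(super::is_skippable_io_error(&Error::from(ErrorKind::NotFound)));
    assert!(super::is_skippable_io_error(&Error::from(
      ErrorKind::PermissionDenied
    )));
    assert!(super::is_skippable_io_error(&Error::from(
      ErrorKind::InvalidInput
    )));
    assert!(!super::is_skippable_io_error(&Error::from(
      ErrorKind::AlreadyExists
    )));
  }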

(diff not shown for one file — too large to display)

libs/config/workspace/mod.rs (new file, 6363 lines; diff not shown — file too large)

@@ -1,4 +1,4 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// Copyright 2018-2025 the Deno authors. All rights reserved. MIT license.
import { decodeBase64 } from "@std/encoding/base64";
import { assertEquals } from "@std/assert";

@@ -43,6 +43,7 @@ export async function checkCopyright() {
":!:tests/testdata/**",
":!:tests/unit_node/testdata/**",
":!:tests/wpt/suite/**",
":!:libs/config/testdata/**",
// rust
"*.rs",