refactor: add deno_npm_installer crate (#29319)

More changes/improvements will follow in subsequent PRs.
This commit is contained in:
David Sherret 2025-05-16 18:11:05 -04:00 committed by GitHub
parent 2b0d44d8da
commit c4412ffb13
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
64 changed files with 3834 additions and 3917 deletions

71
Cargo.lock generated
View file

@ -1473,6 +1473,7 @@ dependencies = [
"deno_media_type",
"deno_npm",
"deno_npm_cache",
"deno_npm_installer",
"deno_package_json",
"deno_panic",
"deno_path_util",
@ -2137,6 +2138,7 @@ dependencies = [
"deno_media_type",
"deno_node",
"deno_npm",
"deno_npm_installer",
"deno_path_util",
"deno_resolver",
"deno_runtime",
@ -2345,9 +2347,9 @@ dependencies = [
[[package]]
name = "deno_npm"
version = "0.33.3"
version = "0.33.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9cf5aab0fbd2e68c022fef8981a92c4e4b0fcec341c08af1ebb3651f03cd86b"
checksum = "e5eeb448f883bd522905d10e890c04815e9167282b0cb91ad4bf04b998c2d805"
dependencies = [
"async-trait",
"capacity_builder",
@ -2394,6 +2396,45 @@ dependencies = [
"url",
]
[[package]]
name = "deno_npm_installer"
version = "0.0.1"
dependencies = [
"anyhow",
"async-trait",
"bincode",
"boxed_error",
"capacity_builder",
"deno_config",
"deno_error",
"deno_lockfile",
"deno_npm",
"deno_npm_cache",
"deno_package_json",
"deno_path_util",
"deno_resolver",
"deno_semver",
"deno_terminal 0.2.2",
"deno_unsync",
"fs3",
"futures",
"junction",
"log",
"parking_lot",
"pathdiff",
"rustc-hash 2.1.1",
"serde",
"serde_json",
"sys_traits",
"test_server",
"thiserror 2.0.12",
"tokio",
"tokio-util",
"twox-hash",
"url",
"winapi",
]
[[package]]
name = "deno_ops"
version = "0.223.0"
@ -2461,9 +2502,9 @@ dependencies = [
[[package]]
name = "deno_path_util"
version = "0.3.2"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c238a664a0a6f1ce0ff2b73c6854811526d00f442a12f878cb8555b23fe13aa3"
checksum = "b8850326ea9cb786aafd938f3de9866432904c0bae3aa0139a7a4e570b0174f6"
dependencies = [
"deno_error",
"percent-encoding",
@ -2533,6 +2574,7 @@ dependencies = [
"deno_config",
"deno_error",
"deno_graph",
"deno_lockfile",
"deno_media_type",
"deno_npm",
"deno_package_json",
@ -2540,6 +2582,7 @@ dependencies = [
"deno_semver",
"deno_terminal 0.2.2",
"deno_unsync",
"dissimilar",
"futures",
"import_map",
"indexmap 2.8.0",
@ -2751,9 +2794,9 @@ dependencies = [
[[package]]
name = "deno_unsync"
version = "0.4.2"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d774fd83f26b24f0805a6ab8b26834a0d06ceac0db517b769b1e4633c96a2057"
checksum = "47c618b51088b3ac67f15c69b3ed7620ba3a7d495e5a090186df9424b5ab623e"
dependencies = [
"futures",
"parking_lot",
@ -8348,9 +8391,9 @@ dependencies = [
[[package]]
name = "sys_traits"
version = "0.1.9"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3374191d43a934854e99a46cd47f8124369e690353e0f8db42769218d083690"
checksum = "ce4475783d109dc026244ec6fda72b81868f502db08d27ae1b39419f67f40d71"
dependencies = [
"filetime",
"getrandom",
@ -8358,9 +8401,21 @@ dependencies = [
"parking_lot",
"serde",
"serde_json",
"sys_traits_macros",
"windows-sys 0.59.0",
]
[[package]]
name = "sys_traits_macros"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "181f22127402abcf8ee5c83ccd5b408933fec36a6095cf82cda545634692657e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.87",
]
[[package]]
name = "tagptr"
version = "0.2.0"

View file

@ -35,6 +35,7 @@ members = [
"resolvers/deno",
"resolvers/node",
"resolvers/npm_cache",
"resolvers/npm_installer",
"runtime",
"runtime/features",
"runtime/permissions",
@ -65,13 +66,13 @@ deno_lint = "=0.75.0"
deno_lockfile = "=0.28.0"
deno_media_type = { version = "=0.2.8", features = ["module_specifier"] }
deno_native_certs = "0.3.0"
deno_npm = "=0.33.3"
deno_npm = "=0.33.4"
deno_package_json = { version = "=0.6.0", default-features = false }
deno_path_util = "=0.3.2"
deno_path_util = "=0.3.3"
deno_semver = "=0.7.1"
deno_task_shell = "=0.23.0"
deno_terminal = "=0.2.2"
deno_unsync = "0.4.2"
deno_unsync = "0.4.3"
deno_whoami = "0.1.0"
denokv_proto = "0.10.0"
@ -112,6 +113,7 @@ deno_bench_util = { version = "0.199.0", path = "./bench_util" }
deno_features = { version = "0.2.0", path = "./runtime/features" }
deno_lib = { version = "0.22.0", path = "./cli/lib" }
deno_npm_cache = { version = "0.24.0", path = "./resolvers/npm_cache" }
deno_npm_installer = { version = "0.0.1", path = "./resolvers/npm_installer" }
deno_permissions = { version = "0.64.0", path = "./runtime/permissions" }
deno_resolver = { version = "0.36.0", path = "./resolvers/deno" }
deno_runtime = { version = "0.213.0", path = "./runtime" }
@ -234,7 +236,7 @@ simd-json = "0.14.0"
slab = "0.4"
smallvec = "1.8"
socket2 = { version = "0.5.3", features = ["all"] }
sys_traits = "=0.1.9"
sys_traits = "=0.1.10"
tar = "=0.4.43"
tempfile = "3.4.0"
termcolor = "1.1.3"

View file

@ -82,6 +82,7 @@ deno_lockfile.workspace = true
deno_media_type = { workspace = true, features = ["data_url", "decoding", "module_specifier"] }
deno_npm.workspace = true
deno_npm_cache.workspace = true
deno_npm_installer = { workspace = true }
deno_package_json = { workspace = true, features = ["sync"] }
deno_panic = { version = "0.1.0", optional = true }
deno_path_util.workspace = true

View file

@ -1,6 +1,5 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::HashSet;
use std::sync::Arc;
use deno_ast::SourceMapOption;
@ -17,108 +16,11 @@ use deno_core::unsync::sync::AtomicFlag;
use deno_core::url::Url;
use deno_lib::util::hash::FastInsecureHasher;
use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use once_cell::sync::OnceCell;
use crate::sys::CliSys;
use crate::util::collections::FolderScopedMap;
/// Collects every JSR/npm dependency requirement declared in an import
/// map's `imports` and `scopes` sections.
pub fn import_map_deps(
  import_map: &serde_json::Value,
) -> HashSet<JsrDepPackageReq> {
  let top_level = imports_values(import_map.get("imports"));
  let scoped = scope_values(import_map.get("scopes"));
  values_to_set(top_level.into_iter().chain(scoped))
}
/// Collects every JSR/npm dependency requirement declared in a deno.json
/// config file: import map entries (`imports` / `scopes`) plus the
/// compiler options that can reference packages (`jsxImportSource`,
/// `jsxImportSourceTypes`, and the `types` array).
pub fn deno_json_deps(
  config: &deno_config::deno_json::ConfigFile,
) -> HashSet<JsrDepPackageReq> {
  let values = imports_values(config.json.imports.as_ref())
    .into_iter()
    .chain(scope_values(config.json.scopes.as_ref()));
  let mut set = values_to_set(values);
  if let Some(serde_json::Value::Object(compiler_options)) =
    &config.json.compiler_options
  {
    // both of these options name a single package specifier, so they
    // share the same extraction logic
    for key in ["jsxImportSource", "jsxImportSourceTypes"] {
      if let Some(serde_json::Value::String(value)) = compiler_options.get(key)
      {
        if let Some(dep_req) = value_to_dep_req(value) {
          set.insert(dep_req);
        }
      }
    }
    // add the dependencies in the types array
    if let Some(serde_json::Value::Array(types)) = compiler_options.get("types")
    {
      for value in types {
        if let serde_json::Value::String(value) = value {
          if let Some(dep_req) = value_to_dep_req(value) {
            set.insert(dep_req);
          }
        }
      }
    }
  }
  set
}
/// Returns the string values of an `imports`-style JSON object.
/// Non-object inputs and non-string values are silently skipped.
fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> {
  match value.and_then(|v| v.as_object()) {
    Some(obj) => obj
      .values()
      .filter_map(|entry| match entry {
        serde_json::Value::String(text) => Some(text),
        _ => None,
      })
      .collect(),
    None => Vec::new(),
  }
}
/// Flattens a `scopes` JSON object into the string values of each nested
/// imports object. Returns an empty list for non-object input.
fn scope_values(value: Option<&serde_json::Value>) -> Vec<&String> {
  let mut items = Vec::new();
  if let Some(obj) = value.and_then(|v| v.as_object()) {
    for nested in obj.values() {
      items.extend(imports_values(Some(nested)));
    }
  }
  items
}
/// Parses each specifier string into a JSR/npm dependency requirement,
/// discarding anything that is not a valid package specifier.
fn values_to_set<'a>(
  values: impl Iterator<Item = &'a String>,
) -> HashSet<JsrDepPackageReq> {
  values
    .filter_map(|value| value_to_dep_req(value))
    .collect()
}
/// Converts a `jsr:` or `npm:` package specifier string into a dependency
/// requirement, trying the JSR form first; returns `None` for anything
/// that parses as neither.
fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
  JsrPackageReqReference::from_str(value)
    .ok()
    .map(|req_ref| JsrDepPackageReq::jsr(req_ref.into_inner().req))
    .or_else(|| {
      NpmPackageReqReference::from_str(value)
        .ok()
        .map(|req_ref| JsrDepPackageReq::npm(req_ref.into_inner().req))
    })
}
fn check_warn_tsconfig(
ts_config: &TsConfigWithIgnoredOptions,
logged_warnings: &LoggedWarnings,

View file

@ -35,6 +35,7 @@ use deno_lib::args::CaData;
use deno_lib::args::UnstableConfig;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::NpmSystemInfo;
use deno_npm_installer::PackagesAllowedScripts;
use deno_path_util::normalize_path;
use deno_path_util::url_to_file_path;
use deno_runtime::deno_permissions::SysDescriptor;
@ -623,25 +624,6 @@ impl Default for TypeCheckMode {
}
}
// Info needed to run NPM lifecycle scripts
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct LifecycleScriptsConfig {
  // Which npm packages are permitted to run their lifecycle scripts.
  pub allowed: PackagesAllowedScripts,
  // NOTE(review): presumably the cwd the command was invoked from — confirm at call sites.
  pub initial_cwd: PathBuf,
  // NOTE(review): presumably the root dir scripts resolve against — confirm at call sites.
  pub root_dir: PathBuf,
  /// Part of an explicit `deno install`
  pub explicit_install: bool,
}
#[derive(Debug, Clone, Eq, PartialEq, Default)]
/// The set of npm packages that are allowed to run lifecycle scripts.
pub enum PackagesAllowedScripts {
  /// Every package may run its lifecycle scripts.
  All,
  /// Only the listed package specifiers may run lifecycle scripts.
  Some(Vec<String>),
  /// No package may run lifecycle scripts (the default).
  #[default]
  None,
}
fn parse_packages_allowed_scripts(s: &str) -> Result<String, AnyError> {
if !s.starts_with("npm:") {
bail!("Invalid package for --allow-scripts: '{}'. An 'npm:' specifier is required", s);

View file

@ -3,8 +3,6 @@
pub mod deno_json;
mod flags;
mod flags_net;
mod lockfile;
mod package_json;
use std::borrow::Cow;
use std::collections::HashMap;
@ -47,6 +45,7 @@ use deno_lib::args::NPM_PROCESS_STATE;
use deno_lib::version::DENO_VERSION_INFO;
use deno_lib::worker::StorageKeyResolver;
use deno_npm::NpmSystemInfo;
use deno_npm_installer::LifecycleScriptsConfig;
use deno_resolver::factory::resolve_jsr_url;
use deno_runtime::deno_permissions::PermissionsOptions;
use deno_runtime::inspector_server::InspectorServer;
@ -56,17 +55,14 @@ use deno_telemetry::OtelConfig;
use deno_terminal::colors;
use dotenvy::from_filename;
pub use flags::*;
pub use lockfile::AtomicWriteFileWithRetriesError;
pub use lockfile::CliLockfile;
pub use lockfile::CliLockfileReadFromPathOptions;
use once_cell::sync::Lazy;
pub use package_json::NpmInstallDepsProvider;
pub use package_json::PackageJsonDepValueParseWithLocationError;
use sys_traits::FsRead;
use thiserror::Error;
use crate::sys::CliSys;
pub type CliLockfile = deno_resolver::lockfile::LockfileLock<CliSys>;
pub fn jsr_url() -> &'static Url {
static JSR_URL: Lazy<Url> = Lazy::new(|| resolve_jsr_url(&CliSys::default()));

View file

@ -21,7 +21,6 @@ use deno_error::JsErrorBox;
use deno_lib::args::get_root_cert_store;
use deno_lib::args::resolve_npm_resolution_snapshot;
use deno_lib::args::CaData;
use deno_lib::args::NpmProcessStateKind;
use deno_lib::args::NPM_PROCESS_STATE;
use deno_lib::loader::NpmModuleLoader;
use deno_lib::npm::create_npm_process_state_provider;
@ -32,6 +31,11 @@ use deno_lib::worker::LibMainWorkerOptions;
use deno_lib::worker::LibWorkerFactoryRoots;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm_cache::NpmCacheSetting;
use deno_npm_installer::initializer::NpmResolverManagedSnapshotOption;
use deno_npm_installer::lifecycle_scripts::LifecycleScriptsExecutor;
use deno_npm_installer::lifecycle_scripts::NullLifecycleScriptsExecutor;
use deno_npm_installer::package_json::NpmInstallDepsProvider;
use deno_npm_installer::process_state::NpmProcessStateKind;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::factory::ConfigDiscoveryOption;
use deno_resolver::factory::DenoDirPathProviderOptions;
@ -40,6 +44,7 @@ use deno_resolver::factory::ResolverFactoryOptions;
use deno_resolver::factory::SpecifiedImportMapProvider;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::workspace::WorkspaceNpmPatchPackages;
use deno_resolver::workspace::WorkspaceResolver;
use deno_runtime::deno_fs;
use deno_runtime::deno_fs::RealFs;
@ -63,7 +68,7 @@ use crate::args::CliOptions;
use crate::args::ConfigFlag;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::NpmInstallDepsProvider;
use crate::args::InstallFlags;
use crate::args::WorkspaceExternalImportMapLoader;
use crate::cache::Caches;
use crate::cache::CodeCache;
@ -90,19 +95,15 @@ use crate::node::CliCjsModuleExportAnalyzer;
use crate::node::CliNodeCodeTranslator;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::installer::DenoTaskLifeCycleScriptsExecutor;
use crate::npm::installer::LifecycleScriptsExecutor;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::NpmResolutionInstaller;
use crate::npm::installer::NullLifecycleScriptsExecutor;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmCacheHttpClient;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::npm::CliNpmResolutionInitializer;
use crate::npm::CliNpmResolutionInstaller;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::CliNpmTarballCache;
use crate::npm::NpmResolutionInitializer;
use crate::npm::WorkspaceNpmPatchPackages;
use crate::npm::DenoTaskLifeCycleScriptsExecutor;
use crate::resolver::on_resolve_diagnostic;
use crate::resolver::CliCjsTracker;
use crate::resolver::CliNpmGraphResolver;
@ -329,10 +330,10 @@ struct CliFactoryServices {
npm_cache: Deferred<Arc<CliNpmCache>>,
npm_cache_http_client: Deferred<Arc<CliNpmCacheHttpClient>>,
npm_graph_resolver: Deferred<Arc<CliNpmGraphResolver>>,
npm_installer: Deferred<Arc<NpmInstaller>>,
npm_installer: Deferred<Arc<CliNpmInstaller>>,
npm_registry_info_provider: Deferred<Arc<CliNpmRegistryInfoProvider>>,
npm_resolution_initializer: Deferred<Arc<NpmResolutionInitializer>>,
npm_resolution_installer: Deferred<Arc<NpmResolutionInstaller>>,
npm_resolution_initializer: Deferred<Arc<CliNpmResolutionInitializer>>,
npm_resolution_installer: Deferred<Arc<CliNpmResolutionInstaller>>,
npm_tarball_cache: Deferred<Arc<CliNpmTarballCache>>,
parsed_source_cache: Deferred<Arc<ParsedSourceCache>>,
permission_desc_parser:
@ -407,8 +408,25 @@ impl CliFactory {
let adapter = self.lockfile_npm_package_info_provider()?;
let maybe_lock_file = CliLockfile::discover(
&self.sys(),
&self.flags,
self.sys(),
deno_resolver::lockfile::LockfileFlags {
no_lock: self.flags.no_lock
|| matches!(
self.flags.subcommand,
DenoSubcommand::Install(InstallFlags::Global(..))
| DenoSubcommand::Uninstall(_)
),
frozen_lockfile: self.flags.frozen_lockfile,
lock: self
.flags
.lock
.as_ref()
.map(|p| workspace_factory.initial_cwd().join(p)),
skip_write: self.flags.internal.lockfile_skip_write,
no_config: self.flags.config_flag
== crate::args::ConfigFlag::Disabled,
no_npm: self.flags.no_npm,
},
&workspace_directory.workspace,
maybe_external_import_map.as_ref().map(|v| &v.value),
&adapter,
@ -628,7 +646,7 @@ impl CliFactory {
pub async fn npm_installer_if_managed(
&self,
) -> Result<Option<&Arc<NpmInstaller>>, AnyError> {
) -> Result<Option<&Arc<CliNpmInstaller>>, AnyError> {
if self.resolver_factory()?.use_byonm()? || self.cli_options()?.no_npm() {
Ok(None)
} else {
@ -636,7 +654,7 @@ impl CliFactory {
}
}
pub async fn npm_installer(&self) -> Result<&Arc<NpmInstaller>, AnyError> {
pub async fn npm_installer(&self) -> Result<&Arc<CliNpmInstaller>, AnyError> {
self
.services
.npm_installer
@ -650,7 +668,7 @@ impl CliFactory {
let workspace_npm_patch_packages =
self.workspace_npm_patch_packages()?;
let npm_resolver = self.npm_resolver().await?.clone();
Ok(Arc::new(NpmInstaller::new(
Ok(Arc::new(CliNpmInstaller::new(
match npm_resolver.as_managed() {
Some(managed_npm_resolver) => {
Arc::new(DenoTaskLifeCycleScriptsExecutor::new(
@ -708,25 +726,25 @@ impl CliFactory {
pub async fn npm_resolution_initializer(
&self,
) -> Result<&Arc<NpmResolutionInitializer>, AnyError> {
) -> Result<&Arc<CliNpmResolutionInitializer>, AnyError> {
self
.services
.npm_resolution_initializer
.get_or_try_init_async(async move {
Ok(Arc::new(NpmResolutionInitializer::new(
Ok(Arc::new(CliNpmResolutionInitializer::new(
self.npm_resolution()?.clone(),
self.workspace_npm_patch_packages()?.clone(),
match resolve_npm_resolution_snapshot()? {
Some(snapshot) => {
CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot))
NpmResolverManagedSnapshotOption::Specified(Some(snapshot))
}
None => match self.maybe_lockfile().await? {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
NpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
None => NpmResolverManagedSnapshotOption::Specified(None),
},
},
)))
@ -736,12 +754,12 @@ impl CliFactory {
pub async fn npm_resolution_installer(
&self,
) -> Result<&Arc<NpmResolutionInstaller>, AnyError> {
) -> Result<&Arc<CliNpmResolutionInstaller>, AnyError> {
self
.services
.npm_resolution_installer
.get_or_try_init_async(async move {
Ok(Arc::new(NpmResolutionInstaller::new(
Ok(Arc::new(CliNpmResolutionInstaller::new(
self.npm_registry_info_provider()?.clone(),
self.npm_resolution()?.clone(),
self.maybe_lockfile().await?.cloned(),

View file

@ -42,7 +42,7 @@ pub struct MainModuleGraphContainer {
// Allow only one request to update the graph data at a time,
// but allow other requests to read from it at any time even
// while another request is updating the data.
update_queue: Arc<crate::util::sync::TaskQueue>,
update_queue: Arc<deno_core::unsync::sync::TaskQueue>,
inner: Arc<RwLock<Arc<ModuleGraph>>>,
cli_options: Arc<CliOptions>,
module_load_preparer: Arc<ModuleLoadPreparer>,
@ -154,7 +154,7 @@ impl ModuleGraphContainer for MainModuleGraphContainer {
/// everything looks fine, calling `.commit()` will store the
/// new graph in the ModuleGraphContainer.
pub struct MainModuleGraphUpdatePermit<'a> {
permit: crate::util::sync::TaskQueuePermit<'a>,
permit: deno_core::unsync::sync::TaskQueuePermit<'a>,
inner: Arc<RwLock<Arc<ModuleGraph>>>,
graph: ModuleGraph,
}

View file

@ -31,6 +31,7 @@ use deno_graph::ModuleLoadError;
use deno_graph::ResolutionError;
use deno_graph::SpecifierError;
use deno_graph::WorkspaceFastCheckOption;
use deno_npm_installer::PackageCaching;
use deno_path_util::url_to_file_path;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::workspace::sloppy_imports_resolve;
@ -56,8 +57,7 @@ use crate::cache::ModuleInfoCache;
use crate::cache::ParsedSourceCache;
use crate::colors;
use crate::file_fetcher::CliFileFetcher;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmResolver;
use crate::resolver::CliCjsTracker;
use crate::resolver::CliNpmGraphResolver;
@ -628,7 +628,7 @@ pub struct ModuleGraphBuilder {
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
npm_resolver: CliNpmResolver,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,
@ -650,7 +650,7 @@ impl ModuleGraphBuilder {
maybe_file_watcher_reporter: Option<FileWatcherReporter>,
module_info_cache: Arc<ModuleInfoCache>,
npm_graph_resolver: Arc<CliNpmGraphResolver>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
npm_resolver: CliNpmResolver,
parsed_source_cache: Arc<ParsedSourceCache>,
resolver: Arc<CliResolver>,

View file

@ -20,6 +20,7 @@ deno_fs = { workspace = true, features = ["sync_fs"] }
deno_media_type.workspace = true
deno_node = { workspace = true, features = ["sync_fs"] }
deno_npm.workspace = true
deno_npm_installer.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_runtime.workspace = true

View file

@ -10,6 +10,8 @@ use std::sync::LazyLock;
use deno_npm::resolution::PackageIdNotFoundError;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm_installer::process_state::NpmProcessState;
use deno_npm_installer::process_state::NpmProcessStateKind;
use deno_runtime::colors;
use deno_runtime::deno_tls::deno_native_certs::load_native_certs;
use deno_runtime::deno_tls::rustls;
@ -153,19 +155,6 @@ pub fn get_root_cert_store(
Ok(root_cert_store)
}
/// State provided to the process via an environment variable.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
  // Whether this is a managed snapshot or byonm state.
  pub kind: NpmProcessStateKind,
  // Path to the local node_modules directory, when one is in use.
  pub local_node_modules_path: Option<String>,
}
// The two flavors of npm state a child process can inherit.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
  /// A serialized snapshot of the resolved npm packages.
  Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
  /// "Bring your own node_modules" mode — no managed snapshot.
  Byonm,
}
pub static NPM_PROCESS_STATE: LazyLock<Option<NpmProcessState>> =
LazyLock::new(|| {
/// Allows for passing either a file descriptor or file path.

View file

@ -2,10 +2,10 @@
mod permission_checker;
use std::path::Path;
use std::sync::Arc;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_npm_installer::process_state::NpmProcessState;
use deno_npm_installer::process_state::NpmProcessStateKind;
use deno_resolver::npm::ByonmNpmResolver;
use deno_resolver::npm::ManagedNpmResolverRc;
use deno_resolver::npm::NpmResolver;
@ -14,8 +14,6 @@ use deno_runtime::deno_process::NpmProcessStateProviderRc;
pub use permission_checker::NpmRegistryReadPermissionChecker;
pub use permission_checker::NpmRegistryReadPermissionCheckerMode;
use crate::args::NpmProcessState;
use crate::args::NpmProcessStateKind;
use crate::sys::DenoLibSys;
pub fn create_npm_process_state_provider<TSys: DenoLibSys>(
@ -31,18 +29,6 @@ pub fn create_npm_process_state_provider<TSys: DenoLibSys>(
}
}
/// Serializes a snapshot-kind `NpmProcessState` to a JSON string for
/// handing off to a child process.
pub fn npm_process_state(
  snapshot: ValidSerializedNpmResolutionSnapshot,
  node_modules_path: Option<&Path>,
) -> String {
  let local_node_modules_path =
    node_modules_path.map(|path| path.to_string_lossy().to_string());
  let state = NpmProcessState {
    kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
    local_node_modules_path,
  };
  serde_json::to_string(&state).unwrap()
}
#[derive(Debug)]
pub struct ManagedNpmProcessStateProvider<TSys: DenoLibSys>(
pub ManagedNpmResolverRc<TSys>,
@ -52,10 +38,11 @@ impl<TSys: DenoLibSys> NpmProcessStateProvider
for ManagedNpmProcessStateProvider<TSys>
{
fn get_npm_process_state(&self) -> String {
npm_process_state(
NpmProcessState::new_managed(
self.0.resolution().serialized_valid_snapshot(),
self.0.root_node_modules_path(),
)
.as_serialized()
}
}
@ -68,13 +55,13 @@ impl<TSys: DenoLibSys> NpmProcessStateProvider
for ByonmNpmProcessStateProvider<TSys>
{
fn get_npm_process_state(&self) -> String {
serde_json::to_string(&NpmProcessState {
NpmProcessState {
kind: NpmProcessStateKind::Byonm,
local_node_modules_path: self
.0
.root_node_modules_path()
.map(|p| p.to_string_lossy().to_string()),
})
.unwrap()
}
.as_serialized()
}
}

View file

@ -12,6 +12,7 @@ use sys_traits::FsRename;
use sys_traits::SystemRandom;
use sys_traits::ThreadSleep;
#[sys_traits::auto_impl]
pub trait DenoLibSys:
FsCanonicalize
+ FsCreateDirAll
@ -31,24 +32,3 @@ pub trait DenoLibSys:
+ 'static
{
}
// Blanket implementation: any type providing the full set of filesystem,
// randomness, sleep, and node-ext capabilities (plus the usual marker
// bounds) automatically implements `DenoLibSys`.
impl<
T: FsCanonicalize
+ FsCreateDirAll
+ FsReadDir
+ FsMetadata
+ FsOpen
+ FsRemoveFile
+ FsRename
+ FsRead
+ ThreadSleep
+ SystemRandom
+ ExtNodeSys
+ Clone
+ Send
+ Sync
+ std::fmt::Debug
+ 'static,
> DenoLibSys for T
{
}

View file

@ -44,6 +44,7 @@ use deno_lint::linter::LintConfig as DenoLintConfig;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_package_json::PackageJsonCache;
use deno_path_util::url_to_file_path;
use deno_resolver::lockfile::LockfileReadFromPathOptions;
use deno_resolver::npmrc::discover_npmrc_from_workspace;
use deno_resolver::workspace::CreateResolverOptions;
use deno_resolver::workspace::FsCacheOptions;
@ -62,7 +63,6 @@ use super::lsp_custom;
use super::urls::uri_to_url;
use super::urls::url_to_uri;
use crate::args::CliLockfile;
use crate::args::CliLockfileReadFromPathOptions;
use crate::args::ConfigFile;
use crate::args::LintFlags;
use crate::args::LintOptions;
@ -2144,8 +2144,8 @@ async fn resolve_lockfile_from_path(
>,
) -> Option<CliLockfile> {
match CliLockfile::read_from_path(
&CliSys::default(),
CliLockfileReadFromPathOptions {
CliSys::default(),
LockfileReadFromPathOptions {
file_path: lockfile_path,
frozen,
skip_write: false,

View file

@ -20,16 +20,24 @@ use deno_graph::ModuleSpecifier;
use deno_graph::Range;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::TarballCache;
use deno_npm_installer::initializer::NpmResolutionInitializer;
use deno_npm_installer::initializer::NpmResolverManagedSnapshotOption;
use deno_npm_installer::lifecycle_scripts::NullLifecycleScriptsExecutor;
use deno_npm_installer::package_json::NpmInstallDepsProvider;
use deno_npm_installer::resolution::NpmResolutionInstaller;
use deno_npm_installer::LifecycleScriptsConfig;
use deno_path_util::url_to_file_path;
use deno_resolver::cjs::IsCjsResolutionMode;
use deno_resolver::graph::FoundPackageJsonDepFlag;
use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions;
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::npm::CreateInNpmPkgCheckerOptions;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_resolver::npm::NpmReqResolverOptions;
use deno_resolver::npmrc::create_default_npmrc;
use deno_resolver::workspace::PackageJsonDepResolution;
use deno_resolver::workspace::WorkspaceNpmPatchPackages;
use deno_resolver::workspace::WorkspaceResolver;
use deno_resolver::DenoResolverOptions;
use deno_resolver::NodeAndNpmReqResolver;
@ -53,8 +61,6 @@ use super::cache::LspCache;
use super::documents::DocumentModule;
use super::jsr::JsrCacheResolver;
use crate::args::CliLockfile;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::factory::Deferred;
use crate::graph_util::CliJsrUrlProvider;
use crate::http_util::HttpClientProvider;
@ -63,20 +69,14 @@ use crate::lsp::config::ConfigData;
use crate::lsp::logging::lsp_warn;
use crate::node::CliNodeResolver;
use crate::node::CliPackageJsonResolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::NpmResolutionInstaller;
use crate::npm::installer::NullLifecycleScriptsExecutor;
use crate::npm::CliByonmNpmResolverCreateOptions;
use crate::npm::CliManagedNpmResolver;
use crate::npm::CliManagedNpmResolverCreateOptions;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmCacheHttpClient;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::npm::CliNpmResolver;
use crate::npm::CliNpmResolverCreateOptions;
use crate::npm::CliNpmResolverManagedSnapshotOption;
use crate::npm::NpmResolutionInitializer;
use crate::npm::WorkspaceNpmPatchPackages;
use crate::resolver::on_resolve_diagnostic;
use crate::resolver::CliIsCjsResolver;
use crate::resolver::CliNpmReqResolver;
@ -92,7 +92,7 @@ pub struct LspScopedResolver {
in_npm_pkg_checker: DenoInNpmPackageChecker,
is_cjs_resolver: Arc<CliIsCjsResolver>,
jsr_resolver: Option<Arc<JsrCacheResolver>>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
npm_installer_reqs: Arc<Mutex<BTreeSet<PackageReq>>>,
npm_resolution: Arc<NpmResolutionCell>,
npm_resolver: Option<CliNpmResolver>,
@ -251,7 +251,7 @@ impl LspScopedResolver {
managed_npm_resolver.global_cache_root_path().to_path_buf(),
npmrc.get_all_known_registries_urls(),
));
CliManagedNpmResolverCreateOptions {
ManagedNpmResolverCreateOptions {
sys,
npm_cache_dir,
maybe_node_modules_path: managed_npm_resolver
@ -750,7 +750,7 @@ struct ResolverFactoryServices {
in_npm_pkg_checker: Deferred<DenoInNpmPackageChecker>,
is_cjs_resolver: Deferred<Arc<CliIsCjsResolver>>,
node_resolver: Deferred<Option<Arc<CliNodeResolver>>>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
npm_pkg_req_resolver: Deferred<Option<Arc<CliNpmReqResolver>>>,
npm_resolver: Option<CliNpmResolver>,
npm_resolution: Arc<NpmResolutionCell>,
@ -852,11 +852,11 @@ impl<'a> ResolverFactory<'a> {
patch_packages.clone(),
match self.config_data.and_then(|d| d.lockfile.as_ref()) {
Some(lockfile) => {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(
NpmResolverManagedSnapshotOption::ResolveFromLockfile(
lockfile.clone(),
)
}
None => CliNpmResolverManagedSnapshotOption::Specified(None),
None => NpmResolverManagedSnapshotOption::Specified(None),
},
));
// Don't provide the lockfile. We don't want these resolvers
@ -876,7 +876,7 @@ impl<'a> ResolverFactory<'a> {
maybe_lockfile.clone(),
patch_packages.clone(),
));
let npm_installer = Arc::new(NpmInstaller::new(
let npm_installer = Arc::new(CliNpmInstaller::new(
Arc::new(NullLifecycleScriptsExecutor),
npm_cache.clone(),
Arc::new(NpmInstallDepsProvider::empty()),
@ -898,7 +898,7 @@ impl<'a> ResolverFactory<'a> {
log::warn!("failed to initialize npm resolution: {}", err);
}
CliNpmResolverCreateOptions::Managed(CliManagedNpmResolverCreateOptions {
CliNpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions {
sys: CliSys::default(),
npm_cache_dir,
maybe_node_modules_path,
@ -910,7 +910,7 @@ impl<'a> ResolverFactory<'a> {
self.set_npm_resolver(CliNpmResolver::new(options));
}
pub fn set_npm_installer(&mut self, npm_installer: Arc<NpmInstaller>) {
pub fn set_npm_installer(&mut self, npm_installer: Arc<CliNpmInstaller>) {
self.services.npm_installer = Some(npm_installer);
}
@ -972,7 +972,7 @@ impl<'a> ResolverFactory<'a> {
})
}
pub fn npm_installer(&self) -> Option<&Arc<NpmInstaller>> {
pub fn npm_installer(&self) -> Option<&Arc<CliNpmInstaller>> {
self.services.npm_installer.as_ref()
}
@ -1257,7 +1257,7 @@ impl RedirectResolver {
}
type AddNpmReqsRequest = (
Arc<NpmInstaller>,
Arc<CliNpmInstaller>,
Vec<PackageReq>,
std::sync::mpsc::Sender<Result<(), JsErrorBox>>,
);
@ -1292,7 +1292,7 @@ impl AddNpmReqsThread {
pub fn add_npm_reqs(
&self,
npm_installer: Arc<NpmInstaller>,
npm_installer: Arc<CliNpmInstaller>,
reqs: Vec<PackageReq>,
) -> Result<(), JsErrorBox> {
let request_tx = self.request_tx.as_ref().unwrap();

View file

@ -118,9 +118,7 @@ pub enum PrepareModuleLoadError {
Check(#[from] CheckError),
#[class(inherit)]
#[error(transparent)]
AtomicWriteFileWithRetries(
#[from] crate::args::AtomicWriteFileWithRetriesError,
),
LockfileWrite(#[from] deno_resolver::lockfile::LockfileWriteError),
#[class(inherit)]
#[error(transparent)]
Other(#[from] JsErrorBox),

666
cli/npm.rs Normal file
View file

@ -0,0 +1,666 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use dashmap::DashMap;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::TryStreamExt;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::DefaultTarballUrlProvider;
use deno_npm::resolution::NpmRegistryDefaultTarballUrlProvider;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmResolutionPackage;
use deno_npm_cache::NpmCacheHttpClientBytesResponse;
use deno_npm_cache::NpmCacheHttpClientResponse;
use deno_npm_installer::lifecycle_scripts::is_broken_default_install_script;
use deno_npm_installer::lifecycle_scripts::LifecycleScriptsExecutor;
use deno_npm_installer::lifecycle_scripts::LifecycleScriptsExecutorOptions;
use deno_npm_installer::lifecycle_scripts::PackageWithScript;
use deno_npm_installer::lifecycle_scripts::LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR;
use deno_npm_installer::BinEntries;
use deno_npm_installer::CachedNpmPackageExtraInfoProvider;
use deno_npm_installer::ExpectedExtraInfo;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_resolver::npm::ManagedNpmResolverRc;
use deno_resolver::workspace::WorkspaceNpmPatchPackages;
use deno_runtime::deno_io::FromRawIoHandle;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_task_shell::KillSignal;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::sys::CliSys;
use crate::task_runner::TaskStdio;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressMessagePrompt;
// Concrete CLI instantiations of the generic npm installer/cache/resolver
// types, parameterized over the CLI's HTTP client and sys implementations.
pub type CliNpmInstaller =
  deno_npm_installer::NpmInstaller<CliNpmCacheHttpClient, CliSys>;
pub type CliNpmTarballCache =
  deno_npm_cache::TarballCache<CliNpmCacheHttpClient, CliSys>;
pub type CliNpmCache = deno_npm_cache::NpmCache<CliSys>;
pub type CliNpmRegistryInfoProvider =
  deno_npm_cache::RegistryInfoProvider<CliNpmCacheHttpClient, CliSys>;
pub type CliNpmResolver = deno_resolver::npm::NpmResolver<CliSys>;
pub type CliManagedNpmResolver = deno_resolver::npm::ManagedNpmResolver<CliSys>;
pub type CliNpmResolverCreateOptions =
  deno_resolver::npm::NpmResolverCreateOptions<CliSys>;
pub type CliByonmNpmResolverCreateOptions =
  ByonmNpmResolverCreateOptions<CliSys>;
pub type CliNpmResolutionInitializer =
  deno_npm_installer::initializer::NpmResolutionInitializer<CliSys>;
pub type CliNpmResolutionInstaller =
  deno_npm_installer::resolution::NpmResolutionInstaller<
    CliNpmCacheHttpClient,
    CliSys,
  >;
/// Adapts an [`NpmRegistryApi`] (plus the workspace's patched packages) to
/// the [`deno_lockfile::NpmPackageInfoProvider`] interface.
pub struct NpmPackageInfoApiAdapter {
  // registry used to look up package/version information
  api: Arc<dyn NpmRegistryApi + Send + Sync>,
  // workspace-patched packages consulted when resolving version info
  workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
}
impl NpmPackageInfoApiAdapter {
  pub fn new(
    api: Arc<dyn NpmRegistryApi + Send + Sync>,
    workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
  ) -> Self {
    Self {
      api,
      workspace_patch_packages,
    }
  }
}
#[async_trait::async_trait(?Send)]
impl deno_lockfile::NpmPackageInfoProvider for NpmPackageInfoApiAdapter {
  /// Looks up lockfile npm info for the given name/version pairs, retrying
  /// once when the first attempt fails and the registry api agrees to a
  /// forced reload of its cached data.
  async fn get_npm_package_info(
    &self,
    values: &[PackageNv],
  ) -> Result<
    Vec<deno_lockfile::Lockfile5NpmInfo>,
    Box<dyn std::error::Error + Send + Sync>,
  > {
    let first_attempt =
      get_infos(&*self.api, &self.workspace_patch_packages, values).await;
    match first_attempt {
      // the cached registry data may have been stale; retry now that a
      // force reload has been requested
      Err(_) if self.api.mark_force_reload() => {
        get_infos(&*self.api, &self.workspace_patch_packages, values).await
      }
      other => other,
    }
  }
}
/// Fetches registry info for each requested package/version and converts it
/// into the [`deno_lockfile::Lockfile5NpmInfo`] shape, preserving the order
/// of `values` (results are collected via `FuturesOrdered`).
async fn get_infos(
  info_provider: &(dyn NpmRegistryApi + Send + Sync),
  workspace_patch_packages: &WorkspaceNpmPatchPackages,
  values: &[PackageNv],
) -> Result<
  Vec<deno_lockfile::Lockfile5NpmInfo>,
  Box<dyn std::error::Error + Send + Sync>,
> {
  let futs = values
    .iter()
    .map(|v| async move {
      let info = info_provider.package_info(v.name.as_str()).await?;
      let version_info = info.version_info(v, &workspace_patch_packages.0)?;
      Ok::<_, Box<dyn std::error::Error + Send + Sync>>(
        deno_lockfile::Lockfile5NpmInfo {
          // only record the tarball url when it differs from the registry's
          // default url for this package/version
          tarball_url: version_info.dist.as_ref().and_then(|d| {
            let tarball_url_provider = NpmRegistryDefaultTarballUrlProvider;
            if d.tarball == tarball_url_provider.default_tarball_url(v) {
              None
            } else {
              Some(d.tarball.clone())
            }
          }),
          optional_dependencies: version_info
            .optional_dependencies
            .iter()
            .map(|(k, v)| (k.to_string(), v.to_string()))
            .collect::<std::collections::BTreeMap<_, _>>(),
          cpu: version_info.cpu.iter().map(|s| s.to_string()).collect(),
          os: version_info.os.iter().map(|s| s.to_string()).collect(),
          deprecated: version_info.deprecated.is_some(),
          bin: version_info.bin.is_some(),
          // true when the package has any install-time lifecycle script
          scripts: version_info.scripts.contains_key("preinstall")
            || version_info.scripts.contains_key("install")
            || version_info.scripts.contains_key("postinstall"),
          // peer dependencies marked optional in `peerDependenciesMeta`
          optional_peers: version_info
            .peer_dependencies_meta
            .iter()
            .filter_map(|(k, v)| {
              if v.optional {
                version_info
                  .peer_dependencies
                  .get(k)
                  .map(|v| (k.to_string(), v.to_string()))
              } else {
                None
              }
            })
            .collect::<std::collections::BTreeMap<_, _>>(),
        },
      )
    })
    .collect::<FuturesOrdered<_>>();
  let package_infos = futs.try_collect::<Vec<_>>().await?;
  Ok(package_infos)
}
/// HTTP client used by the npm cache. Wraps the CLI's shared
/// [`HttpClientProvider`] and reports download progress on a progress bar.
#[derive(Debug)]
pub struct CliNpmCacheHttpClient {
  http_client_provider: Arc<HttpClientProvider>,
  progress_bar: ProgressBar,
}
impl CliNpmCacheHttpClient {
  pub fn new(
    http_client_provider: Arc<HttpClientProvider>,
    progress_bar: ProgressBar,
  ) -> Self {
    Self {
      http_client_provider,
      progress_bar,
    }
  }
}
#[async_trait::async_trait(?Send)]
impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient {
  /// Downloads `url`, optionally sending an `Authorization` header and an
  /// `If-None-Match` etag for conditional requests, and maps the CLI's HTTP
  /// response/error types into the `deno_npm_cache` equivalents.
  async fn download_with_retries_on_any_tokio_runtime(
    &self,
    url: Url,
    maybe_auth: Option<String>,
    maybe_etag: Option<String>,
  ) -> Result<NpmCacheHttpClientResponse, deno_npm_cache::DownloadError> {
    let guard = self.progress_bar.update(url.as_str());
    let client = self.http_client_provider.get_or_create().map_err(|err| {
      deno_npm_cache::DownloadError {
        status_code: None,
        error: err,
      }
    })?;
    let mut headers = http::HeaderMap::new();
    if let Some(auth) = maybe_auth {
      headers.append(
        http::header::AUTHORIZATION,
        // NOTE(review): panics if the auth value contains bytes that are
        // invalid in an HTTP header — confirm callers always pass valid values
        http::header::HeaderValue::try_from(auth).unwrap(),
      );
    }
    if let Some(etag) = maybe_etag {
      headers.append(
        http::header::IF_NONE_MATCH,
        http::header::HeaderValue::try_from(etag).unwrap(),
      );
    }
    client
      .download_with_progress_and_retries(url, &headers, &guard)
      .await
      .map(|response| match response {
        crate::http_util::HttpClientResponse::Success { headers, body } => {
          NpmCacheHttpClientResponse::Bytes(NpmCacheHttpClientBytesResponse {
            // forward the etag (if any) so the caller can do conditional
            // requests next time
            etag: headers
              .get(http::header::ETAG)
              .and_then(|e| e.to_str().map(|t| t.to_string()).ok()),
            bytes: body,
          })
        }
        crate::http_util::HttpClientResponse::NotFound => {
          NpmCacheHttpClientResponse::NotFound
        }
        crate::http_util::HttpClientResponse::NotModified => {
          NpmCacheHttpClientResponse::NotModified
        }
      })
      .map_err(|err| {
        use crate::http_util::DownloadErrorKind::*;
        // only a BadResponse carries an HTTP status code; everything else
        // (transport/parse errors) maps to None
        let status_code = match err.as_kind() {
          Fetch { .. }
          | UrlParse { .. }
          | HttpParse { .. }
          | Json { .. }
          | ToStr { .. }
          | RedirectHeaderParse { .. }
          | TooManyRedirects
          | UnhandledNotModified
          | NotFound
          | Other(_) => None,
          BadResponse(bad_response_error) => {
            Some(bad_response_error.status_code.as_u16())
          }
        };
        deno_npm_cache::DownloadError {
          status_code,
          error: JsErrorBox::from_err(err),
        }
      })
  }
}
/// Fetches and memoizes npm registry information on demand via the CLI's
/// file fetcher (bypassing the regular registry info provider — see the
/// todo in `package_info`).
#[derive(Debug)]
pub struct NpmFetchResolver {
  // memoized req -> resolved name/version (`None` = not resolvable)
  nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
  // memoized package name -> registry info (`None` = fetch/parse failed)
  info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
  file_fetcher: Arc<CliFileFetcher>,
  npmrc: Arc<ResolvedNpmRc>,
}
impl NpmFetchResolver {
  pub fn new(
    file_fetcher: Arc<CliFileFetcher>,
    npmrc: Arc<ResolvedNpmRc>,
  ) -> Self {
    Self {
      nv_by_req: Default::default(),
      info_by_name: Default::default(),
      file_fetcher,
      npmrc,
    }
  }

  /// Resolves a package requirement to a concrete name/version.
  ///
  /// Dist tags (e.g. `latest`) are resolved directly from the registry's
  /// dist-tags map; otherwise the highest published version matching the
  /// version requirement is chosen. Results — including failed lookups —
  /// are cached per requirement.
  pub async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
    if let Some(nv) = self.nv_by_req.get(req) {
      return nv.value().clone();
    }
    let maybe_get_nv = || async {
      let name = req.name.clone();
      let package_info = self.package_info(&name).await?;
      if let Some(dist_tag) = req.version_req.tag() {
        let version = package_info.dist_tags.get(dist_tag)?.clone();
        return Some(PackageNv { name, version });
      }
      // Find the highest version matching the requirement. (The tag case
      // was fully handled above, so no tag check is needed here.)
      let version = package_info
        .versions
        .keys()
        .filter(|v| req.version_req.matches(v))
        .max()
        .cloned()?;
      Some(PackageNv { name, version })
    };
    let nv = maybe_get_nv().await;
    // cache even a `None` result so failed lookups aren't retried
    self.nv_by_req.insert(req.clone(), nv.clone());
    nv
  }

  /// Fetches (and memoizes) the registry info for a package name, returning
  /// `None` when the fetch or JSON parse fails.
  pub async fn package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>> {
    if let Some(info) = self.info_by_name.get(name) {
      return info.value().clone();
    }
    // todo(#27198): use RegistryInfoProvider instead
    let fetch_package_info = || async {
      let info_url = deno_npm_cache::get_package_url(&self.npmrc, name);
      let registry_config = self.npmrc.get_registry_config(name);
      // TODO(bartlomieju): this should error out, not use `.ok()`.
      let maybe_auth_header =
        deno_npm_cache::maybe_auth_header_value_for_npm_registry(
          registry_config,
        )
        .map_err(AnyError::from)
        .and_then(|value| match value {
          Some(value) => Ok(Some((
            http::header::AUTHORIZATION,
            http::HeaderValue::try_from(value.into_bytes())?,
          ))),
          None => Ok(None),
        })
        .ok()?;
      let file = self
        .file_fetcher
        .fetch_bypass_permissions_with_maybe_auth(&info_url, maybe_auth_header)
        .await
        .ok()?;
      serde_json::from_slice::<NpmPackageInfo>(&file.source).ok()
    };
    let info = fetch_package_info().await.map(Arc::new);
    self.info_by_name.insert(name.to_string(), info.clone());
    info
  }
}
/// Environment variable npm populates with the user agent string for
/// subprocesses and lifecycle scripts.
pub static NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent";
/// Builds the user agent value set in `npm_config_user_agent`
/// (e.g. `deno/2.x npm/? deno/2.x linux x86_64`). The deno version
/// intentionally appears twice, mirroring npm's `<name>/<ver> npm/<ver>
/// node/<ver>` shape with `npm/?` as a placeholder.
pub fn get_npm_config_user_agent() -> String {
  format!(
    "deno/{} npm/? deno/{} {} {}",
    DENO_VERSION_INFO.deno,
    DENO_VERSION_INFO.deno,
    std::env::consts::OS,
    std::env::consts::ARCH
  )
}
/// Errors that can occur while running npm package lifecycle scripts
/// through the deno task runner.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum DenoTaskLifecycleScriptsError {
  /// General I/O failure.
  #[class(inherit)]
  #[error(transparent)]
  Io(#[from] std::io::Error),
  /// Failure setting up or re-linking `node_modules/.bin` entries.
  #[class(inherit)]
  #[error(transparent)]
  BinEntries(#[from] deno_npm_installer::BinEntriesError),
  /// Could not create the temp file used to pass npm resolution state
  /// down to script subprocesses.
  #[class(inherit)]
  #[error(
    "failed to create npm process state tempfile for running lifecycle scripts"
  )]
  CreateNpmProcessState(#[source] std::io::Error),
  /// The task runner itself failed to execute a script.
  #[class(generic)]
  #[error(transparent)]
  Task(AnyError),
  /// One or more packages' scripts exited with a non-zero exit code.
  #[class(generic)]
  #[error("failed to run scripts for packages: {}", .0.join(", "))]
  RunScripts(Vec<String>),
}
/// [`LifecycleScriptsExecutor`] implementation that runs npm lifecycle
/// scripts (preinstall/install/postinstall) through Deno's task runner.
pub struct DenoTaskLifeCycleScriptsExecutor {
  npm_resolver: ManagedNpmResolverRc<CliSys>,
}
#[async_trait::async_trait(?Send)]
impl LifecycleScriptsExecutor for DenoTaskLifeCycleScriptsExecutor {
  /// Runs the preinstall/install/postinstall scripts for each package in
  /// `options.packages_with_scripts` sequentially, then re-links bin
  /// entries for those packages. Script failures are collected and reported
  /// together at the end rather than aborting the whole run.
  async fn execute(
    &self,
    options: LifecycleScriptsExecutorOptions<'_>,
  ) -> Result<(), AnyError> {
    let mut failed_packages = Vec::new();
    let mut bin_entries = BinEntries::new();
    // get custom commands for each bin available in the node_modules dir (essentially
    // the scripts that are in `node_modules/.bin`)
    let base = self
      .resolve_baseline_custom_commands(
        options.extra_info_provider,
        &mut bin_entries,
        options.snapshot,
        options.system_packages,
      )
      .await;
    // we don't run with signals forwarded because once signals
    // are setup then they're process wide.
    let kill_signal = KillSignal::default();
    let _drop_signal = kill_signal.clone().drop_guard();
    let mut env_vars = crate::task_runner::real_env_vars();
    // so the subprocess can detect that it is running as part of a lifecycle script,
    // and avoid trying to set up node_modules again
    env_vars.insert(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR.into(), "1".into());
    // we want to pass the current state of npm resolution down to the deno subprocess
    // (that may be running as part of the script). we do this with an inherited temp file
    //
    // SAFETY: we are sharing a single temp file across all of the scripts. the file position
    // will be shared among these, which is okay since we run only one script at a time.
    // However, if we concurrently run scripts in the future we will
    // have to have multiple temp files.
    let temp_file_fd = deno_runtime::deno_process::npm_process_state_tempfile(
      options.process_state.as_bytes(),
    )
    .map_err(DenoTaskLifecycleScriptsError::CreateNpmProcessState)?;
    // SAFETY: fd/handle is valid
    let _temp_file = unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
    env_vars.insert(
      deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME.into(),
      (temp_file_fd as usize).to_string().into(),
    );
    for PackageWithScript {
      package,
      scripts,
      package_folder,
    } in options.packages_with_scripts
    {
      // add custom commands for binaries from the package's dependencies. this will take precedence over the
      // baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
      // correct bin will be used.
      let custom_commands = self
        .resolve_custom_commands_from_deps(
          options.extra_info_provider,
          base.clone(),
          package,
          options.snapshot,
        )
        .await;
      // scripts run in npm's standard order: preinstall, install, postinstall
      for script_name in ["preinstall", "install", "postinstall"] {
        if let Some(script) = scripts.get(script_name) {
          // skip npm's auto-generated `node-gyp rebuild` install script when
          // the package has no buildable sources
          if script_name == "install"
            && is_broken_default_install_script(script, package_folder)
          {
            continue;
          }
          let pb = ProgressBar::new(
            crate::util::progress_bar::ProgressBarStyle::TextOnly,
          );
          let _guard = pb.update_with_prompt(
            ProgressMessagePrompt::Initialize,
            &format!("{}: running '{script_name}' script", package.id.nv),
          );
          let crate::task_runner::TaskResult {
            exit_code,
            stderr,
            stdout,
          } =
            crate::task_runner::run_task(crate::task_runner::RunTaskOptions {
              task_name: script_name,
              script,
              cwd: package_folder.clone(),
              env_vars: env_vars.clone(),
              custom_commands: custom_commands.clone(),
              init_cwd: options.init_cwd,
              argv: &[],
              root_node_modules_dir: Some(options.root_node_modules_dir_path),
              // capture output so it is only surfaced on failure below
              stdio: Some(crate::task_runner::TaskIo {
                stderr: TaskStdio::piped(),
                stdout: TaskStdio::piped(),
              }),
              kill_signal: kill_signal.clone(),
            })
            .await
            .map_err(DenoTaskLifecycleScriptsError::Task)?;
          let stdout = stdout.unwrap();
          let stderr = stderr.unwrap();
          if exit_code != 0 {
            log::warn!(
              "error: script '{}' in '{}' failed with exit code {}{}{}",
              script_name,
              package.id.nv,
              exit_code,
              if !stdout.trim_ascii().is_empty() {
                format!(
                  "\nstdout:\n{}\n",
                  String::from_utf8_lossy(&stdout).trim()
                )
              } else {
                String::new()
              },
              if !stderr.trim_ascii().is_empty() {
                format!(
                  "\nstderr:\n{}\n",
                  String::from_utf8_lossy(&stderr).trim()
                )
              } else {
                String::new()
              },
            );
            failed_packages.push(&package.id.nv);
            // assume if earlier script fails, later ones will fail too
            break;
          }
        }
      }
      (options.on_ran_pkg_scripts)(package)?;
    }
    // re-set up bin entries for the packages which we've run scripts for.
    // lifecycle scripts can create files that are linked to by bin entries,
    // and the only reliable way to handle this is to re-link bin entries
    // (this is what PNPM does as well)
    let package_ids = options
      .packages_with_scripts
      .iter()
      .map(|p| &p.package.id)
      .collect::<HashSet<_>>();
    bin_entries.finish_only(
      options.snapshot,
      &options.root_node_modules_dir_path.join(".bin"),
      |outcome| outcome.warn_if_failed(),
      &package_ids,
    )?;
    if failed_packages.is_empty() {
      Ok(())
    } else {
      Err(
        DenoTaskLifecycleScriptsError::RunScripts(
          failed_packages
            .iter()
            .map(|p| p.to_string())
            .collect::<Vec<_>>(),
        )
        .into(),
      )
    }
  }
}
impl DenoTaskLifeCycleScriptsExecutor {
  pub fn new(npm_resolver: ManagedNpmResolverRc<CliSys>) -> Self {
    Self { npm_resolver }
  }

  /// Takes in all (non copy) packages from the snapshot and resolves the set
  /// of available binaries to create custom commands available to the task
  /// runner, seeded with the built-in npx/npm/node/node-gyp commands.
  async fn resolve_baseline_custom_commands<'a>(
    &self,
    extra_info_provider: &CachedNpmPackageExtraInfoProvider,
    bin_entries: &mut BinEntries<'a>,
    snapshot: &'a NpmResolutionSnapshot,
    packages: &'a [NpmResolutionPackage],
  ) -> crate::task_runner::TaskCustomCommands {
    let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
    custom_commands
      .insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
    custom_commands
      .insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
    custom_commands
      .insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
    custom_commands.insert(
      "node-gyp".to_string(),
      Rc::new(crate::task_runner::NodeGypCommand),
    );
    // TODO: this recreates the bin entries which could be redoing some work, but the ones
    // we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
    // doing it for packages that are set up already.
    // realistically, scripts won't be run very often so it probably isn't too big of an issue.
    self
      .resolve_custom_commands_from_packages(
        extra_info_provider,
        bin_entries,
        custom_commands,
        snapshot,
        packages,
      )
      .await
  }

  /// Resolves the custom commands from an iterator of packages and adds them
  /// to the existing custom commands. Note that this will overwrite any
  /// existing custom commands. Packages whose folder or extra info cannot be
  /// resolved are silently skipped.
  async fn resolve_custom_commands_from_packages<
    'a,
    P: IntoIterator<Item = &'a NpmResolutionPackage>,
  >(
    &self,
    extra_info_provider: &CachedNpmPackageExtraInfoProvider,
    bin_entries: &mut BinEntries<'a>,
    mut commands: crate::task_runner::TaskCustomCommands,
    snapshot: &'a NpmResolutionSnapshot,
    packages: P,
  ) -> crate::task_runner::TaskCustomCommands {
    for package in packages {
      let Ok(package_path) = self
        .npm_resolver
        .resolve_pkg_folder_from_pkg_id(&package.id)
      else {
        continue;
      };
      // prefer extra info already on the package; otherwise fetch it
      let extra = if let Some(extra) = &package.extra {
        Cow::Borrowed(extra)
      } else {
        let Ok(extra) = extra_info_provider
          .get_package_extra_info(
            &package.id.nv,
            &package_path,
            ExpectedExtraInfo::from_package(package),
          )
          .await
        else {
          continue;
        };
        Cow::Owned(extra)
      };
      if extra.bin.is_some() {
        bin_entries.add(package, &extra, package_path);
      }
    }
    let bins: Vec<(String, PathBuf)> = bin_entries.collect_bin_files(snapshot);
    for (bin_name, script_path) in bins {
      commands.insert(
        bin_name.clone(),
        Rc::new(crate::task_runner::NodeModulesFileRunCommand {
          command_name: bin_name,
          path: script_path,
        }),
      );
    }
    commands
  }

  /// Resolves the custom commands from the dependencies of a package and adds
  /// them to the existing custom commands. Note that this will overwrite any
  /// existing custom commands.
  async fn resolve_custom_commands_from_deps(
    &self,
    extra_info_provider: &CachedNpmPackageExtraInfoProvider,
    baseline: crate::task_runner::TaskCustomCommands,
    package: &NpmResolutionPackage,
    snapshot: &NpmResolutionSnapshot,
  ) -> crate::task_runner::TaskCustomCommands {
    let mut bin_entries = BinEntries::new();
    self
      .resolve_custom_commands_from_packages(
        extra_info_provider,
        &mut bin_entries,
        baseline,
        snapshot,
        package
          .dependencies
          .values()
          .map(|id| snapshot.package_from_id(id).unwrap()),
      )
      .await
  }
}

View file

@ -1,337 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::borrow::Cow;
use std::collections::HashSet;
use std::path::PathBuf;
use std::rc::Rc;
use deno_core::error::AnyError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmResolutionPackage;
use deno_resolver::npm::ManagedNpmResolverRc;
use deno_runtime::deno_io::FromRawIoHandle;
use deno_task_shell::KillSignal;
use super::bin_entries::BinEntries;
use super::lifecycle_scripts::is_broken_default_install_script;
use super::lifecycle_scripts::LifecycleScriptsExecutor;
use super::lifecycle_scripts::LifecycleScriptsExecutorOptions;
use super::lifecycle_scripts::PackageWithScript;
use super::lifecycle_scripts::LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR;
use super::CachedNpmPackageExtraInfoProvider;
use super::ExpectedExtraInfo;
use crate::sys::CliSys;
use crate::task_runner::TaskStdio;
use crate::util::progress_bar::ProgressMessagePrompt;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum DenoTaskLifecycleScriptsError {
#[class(inherit)]
#[error(transparent)]
Io(#[from] std::io::Error),
#[class(inherit)]
#[error(transparent)]
BinEntries(#[from] super::bin_entries::BinEntriesError),
#[class(inherit)]
#[error(
"failed to create npm process state tempfile for running lifecycle scripts"
)]
CreateNpmProcessState(#[source] std::io::Error),
#[class(generic)]
#[error(transparent)]
Task(AnyError),
#[class(generic)]
#[error("failed to run scripts for packages: {}", .0.join(", "))]
RunScripts(Vec<String>),
}
pub struct DenoTaskLifeCycleScriptsExecutor {
npm_resolver: ManagedNpmResolverRc<CliSys>,
}
#[async_trait::async_trait(?Send)]
impl LifecycleScriptsExecutor for DenoTaskLifeCycleScriptsExecutor {
async fn execute(
&self,
options: LifecycleScriptsExecutorOptions<'_>,
) -> Result<(), AnyError> {
let mut failed_packages = Vec::new();
let mut bin_entries = BinEntries::new();
// get custom commands for each bin available in the node_modules dir (essentially
// the scripts that are in `node_modules/.bin`)
let base = self
.resolve_baseline_custom_commands(
options.extra_info_provider,
&mut bin_entries,
options.snapshot,
options.system_packages,
)
.await;
// we don't run with signals forwarded because once signals
// are setup then they're process wide.
let kill_signal = KillSignal::default();
let _drop_signal = kill_signal.clone().drop_guard();
let mut env_vars = crate::task_runner::real_env_vars();
// so the subprocess can detect that it is running as part of a lifecycle script,
// and avoid trying to set up node_modules again
env_vars.insert(LIFECYCLE_SCRIPTS_RUNNING_ENV_VAR.into(), "1".into());
// we want to pass the current state of npm resolution down to the deno subprocess
// (that may be running as part of the script). we do this with an inherited temp file
//
// SAFETY: we are sharing a single temp file across all of the scripts. the file position
// will be shared among these, which is okay since we run only one script at a time.
// However, if we concurrently run scripts in the future we will
// have to have multiple temp files.
let temp_file_fd = deno_runtime::deno_process::npm_process_state_tempfile(
options.process_state.as_bytes(),
)
.map_err(DenoTaskLifecycleScriptsError::CreateNpmProcessState)?;
// SAFETY: fd/handle is valid
let _temp_file = unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed
env_vars.insert(
deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME.into(),
(temp_file_fd as usize).to_string().into(),
);
for PackageWithScript {
package,
scripts,
package_folder,
} in options.packages_with_scripts
{
// add custom commands for binaries from the package's dependencies. this will take precedence over the
// baseline commands, so if the package relies on a bin that conflicts with one higher in the dependency tree, the
// correct bin will be used.
let custom_commands = self
.resolve_custom_commands_from_deps(
options.extra_info_provider,
base.clone(),
package,
options.snapshot,
)
.await;
for script_name in ["preinstall", "install", "postinstall"] {
if let Some(script) = scripts.get(script_name) {
if script_name == "install"
&& is_broken_default_install_script(script, package_folder)
{
continue;
}
let _guard = options.progress_bar.update_with_prompt(
ProgressMessagePrompt::Initialize,
&format!("{}: running '{script_name}' script", package.id.nv),
);
let crate::task_runner::TaskResult {
exit_code,
stderr,
stdout,
} =
crate::task_runner::run_task(crate::task_runner::RunTaskOptions {
task_name: script_name,
script,
cwd: package_folder.clone(),
env_vars: env_vars.clone(),
custom_commands: custom_commands.clone(),
init_cwd: options.init_cwd,
argv: &[],
root_node_modules_dir: Some(options.root_node_modules_dir_path),
stdio: Some(crate::task_runner::TaskIo {
stderr: TaskStdio::piped(),
stdout: TaskStdio::piped(),
}),
kill_signal: kill_signal.clone(),
})
.await
.map_err(DenoTaskLifecycleScriptsError::Task)?;
let stdout = stdout.unwrap();
let stderr = stderr.unwrap();
if exit_code != 0 {
log::warn!(
"error: script '{}' in '{}' failed with exit code {}{}{}",
script_name,
package.id.nv,
exit_code,
if !stdout.trim_ascii().is_empty() {
format!(
"\nstdout:\n{}\n",
String::from_utf8_lossy(&stdout).trim()
)
} else {
String::new()
},
if !stderr.trim_ascii().is_empty() {
format!(
"\nstderr:\n{}\n",
String::from_utf8_lossy(&stderr).trim()
)
} else {
String::new()
},
);
failed_packages.push(&package.id.nv);
// assume if earlier script fails, later ones will fail too
break;
}
}
}
(options.on_ran_pkg_scripts)(package)?;
}
// re-set up bin entries for the packages which we've run scripts for.
// lifecycle scripts can create files that are linked to by bin entries,
// and the only reliable way to handle this is to re-link bin entries
// (this is what PNPM does as well)
let package_ids = options
.packages_with_scripts
.iter()
.map(|p| &p.package.id)
.collect::<HashSet<_>>();
bin_entries.finish_only(
options.snapshot,
&options.root_node_modules_dir_path.join(".bin"),
|outcome| outcome.warn_if_failed(),
&package_ids,
)?;
if failed_packages.is_empty() {
Ok(())
} else {
Err(
DenoTaskLifecycleScriptsError::RunScripts(
failed_packages
.iter()
.map(|p| p.to_string())
.collect::<Vec<_>>(),
)
.into(),
)
}
}
}
impl DenoTaskLifeCycleScriptsExecutor {
pub fn new(npm_resolver: ManagedNpmResolverRc<CliSys>) -> Self {
Self { npm_resolver }
}
// take in all (non copy) packages from snapshot,
// and resolve the set of available binaries to create
// custom commands available to the task runner
async fn resolve_baseline_custom_commands<'a>(
&self,
extra_info_provider: &CachedNpmPackageExtraInfoProvider,
bin_entries: &mut BinEntries<'a>,
snapshot: &'a NpmResolutionSnapshot,
packages: &'a [NpmResolutionPackage],
) -> crate::task_runner::TaskCustomCommands {
let mut custom_commands = crate::task_runner::TaskCustomCommands::new();
custom_commands
.insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand));
custom_commands
.insert("npm".to_string(), Rc::new(crate::task_runner::NpmCommand));
custom_commands
.insert("node".to_string(), Rc::new(crate::task_runner::NodeCommand));
custom_commands.insert(
"node-gyp".to_string(),
Rc::new(crate::task_runner::NodeGypCommand),
);
// TODO: this recreates the bin entries which could be redoing some work, but the ones
// we compute earlier in `sync_resolution_with_fs` may not be exhaustive (because we skip
// doing it for packages that are set up already.
// realistically, scripts won't be run very often so it probably isn't too big of an issue.
self
.resolve_custom_commands_from_packages(
extra_info_provider,
bin_entries,
custom_commands,
snapshot,
packages,
)
.await
}
// resolves the custom commands from an iterator of packages
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands
async fn resolve_custom_commands_from_packages<
'a,
P: IntoIterator<Item = &'a NpmResolutionPackage>,
>(
&self,
extra_info_provider: &CachedNpmPackageExtraInfoProvider,
bin_entries: &mut BinEntries<'a>,
mut commands: crate::task_runner::TaskCustomCommands,
snapshot: &'a NpmResolutionSnapshot,
packages: P,
) -> crate::task_runner::TaskCustomCommands {
for package in packages {
let Ok(package_path) = self
.npm_resolver
.resolve_pkg_folder_from_pkg_id(&package.id)
else {
continue;
};
let extra = if let Some(extra) = &package.extra {
Cow::Borrowed(extra)
} else {
let Ok(extra) = extra_info_provider
.get_package_extra_info(
&package.id.nv,
&package_path,
ExpectedExtraInfo::from_package(package),
)
.await
else {
continue;
};
Cow::Owned(extra)
};
if extra.bin.is_some() {
bin_entries.add(package, &extra, package_path);
}
}
let bins: Vec<(String, PathBuf)> = bin_entries.collect_bin_files(snapshot);
for (bin_name, script_path) in bins {
commands.insert(
bin_name.clone(),
Rc::new(crate::task_runner::NodeModulesFileRunCommand {
command_name: bin_name,
path: script_path,
}),
);
}
commands
}
// resolves the custom commands from the dependencies of a package
// and adds them to the existing custom commands.
// note that this will overwrite any existing custom commands.
async fn resolve_custom_commands_from_deps(
&self,
extra_info_provider: &CachedNpmPackageExtraInfoProvider,
baseline: crate::task_runner::TaskCustomCommands,
package: &NpmResolutionPackage,
snapshot: &NpmResolutionSnapshot,
) -> crate::task_runner::TaskCustomCommands {
let mut bin_entries = BinEntries::new();
self
.resolve_custom_commands_from_packages(
extra_info_provider,
&mut bin_entries,
baseline,
snapshot,
package
.dependencies
.values()
.map(|id| snapshot.package_from_id(id).unwrap()),
)
.await
}
}

File diff suppressed because it is too large Load diff

View file

@ -1,306 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use capacity_builder::StringBuilder;
use deno_error::JsErrorBox;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use deno_npm::resolution::AddPkgReqsOptions;
use deno_npm::resolution::DefaultTarballUrlProvider;
use deno_npm::resolution::NpmResolutionError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmResolutionPackage;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_runtime::colors;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::VersionReq;
use crate::args::CliLockfile;
use crate::npm::CliNpmRegistryInfoProvider;
use crate::npm::WorkspaceNpmPatchPackages;
use crate::util::display::DisplayTreeNode;
use crate::util::sync::TaskQueue;
pub struct AddPkgReqsResult {
/// Results from adding the individual packages.
///
/// The indexes of the results correspond to the indexes of the provided
/// package requirements.
pub results: Vec<Result<PackageNv, NpmResolutionError>>,
/// The final result of resolving and caching all the package requirements.
pub dependencies_result: Result<(), JsErrorBox>,
}
/// Updates the npm resolution with the provided package requirements.
#[derive(Debug)]
pub struct NpmResolutionInstaller {
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
resolution: Arc<NpmResolutionCell>,
maybe_lockfile: Option<Arc<CliLockfile>>,
patch_packages: Arc<WorkspaceNpmPatchPackages>,
update_queue: TaskQueue,
}
impl NpmResolutionInstaller {
pub fn new(
registry_info_provider: Arc<CliNpmRegistryInfoProvider>,
resolution: Arc<NpmResolutionCell>,
maybe_lockfile: Option<Arc<CliLockfile>>,
patch_packages: Arc<WorkspaceNpmPatchPackages>,
) -> Self {
Self {
registry_info_provider,
resolution,
maybe_lockfile,
patch_packages,
update_queue: Default::default(),
}
}
pub async fn cache_package_info(
&self,
package_name: &str,
) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
// this will internally cache the package information
self.registry_info_provider.package_info(package_name).await
}
pub async fn add_package_reqs(
&self,
package_reqs: &[PackageReq],
) -> AddPkgReqsResult {
// only allow one thread in here at a time
let _snapshot_lock = self.update_queue.acquire().await;
let result = add_package_reqs_to_snapshot(
&self.registry_info_provider,
package_reqs,
self.maybe_lockfile.clone(),
&self.patch_packages,
|| self.resolution.snapshot(),
)
.await;
AddPkgReqsResult {
results: result.results,
dependencies_result: match result.dep_graph_result {
Ok(snapshot) => {
self.resolution.set_snapshot(snapshot);
Ok(())
}
Err(err) => Err(JsErrorBox::from_err(err)),
},
}
}
}
async fn add_package_reqs_to_snapshot(
registry_info_provider: &Arc<CliNpmRegistryInfoProvider>,
package_reqs: &[PackageReq],
maybe_lockfile: Option<Arc<CliLockfile>>,
patch_packages: &WorkspaceNpmPatchPackages,
get_new_snapshot: impl Fn() -> NpmResolutionSnapshot,
) -> deno_npm::resolution::AddPkgReqsResult {
fn get_types_node_version() -> VersionReq {
// WARNING: When bumping this version, check if anything needs to be
// updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
VersionReq::parse_from_npm("22.9.0 - 22.15.15").unwrap()
}
let snapshot = get_new_snapshot();
if package_reqs
.iter()
.all(|req| snapshot.package_reqs().contains_key(req))
{
log::debug!("Snapshot already up to date. Skipping npm resolution.");
return deno_npm::resolution::AddPkgReqsResult {
results: package_reqs
.iter()
.map(|req| Ok(snapshot.package_reqs().get(req).unwrap().clone()))
.collect(),
dep_graph_result: Ok(snapshot),
unmet_peer_diagnostics: Default::default(),
};
}
log::debug!(
/* this string is used in tests */
"Running npm resolution."
);
let npm_registry_api = registry_info_provider.as_npm_registry_api();
let result = snapshot
.add_pkg_reqs(
&npm_registry_api,
AddPkgReqsOptions {
package_reqs,
types_node_version_req: Some(get_types_node_version()),
patch_packages: &patch_packages.0,
},
)
.await;
let result = match &result.dep_graph_result {
Err(NpmResolutionError::Resolution(err))
if npm_registry_api.mark_force_reload() =>
{
log::debug!("{err:#}");
log::debug!("npm resolution failed. Trying again...");
// try again with forced reloading
let snapshot = get_new_snapshot();
snapshot
.add_pkg_reqs(
&npm_registry_api,
AddPkgReqsOptions {
package_reqs,
types_node_version_req: Some(get_types_node_version()),
patch_packages: &patch_packages.0,
},
)
.await
}
_ => result,
};
registry_info_provider.clear_memory_cache();
if !result.unmet_peer_diagnostics.is_empty()
&& log::log_enabled!(log::Level::Warn)
{
let root_node = DisplayTreeNode {
text: format!(
"{} The following peer dependency issues were found:",
colors::yellow("Warning")
),
children: result
.unmet_peer_diagnostics
.iter()
.map(|diagnostic| {
let mut node = DisplayTreeNode {
text: format!(
"peer {}: resolved to {}",
diagnostic.dependency, diagnostic.resolved
),
children: Vec::new(),
};
for ancestor in &diagnostic.ancestors {
node = DisplayTreeNode {
text: ancestor.to_string(),
children: vec![node],
};
}
node
})
.collect(),
};
let mut text = String::new();
_ = root_node.print(&mut text);
log::warn!("{}", text);
}
if let Ok(snapshot) = &result.dep_graph_result {
if let Some(lockfile) = maybe_lockfile {
populate_lockfile_from_snapshot(&lockfile, snapshot);
}
}
result
}
/// Writes the resolved packages from `snapshot` into `lockfile`: first the
/// package req -> version specifier entries, then the detailed per-package
/// information for every package across all systems.
fn populate_lockfile_from_snapshot(
  lockfile: &CliLockfile,
  snapshot: &NpmResolutionSnapshot,
) {
  // Converts a resolved package into the shape stored in the lockfile.
  fn npm_package_to_lockfile_info(
    pkg: &NpmResolutionPackage,
  ) -> NpmPackageLockfileInfo {
    // regular dependencies, excluding ones that are also listed as optional
    // (those are recorded under `optional_dependencies` instead)
    let dependencies = pkg
      .dependencies
      .iter()
      .filter_map(|(name, id)| {
        if pkg.optional_dependencies.contains(name) {
          None
        } else {
          Some(NpmPackageDependencyLockfileInfo {
            name: name.clone(),
            id: id.as_serialized(),
          })
        }
      })
      .collect();
    // optional dependencies that actually resolved (present in `dependencies`)
    let optional_dependencies = pkg
      .optional_dependencies
      .iter()
      .filter_map(|name| {
        let id = pkg.dependencies.get(name)?;
        Some(NpmPackageDependencyLockfileInfo {
          name: name.clone(),
          id: id.as_serialized(),
        })
      })
      .collect();
    // optional peer dependencies that actually resolved
    let optional_peers = pkg
      .optional_peer_dependencies
      .iter()
      .filter_map(|name| {
        let id = pkg.dependencies.get(name)?;
        Some(NpmPackageDependencyLockfileInfo {
          name: name.clone(),
          id: id.as_serialized(),
        })
      })
      .collect();
    NpmPackageLockfileInfo {
      serialized_id: pkg.id.as_serialized(),
      integrity: pkg.dist.as_ref().and_then(|dist| {
        dist.integrity().for_lockfile().map(|s| s.into_owned())
      }),
      dependencies,
      optional_dependencies,
      os: pkg.system.os.clone(),
      cpu: pkg.system.cpu.clone(),
      tarball: pkg.dist.as_ref().and_then(|dist| {
        // Omit the tarball URL if it's the standard NPM registry URL
        if dist.tarball
          == crate::npm::managed::DefaultTarballUrl::default_tarball_url(
            &crate::npm::managed::DefaultTarballUrl,
            &pkg.id.nv,
          )
        {
          None
        } else {
          Some(StackString::from_str(&dist.tarball))
        }
      }),
      deprecated: pkg.is_deprecated,
      bin: pkg.has_bin,
      scripts: pkg.has_scripts,
      optional_peers,
    }
  }

  let mut lockfile = lockfile.lock();
  for (package_req, nv) in snapshot.package_reqs() {
    let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
    lockfile.insert_package_specifier(
      JsrDepPackageReq::npm(package_req.clone()),
      {
        // serialized as the version followed by the serialized peer deps
        StringBuilder::<SmallStackString>::build(|builder| {
          builder.append(&id.nv.version);
          builder.append(&id.peer_dependencies);
        })
        .unwrap()
      },
    );
  }
  for package in snapshot.all_packages_for_every_system() {
    lockfile.insert_npm_package(npm_package_to_lockfile_info(package));
  }
}

View file

@ -1,562 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
pub mod installer;
mod managed;
use std::collections::HashMap;
use std::sync::Arc;
use dashmap::DashMap;
use deno_config::workspace::Workspace;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesOrdered;
use deno_core::futures::TryStreamExt;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_error::JsErrorBox;
use deno_lib::version::DENO_VERSION_INFO;
use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmPackageVersionInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::resolution::DefaultTarballUrlProvider;
use deno_npm_cache::NpmCacheHttpClientBytesResponse;
use deno_npm_cache::NpmCacheHttpClientResponse;
use deno_resolver::npm::ByonmNpmResolverCreateOptions;
use deno_runtime::colors;
use deno_semver::package::PackageName;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::Version;
use indexmap::IndexMap;
use thiserror::Error;
pub use self::managed::CliManagedNpmResolverCreateOptions;
pub use self::managed::CliNpmResolverManagedSnapshotOption;
pub use self::managed::NpmResolutionInitializer;
use crate::file_fetcher::CliFileFetcher;
use crate::http_util::HttpClientProvider;
use crate::npm::managed::DefaultTarballUrl;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
// Concrete CLI instantiations of the generic npm cache/resolver types,
// parameterized over the CLI's sys implementation and http client.
pub type CliNpmTarballCache =
  deno_npm_cache::TarballCache<CliNpmCacheHttpClient, CliSys>;
pub type CliNpmCache = deno_npm_cache::NpmCache<CliSys>;
pub type CliNpmRegistryInfoProvider =
  deno_npm_cache::RegistryInfoProvider<CliNpmCacheHttpClient, CliSys>;
pub type CliNpmResolver = deno_resolver::npm::NpmResolver<CliSys>;
pub type CliManagedNpmResolver = deno_resolver::npm::ManagedNpmResolver<CliSys>;
pub type CliNpmResolverCreateOptions =
  deno_resolver::npm::NpmResolverCreateOptions<CliSys>;
pub type CliByonmNpmResolverCreateOptions =
  ByonmNpmResolverCreateOptions<CliSys>;
/// Adapts an [`NpmRegistryApi`] (plus the workspace's patch packages) to the
/// [`deno_lockfile::NpmPackageInfoProvider`] interface (see impl below).
pub struct NpmPackageInfoApiAdapter {
  api: Arc<dyn NpmRegistryApi + Send + Sync>,
  workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
}
impl NpmPackageInfoApiAdapter {
  /// Creates an adapter over the given registry api and the workspace's
  /// patched packages.
  pub fn new(
    api: Arc<dyn NpmRegistryApi + Send + Sync>,
    workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
  ) -> Self {
    Self {
      api,
      workspace_patch_packages,
    }
  }
}
// Fetches registry information for each package name/version (preserving the
// input order via `FuturesOrdered`) and converts it into the lockfile v5
// npm info format. Patched package version infos take part in the
// `version_info` lookup.
async fn get_infos(
  info_provider: &(dyn NpmRegistryApi + Send + Sync),
  workspace_patch_packages: &WorkspaceNpmPatchPackages,
  values: &[PackageNv],
) -> Result<
  Vec<deno_lockfile::Lockfile5NpmInfo>,
  Box<dyn std::error::Error + Send + Sync>,
> {
  let futs = values
    .iter()
    .map(|v| async move {
      let info = info_provider.package_info(v.name.as_str()).await?;
      let version_info = info.version_info(v, &workspace_patch_packages.0)?;
      Ok::<_, Box<dyn std::error::Error + Send + Sync>>(
        deno_lockfile::Lockfile5NpmInfo {
          // omit the tarball url when it's the default registry tarball url
          tarball_url: version_info.dist.as_ref().and_then(|d| {
            if d.tarball == DefaultTarballUrl.default_tarball_url(v) {
              None
            } else {
              Some(d.tarball.clone())
            }
          }),
          optional_dependencies: version_info
            .optional_dependencies
            .iter()
            .map(|(k, v)| (k.to_string(), v.to_string()))
            .collect::<std::collections::BTreeMap<_, _>>(),
          cpu: version_info.cpu.iter().map(|s| s.to_string()).collect(),
          os: version_info.os.iter().map(|s| s.to_string()).collect(),
          deprecated: version_info.deprecated.is_some(),
          bin: version_info.bin.is_some(),
          // true when the package declares any lifecycle install script
          scripts: version_info.scripts.contains_key("preinstall")
            || version_info.scripts.contains_key("install")
            || version_info.scripts.contains_key("postinstall"),
          // peer dependencies marked optional in peerDependenciesMeta,
          // mapped to their version requirement
          optional_peers: version_info
            .peer_dependencies_meta
            .iter()
            .filter_map(|(k, v)| {
              if v.optional {
                version_info
                  .peer_dependencies
                  .get(k)
                  .map(|v| (k.to_string(), v.to_string()))
              } else {
                None
              }
            })
            .collect::<std::collections::BTreeMap<_, _>>(),
        },
      )
    })
    .collect::<FuturesOrdered<_>>();
  let package_infos = futs.try_collect::<Vec<_>>().await?;
  Ok(package_infos)
}
#[async_trait::async_trait(?Send)]
impl deno_lockfile::NpmPackageInfoProvider for NpmPackageInfoApiAdapter {
  async fn get_npm_package_info(
    &self,
    values: &[PackageNv],
  ) -> Result<
    Vec<deno_lockfile::Lockfile5NpmInfo>,
    Box<dyn std::error::Error + Send + Sync>,
  > {
    let package_infos =
      get_infos(&*self.api, &self.workspace_patch_packages, values).await;
    match package_infos {
      Ok(package_infos) => Ok(package_infos),
      Err(err) => {
        // on failure, retry once with a forced registry reload in case the
        // cached registry data was stale; otherwise surface the error
        if self.api.mark_force_reload() {
          get_infos(&*self.api, &self.workspace_patch_packages, values).await
        } else {
          Err(err)
        }
      }
    }
  }
}
/// Version info for each workspace patch package, keyed by package name
/// (a name may map to multiple patched versions).
#[derive(Debug, Default)]
pub struct WorkspaceNpmPatchPackages(
  pub HashMap<PackageName, Vec<NpmPackageVersionInfo>>,
);
impl WorkspaceNpmPatchPackages {
  /// Collects version info for every patch package.json in the workspace.
  ///
  /// Requires the "npm-patch" unstable flag; without it any patch
  /// package.jsons are ignored with a warning. Entries missing a name or
  /// with an invalid/missing version are also skipped with a warning.
  pub fn from_workspace(workspace: &Workspace) -> Self {
    let mut entries: HashMap<PackageName, Vec<NpmPackageVersionInfo>> =
      HashMap::new();
    if workspace.has_unstable("npm-patch") {
      for pkg_json in workspace.patch_pkg_jsons() {
        let Some(name) = pkg_json.name.as_ref() else {
          log::warn!(
            "{} Patch package ignored because package.json was missing name field.\n at {}",
            colors::yellow("Warning"),
            pkg_json.path.display(),
          );
          continue;
        };
        match pkg_json_to_version_info(pkg_json) {
          Ok(version_info) => {
            let entry = entries.entry(PackageName::from_str(name)).or_default();
            entry.push(version_info);
          }
          Err(err) => {
            log::warn!(
              "{} {}\n at {}",
              colors::yellow("Warning"),
              err,
              pkg_json.path.display(),
            );
          }
        }
      }
    } else if workspace.patch_pkg_jsons().next().is_some() {
      // patch packages exist but the unstable flag isn't enabled
      log::warn!(
        "{} {}\n at {}",
        colors::yellow("Warning"),
        "Patching npm packages is only supported when setting \"unstable\": [\"npm-patch\"] in the root deno.json",
        workspace
          .root_deno_json()
          .map(|d| d.specifier.to_string())
          .unwrap_or_else(|| workspace.root_dir().to_string()),
      );
    }
    Self(entries)
  }
}
/// Reasons a patch package's package.json could not be converted into
/// npm version info (the package is skipped with a warning in either case).
#[derive(Debug, Error)]
enum PkgJsonToVersionInfoError {
  /// The package.json had no `version` field.
  #[error(
    "Patch package ignored because package.json was missing version field."
  )]
  VersionMissing,
  /// The `version` field was not a valid npm version.
  #[error("Patch package ignored because package.json version field could not be parsed.")]
  VersionInvalid {
    #[source]
    source: deno_semver::npm::NpmVersionParseError,
  },
}
/// Builds an [`NpmPackageVersionInfo`] from a patch package's package.json.
///
/// Fails when the `version` field is missing or is not a valid npm version.
fn pkg_json_to_version_info(
  pkg_json: &deno_package_json::PackageJson,
) -> Result<NpmPackageVersionInfo, PkgJsonToVersionInfoError> {
  // converts an optional dependency map into the registry representation
  fn to_dep_map(
    maybe_deps: Option<&IndexMap<String, String>>,
  ) -> HashMap<StackString, StackString> {
    match maybe_deps {
      Some(deps) => deps
        .iter()
        .map(|(key, value)| {
          (StackString::from_str(key), StackString::from_str(value))
        })
        .collect(),
      None => HashMap::new(),
    }
  }

  // converts a string slice into small string values
  fn to_small_strings(values: &[String]) -> Vec<SmallStackString> {
    values
      .iter()
      .map(|value| SmallStackString::from_str(value))
      .collect()
  }

  let version = match &pkg_json.version {
    Some(version) => Version::parse_from_npm(version)
      .map_err(|source| PkgJsonToVersionInfoError::VersionInvalid { source })?,
    None => return Err(PkgJsonToVersionInfoError::VersionMissing),
  };
  Ok(NpmPackageVersionInfo {
    version,
    dist: None,
    bin: pkg_json
      .bin
      .as_ref()
      .and_then(|value| serde_json::from_value(value.clone()).ok()),
    dependencies: to_dep_map(pkg_json.dependencies.as_ref()),
    optional_dependencies: to_dep_map(pkg_json.optional_dependencies.as_ref()),
    peer_dependencies: to_dep_map(pkg_json.peer_dependencies.as_ref()),
    peer_dependencies_meta: pkg_json
      .peer_dependencies_meta
      .clone()
      .and_then(|meta| serde_json::from_value(meta).ok())
      .unwrap_or_default(),
    os: pkg_json
      .os
      .as_deref()
      .map(to_small_strings)
      .unwrap_or_default(),
    cpu: pkg_json
      .cpu
      .as_deref()
      .map(to_small_strings)
      .unwrap_or_default(),
    scripts: pkg_json
      .scripts
      .as_ref()
      .map(|scripts| {
        scripts
          .iter()
          .map(|(name, cmd)| (SmallStackString::from_str(name), cmd.clone()))
          .collect()
      })
      .unwrap_or_default(),
    // not worth increasing memory for showing a deprecated
    // message for patched packages
    deprecated: None,
  })
}
/// Http client used by the npm cache; wraps the CLI's shared http client
/// provider and reports download progress through the progress bar.
#[derive(Debug)]
pub struct CliNpmCacheHttpClient {
  http_client_provider: Arc<HttpClientProvider>,
  progress_bar: ProgressBar,
}
impl CliNpmCacheHttpClient {
  /// Creates a client wrapping the given http client provider and
  /// progress bar.
  pub fn new(
    http_client_provider: Arc<HttpClientProvider>,
    progress_bar: ProgressBar,
  ) -> Self {
    Self {
      http_client_provider,
      progress_bar,
    }
  }
}
#[async_trait::async_trait(?Send)]
impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient {
  /// Downloads `url` with progress reporting and retries, sending
  /// `Authorization`/`If-None-Match` headers when provided, and translates
  /// the response and any error into the `deno_npm_cache` types.
  async fn download_with_retries_on_any_tokio_runtime(
    &self,
    url: Url,
    maybe_auth: Option<String>,
    maybe_etag: Option<String>,
  ) -> Result<NpmCacheHttpClientResponse, deno_npm_cache::DownloadError> {
    // show the url in the progress bar while the download is in flight
    let guard = self.progress_bar.update(url.as_str());
    let client = self.http_client_provider.get_or_create().map_err(|err| {
      deno_npm_cache::DownloadError {
        status_code: None,
        error: err,
      }
    })?;
    let mut headers = http::HeaderMap::new();
    if let Some(auth) = maybe_auth {
      headers.append(
        http::header::AUTHORIZATION,
        http::header::HeaderValue::try_from(auth).unwrap(),
      );
    }
    if let Some(etag) = maybe_etag {
      headers.append(
        http::header::IF_NONE_MATCH,
        http::header::HeaderValue::try_from(etag).unwrap(),
      );
    }
    client
      .download_with_progress_and_retries(url, &headers, &guard)
      .await
      .map(|response| match response {
        crate::http_util::HttpClientResponse::Success { headers, body } => {
          NpmCacheHttpClientResponse::Bytes(NpmCacheHttpClientBytesResponse {
            // pass the etag along so the caller can revalidate next time
            etag: headers
              .get(http::header::ETAG)
              .and_then(|e| e.to_str().map(|t| t.to_string()).ok()),
            bytes: body,
          })
        }
        crate::http_util::HttpClientResponse::NotFound => {
          NpmCacheHttpClientResponse::NotFound
        }
        crate::http_util::HttpClientResponse::NotModified => {
          NpmCacheHttpClientResponse::NotModified
        }
      })
      .map_err(|err| {
        use crate::http_util::DownloadErrorKind::*;
        // only bad http responses carry a status code; network/parse/redirect
        // failures map to `None`
        let status_code = match err.as_kind() {
          Fetch { .. }
          | UrlParse { .. }
          | HttpParse { .. }
          | Json { .. }
          | ToStr { .. }
          | RedirectHeaderParse { .. }
          | TooManyRedirects
          | UnhandledNotModified
          | NotFound
          | Other(_) => None,
          BadResponse(bad_response_error) => {
            Some(bad_response_error.status_code.as_u16())
          }
        };
        deno_npm_cache::DownloadError {
          status_code,
          error: JsErrorBox::from_err(err),
        }
      })
  }
}
/// Resolves npm package reqs to concrete versions by fetching registry info
/// through the file fetcher, caching both req -> nv resolutions and
/// per-name package info.
#[derive(Debug)]
pub struct NpmFetchResolver {
  // cache of resolved name/version per req (`None` = could not resolve)
  nv_by_req: DashMap<PackageReq, Option<PackageNv>>,
  // cache of fetched registry info per package name (`None` = fetch failed)
  info_by_name: DashMap<String, Option<Arc<NpmPackageInfo>>>,
  file_fetcher: Arc<CliFileFetcher>,
  npmrc: Arc<ResolvedNpmRc>,
}
impl NpmFetchResolver {
  /// Creates a resolver with empty caches.
  pub fn new(
    file_fetcher: Arc<CliFileFetcher>,
    npmrc: Arc<ResolvedNpmRc>,
  ) -> Self {
    Self {
      nv_by_req: Default::default(),
      info_by_name: Default::default(),
      file_fetcher,
      npmrc,
    }
  }

  /// Resolves a package req to a concrete name/version, consulting (and
  /// populating) the per-req cache. Returns `None` when the package info
  /// can't be fetched or no version matches.
  pub async fn req_to_nv(&self, req: &PackageReq) -> Option<PackageNv> {
    if let Some(nv) = self.nv_by_req.get(req) {
      return nv.value().clone();
    }
    let maybe_get_nv = || async {
      let name = req.name.clone();
      let package_info = self.package_info(&name).await?;
      // dist tags (ex. "latest") resolve directly to their tagged version
      if let Some(dist_tag) = req.version_req.tag() {
        let version = package_info.dist_tags.get(dist_tag)?.clone();
        return Some(PackageNv { name, version });
      }
      // Find the highest version of the package matching the version req.
      // Note: the tag case returned above, so no tag check is needed here
      // (the previous `req.version_req.tag().is_none()` guard was dead code).
      let mut versions = package_info.versions.keys().collect::<Vec<_>>();
      versions.sort();
      let version = versions
        .into_iter()
        .rev()
        .find(|v| req.version_req.matches(v))
        .cloned()?;
      Some(PackageNv { name, version })
    };
    let nv = maybe_get_nv().await;
    self.nv_by_req.insert(req.clone(), nv.clone());
    nv
  }

  /// Fetches (and caches) the registry info for a package name. Returns
  /// `None` on any fetch, auth, or deserialization failure.
  pub async fn package_info(&self, name: &str) -> Option<Arc<NpmPackageInfo>> {
    if let Some(info) = self.info_by_name.get(name) {
      return info.value().clone();
    }
    // todo(#27198): use RegistryInfoProvider instead
    let fetch_package_info = || async {
      let info_url = deno_npm_cache::get_package_url(&self.npmrc, name);
      let registry_config = self.npmrc.get_registry_config(name);
      // TODO(bartlomieju): this should error out, not use `.ok()`.
      let maybe_auth_header =
        deno_npm_cache::maybe_auth_header_value_for_npm_registry(
          registry_config,
        )
        .map_err(AnyError::from)
        .and_then(|value| match value {
          Some(value) => Ok(Some((
            http::header::AUTHORIZATION,
            http::HeaderValue::try_from(value.into_bytes())?,
          ))),
          None => Ok(None),
        })
        .ok()?;
      let file = self
        .file_fetcher
        .fetch_bypass_permissions_with_maybe_auth(&info_url, maybe_auth_header)
        .await
        .ok()?;
      serde_json::from_slice::<NpmPackageInfo>(&file.source).ok()
    };
    let info = fetch_package_info().await.map(Arc::new);
    self.info_by_name.insert(name.to_string(), info.clone());
    info
  }
}
/// Environment variable npm tooling uses to advertise the installing client.
pub static NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent";

/// Builds the value for `npm_config_user_agent`, embedding the Deno version
/// (twice — presumably client and runtime slots of npm's user-agent format;
/// TODO confirm), plus the OS and architecture.
pub fn get_npm_config_user_agent() -> String {
  format!(
    "deno/{} npm/? deno/{} {} {}",
    DENO_VERSION_INFO.deno,
    DENO_VERSION_INFO.deno,
    std::env::consts::OS,
    std::env::consts::ARCH
  )
}
#[cfg(test)]
mod test {
  use std::path::PathBuf;

  use deno_npm::registry::NpmPeerDependencyMeta;

  use super::*;

  // Verifies the field-by-field mapping from a package.json to
  // `NpmPackageVersionInfo`, plus the missing/invalid version error cases.
  #[test]
  fn test_pkg_json_to_version_info() {
    // helper: parse a package.json string and convert it to version info
    fn convert(
      text: &str,
    ) -> Result<NpmPackageVersionInfo, PkgJsonToVersionInfoError> {
      let pkg_json = deno_package_json::PackageJson::load_from_string(
        PathBuf::from("package.json"),
        text,
      )
      .unwrap();
      pkg_json_to_version_info(&pkg_json)
    }

    assert_eq!(
      convert(
        r#"{
          "name": "pkg",
          "version": "1.0.0",
          "bin": "./bin.js",
          "dependencies": {
            "my-dep": "1"
          },
          "optionalDependencies": {
            "optional-dep": "~1"
          },
          "peerDependencies": {
            "my-peer-dep": "^2"
          },
          "peerDependenciesMeta": {
            "my-peer-dep": {
              "optional": true
            }
          },
          "os": ["win32"],
          "cpu": ["x86_64"],
          "scripts": {
            "script": "testing",
            "postInstall": "testing2"
          },
          "deprecated": "ignored for now"
        }"#
      )
      .unwrap(),
      NpmPackageVersionInfo {
        version: Version::parse_from_npm("1.0.0").unwrap(),
        dist: None,
        bin: Some(deno_npm::registry::NpmPackageVersionBinEntry::String(
          "./bin.js".to_string()
        )),
        dependencies: HashMap::from([(
          StackString::from_static("my-dep"),
          StackString::from_static("1")
        )]),
        optional_dependencies: HashMap::from([(
          StackString::from_static("optional-dep"),
          StackString::from_static("~1")
        )]),
        peer_dependencies: HashMap::from([(
          StackString::from_static("my-peer-dep"),
          StackString::from_static("^2")
        )]),
        peer_dependencies_meta: HashMap::from([(
          StackString::from_static("my-peer-dep"),
          NpmPeerDependencyMeta { optional: true }
        )]),
        os: vec![SmallStackString::from_static("win32")],
        cpu: vec![SmallStackString::from_static("x86_64")],
        scripts: HashMap::from([
          (
            SmallStackString::from_static("script"),
            "testing".to_string(),
          ),
          (
            SmallStackString::from_static("postInstall"),
            "testing2".to_string(),
          )
        ]),
        // we don't bother ever setting this because we don't store it in deno_package_json
        deprecated: None,
      }
    );

    // missing version field
    match convert("{}").unwrap_err() {
      PkgJsonToVersionInfoError::VersionMissing => {
        // ok
      }
      _ => unreachable!(),
    }
    // invalid version field
    match convert(r#"{ "version": "1.0.~" }"#).unwrap_err() {
      PkgJsonToVersionInfoError::VersionInvalid { source: err } => {
        assert_eq!(err.to_string(), "Invalid npm version");
      }
      _ => unreachable!(),
    }
  }
}

View file

@ -7,14 +7,14 @@ use deno_error::JsErrorBox;
use deno_graph::NpmLoadError;
use deno_graph::NpmResolvePkgReqsResult;
use deno_npm::resolution::NpmResolutionError;
use deno_npm_installer::PackageCaching;
use deno_resolver::graph::FoundPackageJsonDepFlag;
use deno_resolver::npm::DenoInNpmPackageChecker;
use deno_semver::package::PackageReq;
use node_resolver::DenoIsBuiltInNodeModuleChecker;
use crate::args::NpmCachingStrategy;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmResolver;
use crate::sys::CliSys;
@ -49,14 +49,14 @@ pub fn on_resolve_diagnostic(
#[derive(Debug)]
pub struct CliNpmGraphResolver {
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
npm_caching: NpmCachingStrategy,
}
impl CliNpmGraphResolver {
pub fn new(
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
found_package_json_dep_flag: Arc<FoundPackageJsonDepFlag>,
npm_caching: NpmCachingStrategy,
) -> Self {

View file

@ -13,9 +13,9 @@ use deno_lib::standalone::virtual_fs::VirtualFile;
use deno_lib::standalone::virtual_fs::VirtualSymlinkParts;
use deno_lib::standalone::virtual_fs::WindowsSystemRootablePath;
use deno_lib::standalone::virtual_fs::DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME;
use deno_resolver::display::DisplayTreeNode;
use crate::util::display::human_size;
use crate::util::display::DisplayTreeNode;
pub fn output_vfs(vfs: &BuiltVfs, executable_name: &str) {
if !log::log_enabled!(log::Level::Info) {

View file

@ -27,6 +27,7 @@ use crate::factory::CliFactory;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::util::progress_bar::ProgressMessagePrompt;
@ -530,8 +531,10 @@ fn clean_node_modules(
};
// TODO(nathanwhit): this probably shouldn't reach directly into this code
let mut setup_cache =
crate::npm::installer::SetupCache::load(base.join(".setup-cache.bin"));
let mut setup_cache = deno_npm_installer::LocalSetupCache::load(
CliSys::default(),
base.join(".setup-cache.bin"),
);
for entry in entries {
let entry = entry?;

View file

@ -49,7 +49,6 @@ use crate::cache::IncrementalCache;
use crate::colors;
use crate::factory::CliFactory;
use crate::sys::CliSys;
use crate::util::diff::diff;
use crate::util::file_watcher;
use crate::util::fs::canonicalize_path;
use crate::util::path::get_extension;
@ -922,7 +921,8 @@ impl Formatter for CheckFormatter {
Ok(Some(formatted_text)) => {
not_formatted_files_count.fetch_add(1, Ordering::Relaxed);
let _g = output_lock.lock();
let diff = diff(&file_text, &formatted_text);
let diff =
deno_resolver::display::diff(&file_text, &formatted_text);
info!("");
info!("{} {}:", colors::bold("from"), file_path.display());
info!("{}", diff);

View file

@ -24,6 +24,7 @@ use deno_npm::npm_rc::ResolvedNpmRc;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageId;
use deno_npm::NpmResolutionPackage;
use deno_resolver::display::DisplayTreeNode;
use deno_resolver::DenoResolveErrorKind;
use deno_semver::npm::NpmPackageNvReference;
use deno_semver::npm::NpmPackageReqReference;
@ -36,7 +37,6 @@ use crate::display;
use crate::factory::CliFactory;
use crate::graph_util::graph_exit_integrity_errors;
use crate::npm::CliManagedNpmResolver;
use crate::util::display::DisplayTreeNode;
const JSON_SCHEMA_VERSION: u8 = 1;

View file

@ -11,13 +11,13 @@ use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::futures::FutureExt;
use deno_core::serde_json::json;
use deno_npm_installer::PackagesAllowedScripts;
use deno_runtime::WorkerExecutionMode;
use log::info;
use crate::args::DenoSubcommand;
use crate::args::Flags;
use crate::args::InitFlags;
use crate::args::PackagesAllowedScripts;
use crate::args::PermissionFlags;
use crate::args::RunFlags;
use crate::colors;

View file

@ -8,6 +8,7 @@ use std::sync::Arc;
use deno_core::error::AnyError;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_npm_installer::PackageCaching;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use deno_semver::Version;
@ -16,7 +17,6 @@ use crate::factory::CliFactory;
use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::graph_util::CreateGraphOptions;
use crate::npm::installer::PackageCaching;
pub async fn cache_top_level_deps(
// todo(dsherret): don't pass the factory into this function. Instead use ctor deps

View file

@ -42,7 +42,7 @@ use crate::graph_container::ModuleGraphContainer;
use crate::graph_container::ModuleGraphUpdatePermit;
use crate::jsr::JsrFetchResolver;
use crate::module_loader::ModuleLoadPreparer;
use crate::npm::installer::NpmInstaller;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmResolver;
use crate::npm::NpmFetchResolver;
use crate::util::sync::AtomicFlag;
@ -461,7 +461,7 @@ pub struct DepManager {
pub(crate) jsr_fetch_resolver: Arc<JsrFetchResolver>,
pub(crate) npm_fetch_resolver: Arc<NpmFetchResolver>,
npm_resolver: CliNpmResolver,
npm_installer: Arc<NpmInstaller>,
npm_installer: Arc<CliNpmInstaller>,
permissions_container: PermissionsContainer,
main_module_graph_container: Arc<MainModuleGraphContainer>,
lockfile: Option<Arc<CliLockfile>>,
@ -471,7 +471,7 @@ pub struct DepManagerArgs {
pub module_load_preparer: Arc<ModuleLoadPreparer>,
pub jsr_fetch_resolver: Arc<JsrFetchResolver>,
pub npm_fetch_resolver: Arc<NpmFetchResolver>,
pub npm_installer: Arc<NpmInstaller>,
pub npm_installer: Arc<CliNpmInstaller>,
pub npm_resolver: CliNpmResolver,
pub permissions_container: PermissionsContainer,
pub main_module_graph_container: Arc<MainModuleGraphContainer>,

View file

@ -48,7 +48,7 @@ use crate::cdp;
use crate::cdp::RemoteObjectId;
use crate::colors;
use crate::lsp::ReplLanguageServer;
use crate::npm::installer::NpmInstaller;
use crate::npm::CliNpmInstaller;
use crate::resolver::CliResolver;
use crate::tools::test::report_tests;
use crate::tools::test::reporters::PrettyTestReporter;
@ -172,7 +172,7 @@ struct ReplJsxState {
pub struct ReplSession {
internal_object_id: Option<RemoteObjectId>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
resolver: Arc<CliResolver>,
pub worker: MainWorker,
session: LocalInspectorSession,
@ -192,7 +192,7 @@ impl ReplSession {
#[allow(clippy::too_many_arguments)]
pub async fn initialize(
cli_options: &CliOptions,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
resolver: Arc<CliResolver>,
tsconfig_resolver: &TsConfigResolver,
mut worker: MainWorker,

View file

@ -11,6 +11,7 @@ use deno_core::futures::FutureExt;
use deno_core::resolve_url_or_path;
use deno_lib::standalone::binary::SerializedWorkspaceResolverImportMap;
use deno_lib::worker::LibWorkerFactoryRoots;
use deno_npm_installer::PackageCaching;
use deno_runtime::WorkerExecutionMode;
use eszip::EszipV2;
use jsonc_parser::ParseOptions;
@ -20,7 +21,6 @@ use crate::args::Flags;
use crate::args::RunFlags;
use crate::args::WatchFlagsWithPaths;
use crate::factory::CliFactory;
use crate::npm::installer::PackageCaching;
use crate::util;
use crate::util::file_watcher::WatcherRestartMode;

View file

@ -24,6 +24,7 @@ use deno_core::futures::stream::futures_unordered;
use deno_core::futures::FutureExt;
use deno_core::futures::StreamExt;
use deno_core::url::Url;
use deno_npm_installer::PackageCaching;
use deno_path_util::normalize_path;
use deno_task_shell::KillSignal;
use deno_task_shell::ShellCommand;
@ -38,8 +39,7 @@ use crate::args::TaskFlags;
use crate::colors;
use crate::factory::CliFactory;
use crate::node::CliNodeResolver;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmResolver;
use crate::task_runner;
use crate::task_runner::run_future_forwarding_signals;
@ -231,7 +231,7 @@ struct RunSingleOptions<'a> {
struct TaskRunner<'a> {
task_flags: &'a TaskFlags,
npm_installer: Option<&'a NpmInstaller>,
npm_installer: Option<&'a CliNpmInstaller>,
npm_resolver: &'a CliNpmResolver,
node_resolver: &'a CliNodeResolver,
env_vars: HashMap<OsString, OsString>,

View file

@ -4,7 +4,6 @@ use std::io::Write;
use deno_core::error::AnyError;
use deno_core::serde_json;
use deno_runtime::colors;
/// A function that converts a float to a string the represents a human
/// readable version of that number.
@ -87,78 +86,6 @@ where
Ok(())
}
/// A node in a text tree used for rendering hierarchical console output
/// (ex. dependency trees) with gray box-drawing connectors.
pub struct DisplayTreeNode {
  pub text: String,
  pub children: Vec<DisplayTreeNode>,
}

impl DisplayTreeNode {
  /// Creates a childless node with the given text.
  pub fn from_text(text: String) -> Self {
    Self {
      text,
      children: Default::default(),
    }
  }

  /// Writes this node's text followed by all descendants to `writer`,
  /// drawing connector characters (├─, └─, │) in front of each child.
  pub fn print<TWrite: std::fmt::Write>(
    &self,
    writer: &mut TWrite,
  ) -> std::fmt::Result {
    fn print_children<TWrite: std::fmt::Write>(
      writer: &mut TWrite,
      prefix: &str,
      children: &[DisplayTreeNode],
    ) -> std::fmt::Result {
      const SIBLING_CONNECTOR: char = '├';
      const LAST_SIBLING_CONNECTOR: char = '└';
      const CHILD_DEPS_CONNECTOR: char = '┬';
      const CHILD_NO_DEPS_CONNECTOR: char = '─';
      const VERTICAL_CONNECTOR: char = '│';
      const EMPTY_CONNECTOR: char = ' ';

      let child_len = children.len();
      for (index, child) in children.iter().enumerate() {
        let is_last = index + 1 == child_len;
        // the last sibling gets └, every other sibling gets ├
        let sibling_connector = if is_last {
          LAST_SIBLING_CONNECTOR
        } else {
          SIBLING_CONNECTOR
        };
        // nodes with children open a ┬ branch; leaves close with ─
        let child_connector = if child.children.is_empty() {
          CHILD_NO_DEPS_CONNECTOR
        } else {
          CHILD_DEPS_CONNECTOR
        };
        writeln!(
          writer,
          "{} {}",
          colors::gray(format!(
            "{prefix}{sibling_connector}─{child_connector}"
          )),
          child.text
        )?;
        // non-last siblings keep a │ rail in the prefix for their descendants
        let child_prefix = format!(
          "{}{}{}",
          prefix,
          if is_last {
            EMPTY_CONNECTOR
          } else {
            VERTICAL_CONNECTOR
          },
          EMPTY_CONNECTOR
        );
        print_children(writer, &child_prefix, &child.children)?;
      }
      Ok(())
    }

    writeln!(writer, "{}", self.text)?;
    print_children(writer, "", &self.children)?;
    Ok(())
  }
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -4,8 +4,6 @@ use std::io::Error;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use deno_config::glob::FileCollector;
use deno_config::glob::FilePatterns;
@ -14,16 +12,9 @@ use deno_config::glob::PathOrPatternSet;
use deno_config::glob::WalkEntry;
use deno_core::anyhow::anyhow;
use deno_core::error::AnyError;
use deno_core::unsync::spawn_blocking;
use deno_core::ModuleSpecifier;
use sys_traits::FsCreateDirAll;
use sys_traits::FsDirEntry;
use sys_traits::FsSymlinkDir;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
use crate::util::progress_bar::ProgressBarStyle;
use crate::util::progress_bar::ProgressMessagePrompt;
/// Creates a std::fs::File handling if the parent does not exist.
pub fn create_file(file_path: &Path) -> std::io::Result<std::fs::File> {
@ -150,182 +141,6 @@ pub async fn remove_dir_all_if_exists(path: &Path) -> std::io::Result<()> {
}
}
/// Clones a directory to another directory. The exact method
/// is not guaranteed - it may be a hardlink, copy, or other platform-specific
/// operation.
///
/// Note: Does not handle symlinks.
pub fn clone_dir_recursive<
  // note: the duplicate `FsCloneFile` bound that used to be listed here
  // was redundant and has been removed
  TSys: sys_traits::FsCopy
    + sys_traits::FsCloneFile
    + sys_traits::FsCreateDir
    + sys_traits::FsHardLink
    + sys_traits::FsReadDir
    + sys_traits::FsRemoveFile
    + sys_traits::ThreadSleep,
>(
  sys: &TSys,
  from: &Path,
  to: &Path,
) -> Result<(), CopyDirRecursiveError> {
  if cfg!(target_vendor = "apple") {
    if let Some(parent) = to.parent() {
      sys.fs_create_dir_all(parent)?;
    }
    // Try to clone the whole directory in one call
    if let Err(err) = sys.fs_clone_file(from, to) {
      if !matches!(
        err.kind(),
        std::io::ErrorKind::AlreadyExists | std::io::ErrorKind::Unsupported
      ) {
        log::debug!(
          "Failed to clone dir {:?} to {:?} via clonefile: {}",
          from,
          to,
          err
        );
      }
      // clonefile won't overwrite existing files, so if the dir exists
      // we need to handle it recursively.
      copy_dir_recursive(sys, from, to)?;
    }
  } else if let Err(e) = deno_npm_cache::hard_link_dir_recursive(sys, from, to)
  {
    // hard linking failed (ex. cross-device); fall back to copying
    log::debug!("Failed to hard link dir {:?} to {:?}: {}", from, to, e);
    copy_dir_recursive(sys, from, to)?;
  }
  Ok(())
}
/// Errors from `copy_dir_recursive`, each carrying the path(s) involved so
/// the failure point in the tree is identifiable.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CopyDirRecursiveError {
  /// Failed creating the destination directory.
  #[class(inherit)]
  #[error("Creating {path}")]
  Creating {
    path: PathBuf,
    #[source]
    #[inherit]
    source: Error,
  },
  /// Failed reading the source directory.
  #[class(inherit)]
  #[error("Reading {path}")]
  Reading {
    path: PathBuf,
    #[source]
    #[inherit]
    source: Error,
  },
  /// A nested directory copy failed (boxed to keep the error small).
  #[class(inherit)]
  #[error("Dir {from} to {to}")]
  Dir {
    from: PathBuf,
    to: PathBuf,
    #[source]
    #[inherit]
    source: Box<Self>,
  },
  /// Copying an individual file failed.
  #[class(inherit)]
  #[error("Copying {from} to {to}")]
  Copying {
    from: PathBuf,
    to: PathBuf,
    #[source]
    #[inherit]
    source: Error,
  },
  /// Any other io error (ex. from reading a directory entry).
  #[class(inherit)]
  #[error(transparent)]
  Other(#[from] Error),
}
/// Copies a directory to another directory.
///
/// Note: Does not handle symlinks.
pub fn copy_dir_recursive<
  TSys: sys_traits::FsCopy
    + sys_traits::FsCloneFile
    + sys_traits::FsCreateDir
    + sys_traits::FsHardLink
    + sys_traits::FsReadDir,
>(
  sys: &TSys,
  from: &Path,
  to: &Path,
) -> Result<(), CopyDirRecursiveError> {
  // ensure the destination directory exists before copying entries into it
  if let Err(source) = sys.fs_create_dir_all(to) {
    return Err(CopyDirRecursiveError::Creating {
      path: to.to_path_buf(),
      source,
    });
  }
  let entries =
    sys
      .fs_read_dir(from)
      .map_err(|source| CopyDirRecursiveError::Reading {
        path: from.to_path_buf(),
        source,
      })?;
  for entry in entries {
    let entry = entry?;
    let entry_type = entry.file_type()?;
    let source_path = from.join(entry.file_name());
    let target_path = to.join(entry.file_name());
    if entry_type.is_dir() {
      // recurse into sub directories, boxing the nested error
      if let Err(source) = copy_dir_recursive(sys, &source_path, &target_path)
      {
        return Err(CopyDirRecursiveError::Dir {
          from: source_path,
          to: target_path,
          source: Box::new(source),
        });
      }
    } else if entry_type.is_file() {
      // copy plain files; other entry kinds (ex. symlinks) are skipped
      if let Err(source) = sys.fs_copy(&source_path, &target_path) {
        return Err(CopyDirRecursiveError::Copying {
          from: source_path,
          to: target_path,
          source,
        });
      }
    }
  }
  Ok(())
}
pub fn symlink_dir<TSys: sys_traits::BaseFsSymlinkDir>(
sys: &TSys,
oldpath: &Path,
newpath: &Path,
) -> Result<(), Error> {
let err_mapper = |err: Error, kind: Option<ErrorKind>| {
Error::new(
kind.unwrap_or_else(|| err.kind()),
format!(
"{}, symlink '{}' -> '{}'",
err,
oldpath.display(),
newpath.display()
),
)
};
sys.fs_symlink_dir(oldpath, newpath).map_err(|err| {
#[cfg(windows)]
if let Some(code) = err.raw_os_error() {
if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD
|| code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION
{
return err_mapper(err, Some(ErrorKind::PermissionDenied));
}
}
err_mapper(err, None)
})
}
/// Gets the total size (in bytes) of a directory.
pub fn dir_size(path: &Path) -> std::io::Result<u64> {
let entries = std::fs::read_dir(path)?;
@ -340,161 +155,6 @@ pub fn dir_size(path: &Path) -> std::io::Result<u64> {
Ok(total)
}
/// State held while the file lock is owned; dropping it releases the lock
/// and stops the background "still alive" poll-file updater.
struct LaxSingleProcessFsFlagInner {
  // path of the lock file (used only for log messages)
  file_path: PathBuf,
  // the open file whose exclusive lock we hold
  fs_file: std::fs::File,
  // cancelling this stops the blocking task that refreshes the poll file
  finished_token: Arc<tokio_util::sync::CancellationToken>,
}
impl Drop for LaxSingleProcessFsFlagInner {
  fn drop(&mut self) {
    // Stop the background thread that keeps refreshing the poll file.
    self.finished_token.cancel();
    // Best-effort release of the advisory file lock; a failure here is
    // non-fatal, so it is only logged at debug level.
    match fs3::FileExt::unlock(&self.fs_file) {
      Ok(_) => {}
      Err(err) => {
        log::debug!(
          "Failed releasing lock for {}. {:#}",
          self.file_path.display(),
          err
        );
      }
    }
  }
}
/// A file system based flag that will attempt to synchronize multiple
/// processes so they go one after the other. In scenarios where
/// synchronization cannot be achieved, it will allow the current process
/// to proceed.
///
/// This should only be used in places where it's ideal for multiple
/// processes to not update something on the file system at the same time,
/// but it's not that big of a deal.
pub struct LaxSingleProcessFsFlag(
  // `None` means the lock could not (or should not) be acquired and the
  // process was let through anyway — the "lax" part of the contract.
  #[allow(dead_code)] Option<LaxSingleProcessFsFlagInner>,
);
impl LaxSingleProcessFsFlag {
  /// Attempts to acquire an exclusive advisory lock on `file_path`,
  /// waiting (and showing `long_wait_message` after ~1s) while another
  /// process holds it.
  ///
  /// Never fails: if the lock file can't be opened, the holder appears
  /// dead (its poll file stops updating), or too many errors occur, the
  /// current process is allowed to proceed with `Self(None)`.
  pub async fn lock(file_path: PathBuf, long_wait_message: &str) -> Self {
    log::debug!("Acquiring file lock at {}", file_path.display());
    use fs3::FileExt;
    // Sibling file the lock holder keeps rewriting to prove it's alive.
    let last_updated_path = file_path.with_extension("lock.poll");
    let start_instant = std::time::Instant::now();
    let open_result = std::fs::OpenOptions::new()
      .read(true)
      .write(true)
      .create(true)
      .truncate(false)
      .open(&file_path);

    match open_result {
      Ok(fs_file) => {
        let mut pb_update_guard = None;
        let mut error_count = 0;
        // give up after 10 consecutive errors reading the poll file
        while error_count < 10 {
          let lock_result = fs_file.try_lock_exclusive();
          let poll_file_update_ms = 100;
          match lock_result {
            Ok(_) => {
              log::debug!("Acquired file lock at {}", file_path.display());
              let _ignore = std::fs::write(&last_updated_path, "");
              let token = Arc::new(tokio_util::sync::CancellationToken::new());

              // Spawn a blocking task that will continually update a file
              // signalling the lock is alive. This is a fail safe for when
              // a file lock is never released. For example, on some operating
              // systems, if a process does not release the lock (say it's
              // killed), then the OS may release it at an indeterminate time
              //
              // This uses a blocking task because we use a single threaded
              // runtime and this is time sensitive so we don't want it to update
              // at the whims of whatever is occurring on the runtime thread.
              spawn_blocking({
                let token = token.clone();
                let last_updated_path = last_updated_path.clone();
                move || {
                  let mut i = 0;
                  while !token.is_cancelled() {
                    i += 1;
                    let _ignore =
                      std::fs::write(&last_updated_path, i.to_string());
                    std::thread::sleep(Duration::from_millis(
                      poll_file_update_ms,
                    ));
                  }
                }
              });

              return Self(Some(LaxSingleProcessFsFlagInner {
                file_path,
                fs_file,
                finished_token: token,
              }));
            }
            Err(_) => {
              // show a message if it's been a while
              if pb_update_guard.is_none()
                && start_instant.elapsed().as_millis() > 1_000
              {
                let pb = ProgressBar::new(ProgressBarStyle::TextOnly);
                let guard = pb.update_with_prompt(
                  ProgressMessagePrompt::Blocking,
                  long_wait_message,
                );
                pb_update_guard = Some((guard, pb));
              }

              // sleep for a little bit
              tokio::time::sleep(Duration::from_millis(20)).await;

              // Poll the last updated path to check if it's stopped updating,
              // which is an indication that the file lock is claimed, but
              // was never properly released.
              match std::fs::metadata(&last_updated_path)
                .and_then(|p| p.modified())
              {
                Ok(last_updated_time) => {
                  let current_time = std::time::SystemTime::now();
                  match current_time.duration_since(last_updated_time) {
                    Ok(duration) => {
                      if duration.as_millis()
                        > (poll_file_update_ms * 2) as u128
                      {
                        // the other process hasn't updated this file in a long time
                        // so maybe it was killed and the operating system hasn't
                        // released the file lock yet
                        return Self(None);
                      } else {
                        error_count = 0; // reset
                      }
                    }
                    Err(_) => {
                      // system clock went backwards; count as an error
                      error_count += 1;
                    }
                  }
                }
                Err(_) => {
                  error_count += 1;
                }
              }
            }
          }
        }
        drop(pb_update_guard); // explicit for clarity
        Self(None)
      }
      Err(err) => {
        log::debug!(
          "Failed to open file lock at {}. {:#}",
          file_path.display(),
          err
        );
        Self(None) // let the process through
      }
    }
  }
}
pub fn specifier_from_file_path(
path: &Path,
) -> Result<ModuleSpecifier, AnyError> {
@ -504,13 +164,10 @@ pub fn specifier_from_file_path(
#[cfg(test)]
mod tests {
use deno_core::futures;
use deno_core::parking_lot::Mutex;
use deno_path_util::normalize_path;
use pretty_assertions::assert_eq;
use test_util::PathRef;
use test_util::TempDir;
use tokio::sync::Notify;
use super::*;
@ -660,94 +317,4 @@ mod tests {
expected
);
}
  // Verifies mutual exclusion: task 2 waits for task 1's flag to drop before
  // writing, so "update1" must be observed before "update2". The Notify
  // pairs force the interleaving deterministically.
  #[tokio::test]
  async fn lax_fs_lock() {
    let temp_dir = TempDir::new();
    let lock_path = temp_dir.path().join("file.lock");
    let signal1 = Arc::new(Notify::new());
    let signal2 = Arc::new(Notify::new());
    let signal3 = Arc::new(Notify::new());
    let signal4 = Arc::new(Notify::new());
    tokio::spawn({
      let lock_path = lock_path.clone();
      let signal1 = signal1.clone();
      let signal2 = signal2.clone();
      let signal3 = signal3.clone();
      let signal4 = signal4.clone();
      let temp_dir = temp_dir.clone();
      async move {
        let flag =
          LaxSingleProcessFsFlag::lock(lock_path.to_path_buf(), "waiting")
            .await;
        signal1.notify_one();
        signal2.notified().await;
        tokio::time::sleep(Duration::from_millis(10)).await; // give the other thread time to acquire the lock
        temp_dir.write("file.txt", "update1");
        signal3.notify_one();
        signal4.notified().await;
        drop(flag);
      }
    });
    let signal5 = Arc::new(Notify::new());
    tokio::spawn({
      let temp_dir = temp_dir.clone();
      let signal5 = signal5.clone();
      async move {
        signal1.notified().await;
        signal2.notify_one();
        // blocks here until the first task drops its flag
        let flag =
          LaxSingleProcessFsFlag::lock(lock_path.to_path_buf(), "waiting")
            .await;
        temp_dir.write("file.txt", "update2");
        signal5.notify_one();
        drop(flag);
      }
    });
    signal3.notified().await;
    assert_eq!(temp_dir.read_to_string("file.txt"), "update1");
    signal4.notify_one();
    signal5.notified().await;
    assert_eq!(temp_dir.read_to_string("file.txt"), "update2");
  }
  // Spawns 10 tasks that each append their index to a shared file under the
  // flag, reading then rewriting the whole file (deliberately racy without
  // the lock). If the flag serializes them, the file contents match the
  // order in which the permits were granted.
  #[tokio::test]
  async fn lax_fs_lock_ordered() {
    let temp_dir = TempDir::new();
    let lock_path = temp_dir.path().join("file.lock");
    let output_path = temp_dir.path().join("output");
    let expected_order = Arc::new(Mutex::new(Vec::new()));
    let count = 10;
    let mut tasks = Vec::with_capacity(count);

    std::fs::write(&output_path, "").unwrap();

    for i in 0..count {
      let lock_path = lock_path.clone();
      let output_path = output_path.clone();
      let expected_order = expected_order.clone();
      tasks.push(tokio::spawn(async move {
        let flag =
          LaxSingleProcessFsFlag::lock(lock_path.to_path_buf(), "waiting")
            .await;
        expected_order.lock().push(i.to_string());
        // be extremely racy
        let mut output = std::fs::read_to_string(&output_path).unwrap();
        if !output.is_empty() {
          output.push('\n');
        }
        output.push_str(&i.to_string());
        std::fs::write(&output_path, output).unwrap();
        drop(flag);
      }));
    }

    futures::future::join_all(tasks).await;
    let expected_output = expected_order.lock().join("\n");
    assert_eq!(
      std::fs::read_to_string(output_path).unwrap(),
      expected_output
    );
  }
}

View file

@ -4,7 +4,6 @@
pub mod archive;
pub mod collections;
pub mod console;
pub mod diff;
pub mod display;
pub mod draw_thread;
pub mod extract;

View file

@ -2,7 +2,6 @@
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use deno_ast::MediaType;
use deno_ast::ModuleSpecifier;
@ -128,11 +127,6 @@ pub fn relative_specifier(
Some(to_percent_decoded_str(&text))
}
/// Computes the relative path from `from` to `to` via `pathdiff`.
/// NOTE(review): presumably returns `None` when no relative path exists
/// (e.g. mixing absolute and relative inputs) — confirm against pathdiff docs.
#[cfg_attr(windows, allow(dead_code))]
pub fn relative_path(from: &Path, to: &Path) -> Option<PathBuf> {
  pathdiff::diff_paths(to, from)
}
/// Slightly different behaviour than the default matching
/// where an exact path needs to be matched to be opted-in
/// rather than just a partial directory match.

View file

@ -266,6 +266,23 @@ pub struct ProgressBar {
inner: ProgressBarInner,
}
/// Adapts the CLI progress bar to the reporter interface expected by the
/// extracted `deno_npm_installer` crate; each hook simply delegates to the
/// existing progress-bar API with the matching prompt style.
impl deno_npm_installer::Reporter for ProgressBar {
  type Guard = UpdateGuard;
  type ClearGuard = ClearGuard;

  fn on_blocking(&self, message: &str) -> Self::Guard {
    self.update_with_prompt(ProgressMessagePrompt::Blocking, message)
  }

  fn on_initializing(&self, message: &str) -> Self::Guard {
    self.update_with_prompt(ProgressMessagePrompt::Initialize, message)
  }

  fn clear_guard(&self) -> Self::ClearGuard {
    self.clear_guard()
  }
}
impl ProgressBar {
/// Checks if progress bars are supported
pub fn are_supported() -> bool {

View file

@ -1,9 +1,6 @@
// Copyright 2018-2025 the Deno authors. MIT license.
mod async_flag;
mod task_queue;
pub use async_flag::AsyncFlag;
pub use deno_core::unsync::sync::AtomicFlag;
pub use task_queue::TaskQueue;
pub use task_queue::TaskQueuePermit;

View file

@ -1,267 +0,0 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::collections::LinkedList;
use std::future::Future;
use std::sync::Arc;
use deno_core::futures::task::AtomicWaker;
use deno_core::parking_lot::Mutex;
use super::AtomicFlag;
/// Per-waiter bookkeeping shared between the queue and one acquire future.
#[derive(Debug, Default)]
struct TaskQueueTaskItem {
  // raised when it's this waiter's turn to run
  is_ready: AtomicFlag,
  // raised if the acquire future was dropped before getting its turn
  is_future_dropped: AtomicFlag,
  // wakes the pending acquire future when its turn arrives
  waker: AtomicWaker,
}
/// Mutex-protected queue state: whether a permit is currently held and the
/// FIFO list of waiters.
#[derive(Debug, Default)]
struct TaskQueueTasks {
  is_running: bool,
  items: LinkedList<Arc<TaskQueueTaskItem>>,
}
/// A queue that executes tasks sequentially one after the other
/// ensuring order and that no task runs at the same time as another.
///
/// Note that this differs from tokio's semaphore in that the order
/// is acquired synchronously.
#[derive(Debug, Default)]
pub struct TaskQueue {
  tasks: Mutex<TaskQueueTasks>,
}
impl TaskQueue {
  /// Acquires a permit where the tasks are executed one at a time
  /// and in the order that they were acquired.
  pub fn acquire(&self) -> TaskQueuePermitAcquireFuture {
    TaskQueuePermitAcquireFuture::new(self)
  }

  /// Alternate API that acquires a permit internally
  /// for the duration of the future.
  #[allow(unused)]
  pub fn run<'a, R>(
    &'a self,
    future: impl Future<Output = R> + 'a,
  ) -> impl Future<Output = R> + 'a {
    // the queue position is claimed here, synchronously, before the
    // returned future is ever polled — this is what guarantees ordering
    let acquire_future = self.acquire();
    async move {
      let permit = acquire_future.await;
      let result = future.await;
      drop(permit); // explicit for clarity
      result
    }
  }

  /// Hands the permit to the next live waiter, if any. Called when a permit
  /// is dropped or a pending future is dropped while ready.
  fn raise_next(&self) {
    let front_item = {
      let mut tasks = self.tasks.lock();

      // clear out any wakers for futures that were dropped
      while let Some(front_waker) = tasks.items.front() {
        if front_waker.is_future_dropped.is_raised() {
          tasks.items.pop_front();
        } else {
          break;
        }
      }

      let front_item = tasks.items.pop_front();
      // if nobody is waiting, mark the queue idle
      tasks.is_running = front_item.is_some();
      front_item
    };

    // wake up the next waker
    if let Some(front_item) = front_item {
      front_item.is_ready.raise();
      front_item.waker.wake();
    }
  }
}
/// A permit that when dropped will allow another task to proceed.
pub struct TaskQueuePermit<'a>(&'a TaskQueue);

impl Drop for TaskQueuePermit<'_> {
  fn drop(&mut self) {
    // releasing the permit hands the queue to the next waiter
    self.0.raise_next();
  }
}
/// Future returned by [`TaskQueue::acquire`]; its queue position is reserved
/// at construction time, not at first poll.
pub struct TaskQueuePermitAcquireFuture<'a> {
  // `Some` until the permit is handed out (or the future is dropped)
  task_queue: Option<&'a TaskQueue>,
  item: Arc<TaskQueueTaskItem>,
}
impl<'a> TaskQueuePermitAcquireFuture<'a> {
  /// Creates the future, synchronously reserving its position in the queue
  /// so that acquisition order matches creation order.
  pub fn new(task_queue: &'a TaskQueue) -> Self {
    let item = Arc::new(TaskQueueTaskItem::default());
    {
      // acquire the waker position synchronously
      let mut tasks = task_queue.tasks.lock();
      if tasks.is_running {
        // someone already holds the permit — join the FIFO wait list
        tasks.items.push_back(item.clone());
      } else {
        // queue was idle; this future will resolve on first poll
        tasks.is_running = true;
        item.is_ready.raise();
      }
    }
    Self {
      task_queue: Some(task_queue),
      item,
    }
  }
}
impl Drop for TaskQueuePermitAcquireFuture<'_> {
  fn drop(&mut self) {
    // only act if the permit was never handed out via poll
    if let Some(task_queue) = self.task_queue.take() {
      if self.item.is_ready.is_raised() {
        // this future already owned the turn; pass it on so the queue
        // doesn't stall
        task_queue.raise_next();
      } else {
        // still waiting in line; mark the slot dead so raise_next skips it
        self.item.is_future_dropped.raise();
      }
    }
  }
}
impl<'a> Future for TaskQueuePermitAcquireFuture<'a> {
  type Output = TaskQueuePermit<'a>;

  fn poll(
    mut self: std::pin::Pin<&mut Self>,
    cx: &mut std::task::Context<'_>,
  ) -> std::task::Poll<Self::Output> {
    if self.item.is_ready.is_raised() {
      // taking `task_queue` out also tells Drop the permit was handed over
      std::task::Poll::Ready(TaskQueuePermit(self.task_queue.take().unwrap()))
    } else {
      // register (or re-register) so raise_next can wake us
      self.item.waker.register(cx.waker());
      std::task::Poll::Pending
    }
  }
}
#[cfg(test)]
mod test {
  use std::sync::Arc;

  use deno_core::futures;
  use deno_core::parking_lot::Mutex;

  use super::*;

  // Each task asserts the counter equals its own index, proving strict
  // sequential FIFO execution even across spawn_blocking hops.
  #[tokio::test]
  async fn task_queue_runs_one_after_other() {
    let task_queue = TaskQueue::default();
    let mut tasks = Vec::new();
    let data = Arc::new(Mutex::new(0));
    for i in 0..100 {
      let data = data.clone();
      tasks.push(task_queue.run(async move {
        deno_core::unsync::spawn_blocking(move || {
          let mut data = data.lock();
          assert_eq!(*data, i);
          *data = i + 1;
        })
        .await
        .unwrap();
      }));
    }
    futures::future::join_all(tasks).await;
  }

  // Two futures joined concurrently must still run in acquisition order.
  #[tokio::test]
  async fn task_queue_run_in_sequence() {
    let task_queue = TaskQueue::default();
    let data = Arc::new(Mutex::new(0));

    let first = task_queue.run(async {
      *data.lock() = 1;
    });
    let second = task_queue.run(async {
      assert_eq!(*data.lock(), 1);
      *data.lock() = 2;
    });
    let _ = tokio::join!(first, second);

    assert_eq!(*data.lock(), 2);
  }

  // Dropping a never-polled acquire future must release its turn so a
  // later waiter isn't blocked forever.
  #[tokio::test]
  async fn task_queue_future_dropped_before_poll() {
    let task_queue = Arc::new(TaskQueue::default());

    // acquire a future, but do not await it
    let future = task_queue.acquire();

    // this task tries to acquire another permit, but will be blocked by the first permit.
    let enter_flag = Arc::new(AtomicFlag::default());
    let delayed_task = deno_core::unsync::spawn({
      let enter_flag = enter_flag.clone();
      let task_queue = task_queue.clone();
      async move {
        enter_flag.raise();
        task_queue.acquire().await;
        true
      }
    });

    // ensure the task gets a chance to be scheduled and blocked
    tokio::task::yield_now().await;
    assert!(enter_flag.is_raised());

    // now, drop the first future
    drop(future);

    assert!(delayed_task.await.unwrap());
  }

  // Same as above but with many queued-then-dropped futures, exercising the
  // dead-waiter cleanup loop in raise_next.
  #[tokio::test]
  async fn task_queue_many_future_dropped_before_poll() {
    let task_queue = Arc::new(TaskQueue::default());

    // acquire a future, but do not await it
    let mut futures = Vec::new();
    for _ in 0..=10_000 {
      futures.push(task_queue.acquire());
    }

    // this task tries to acquire another permit, but will be blocked by the first permit.
    let enter_flag = Arc::new(AtomicFlag::default());
    let delayed_task = deno_core::unsync::spawn({
      let task_queue = task_queue.clone();
      let enter_flag = enter_flag.clone();
      async move {
        enter_flag.raise();
        task_queue.acquire().await;
        true
      }
    });

    // ensure the task gets a chance to be scheduled and blocked
    tokio::task::yield_now().await;
    assert!(enter_flag.is_raised());

    // now, drop the futures
    drop(futures);

    assert!(delayed_task.await.unwrap());
  }

  // Dropping a waiter in the middle of the line must not break the chain
  // for the waiters behind it.
  #[tokio::test]
  async fn task_queue_middle_future_dropped_while_permit_acquired() {
    let task_queue = TaskQueue::default();

    let fut1 = task_queue.acquire();
    let fut2 = task_queue.acquire();
    let fut3 = task_queue.acquire();

    // should not hang
    drop(fut2);
    drop(fut1.await);
    drop(fut3.await);
  }
}

View file

@ -12,6 +12,7 @@ use deno_error::JsErrorBox;
use deno_lib::worker::LibMainWorker;
use deno_lib::worker::LibMainWorkerFactory;
use deno_lib::worker::ResolveNpmBinaryEntrypointError;
use deno_npm_installer::PackageCaching;
use deno_runtime::deno_permissions::PermissionsContainer;
use deno_runtime::worker::MainWorker;
use deno_runtime::WorkerExecutionMode;
@ -21,8 +22,7 @@ use tokio::select;
use crate::args::CliLockfile;
use crate::args::NpmCachingStrategy;
use crate::npm::installer::NpmInstaller;
use crate::npm::installer::PackageCaching;
use crate::npm::CliNpmInstaller;
use crate::npm::CliNpmResolver;
use crate::sys::CliSys;
use crate::util::file_watcher::WatcherCommunicator;
@ -307,15 +307,13 @@ pub enum CreateCustomWorkerError {
NpmPackageReq(JsErrorBox),
#[class(inherit)]
#[error(transparent)]
AtomicWriteFileWithRetries(
#[from] crate::args::AtomicWriteFileWithRetriesError,
),
LockfileWrite(#[from] deno_resolver::lockfile::LockfileWriteError),
}
pub struct CliMainWorkerFactory {
lib_main_worker_factory: LibMainWorkerFactory<CliSys>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
npm_resolver: CliNpmResolver,
root_permissions: PermissionsContainer,
shared: Arc<SharedState>,
@ -330,7 +328,7 @@ impl CliMainWorkerFactory {
lib_main_worker_factory: LibMainWorkerFactory<CliSys>,
maybe_file_watcher_communicator: Option<Arc<WatcherCommunicator>>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_installer: Option<Arc<NpmInstaller>>,
npm_installer: Option<Arc<CliNpmInstaller>>,
npm_resolver: CliNpmResolver,
sys: CliSys,
options: CliMainWorkerOptions,

View file

@ -875,6 +875,7 @@ deno_core::extension!(deno_node,
},
);
#[sys_traits::auto_impl]
pub trait ExtNodeSys:
sys_traits::BaseFsCanonicalize
+ sys_traits::BaseFsMetadata
@ -884,16 +885,6 @@ pub trait ExtNodeSys:
{
}
impl<
T: sys_traits::BaseFsCanonicalize
+ sys_traits::BaseFsMetadata
+ sys_traits::BaseFsRead
+ sys_traits::EnvCurrentDir
+ Clone,
> ExtNodeSys for T
{
}
pub type NodeResolver<TInNpmPackageChecker, TNpmPackageFolderResolver, TSys> =
node_resolver::NodeResolver<
TInNpmPackageChecker,

View file

@ -28,6 +28,7 @@ deno_cache_dir.workspace = true
deno_config.workspace = true
deno_error.workspace = true
deno_graph = { workspace = true, optional = true }
deno_lockfile.workspace = true
deno_media_type.workspace = true
deno_npm.workspace = true
deno_package_json.workspace = true
@ -35,6 +36,7 @@ deno_path_util.workspace = true
deno_semver.workspace = true
deno_terminal.workspace = true
deno_unsync.workspace = true
dissimilar.workspace = true
futures.workspace = true
import_map.workspace = true
indexmap.workspace = true

View file

@ -1,12 +1,14 @@
// Copyright 2018-2025 the Deno authors. MIT license.
//! It would be best to move these utilities out of this
//! crate as this is not specific to resolution, but for
//! the time being it's fine for this to live here.
use std::fmt::Write as _;
use deno_terminal::colors;
use dissimilar::diff as difference;
use dissimilar::Chunk;
use crate::colors;
/// Print diff of the same file_path, before and after formatting.
///
/// Diff format is loosely based on GitHub diff formatting.
@ -171,6 +173,78 @@ fn fmt_rem_text_highlight(x: &str) -> String {
colors::white_on_red(x).to_string()
}
/// A node in a printable tree (text label plus ordered children), rendered
/// with box-drawing connectors similar to `npm ls` output.
pub struct DisplayTreeNode {
  pub text: String,
  pub children: Vec<DisplayTreeNode>,
}
impl DisplayTreeNode {
  /// Creates a leaf node with the given label and no children.
  pub fn from_text(text: String) -> Self {
    Self {
      text,
      children: Default::default(),
    }
  }

  /// Writes this node's label followed by its subtree, drawing gray
  /// box-drawing connectors (`├─┬`, `└──`, `│`) in front of each child.
  pub fn print<TWrite: std::fmt::Write>(
    &self,
    writer: &mut TWrite,
  ) -> std::fmt::Result {
    // Recursively prints `children`, with `prefix` holding the vertical
    // guide characters accumulated from all ancestor levels.
    fn print_children<TWrite: std::fmt::Write>(
      writer: &mut TWrite,
      prefix: &str,
      children: &[DisplayTreeNode],
    ) -> std::fmt::Result {
      const SIBLING_CONNECTOR: char = '├';
      const LAST_SIBLING_CONNECTOR: char = '└';
      const CHILD_DEPS_CONNECTOR: char = '┬';
      const CHILD_NO_DEPS_CONNECTOR: char = '─';
      const VERTICAL_CONNECTOR: char = '│';
      const EMPTY_CONNECTOR: char = ' ';

      let child_len = children.len();
      for (index, child) in children.iter().enumerate() {
        let is_last = index + 1 == child_len;
        // corner piece for the last sibling, tee for the rest
        let sibling_connector = if is_last {
          LAST_SIBLING_CONNECTOR
        } else {
          SIBLING_CONNECTOR
        };
        // branch downward only if the child itself has children
        let child_connector = if child.children.is_empty() {
          CHILD_NO_DEPS_CONNECTOR
        } else {
          CHILD_DEPS_CONNECTOR
        };
        writeln!(
          writer,
          "{} {}",
          colors::gray(format!(
            "{prefix}{sibling_connector}─{child_connector}"
          )),
          child.text
        )?;
        // extend the prefix: a vertical guide while more siblings follow,
        // blank space under the last one
        let child_prefix = format!(
          "{}{}{}",
          prefix,
          if is_last {
            EMPTY_CONNECTOR
          } else {
            VERTICAL_CONNECTOR
          },
          EMPTY_CONNECTOR
        );
        print_children(writer, &child_prefix, &child.children)?;
      }

      Ok(())
    }

    writeln!(writer, "{}", self.text)?;
    print_children(writer, "", &self.children)?;
    Ok(())
  }
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -227,6 +227,7 @@ pub struct WorkspaceFactoryOptions<
pub type WorkspaceFactoryRc<TSys> =
crate::sync::MaybeArc<WorkspaceFactory<TSys>>;
#[sys_traits::auto_impl]
pub trait WorkspaceFactorySys:
EnvCacheDir
+ EnvHomeDir
@ -251,33 +252,7 @@ pub trait WorkspaceFactorySys:
{
}
impl<
T: EnvCacheDir
+ EnvHomeDir
+ EnvVar
+ EnvCurrentDir
+ FsCanonicalize
+ FsCreateDirAll
+ FsMetadata
+ FsOpen
+ FsRead
+ FsReadDir
+ FsRemoveFile
+ FsRename
+ SystemRandom
+ SystemTimeNow
+ ThreadSleep
+ std::fmt::Debug
+ MaybeSend
+ MaybeSync
+ Clone
+ 'static,
> WorkspaceFactorySys for T
{
}
pub struct WorkspaceFactory<TSys: WorkspaceFactorySys + sys_traits::ThreadSleep>
{
pub struct WorkspaceFactory<TSys: WorkspaceFactorySys> {
sys: TSys,
deno_dir_path: DenoDirPathProviderRc<TSys>,
global_http_cache: Deferred<GlobalHttpCacheRc<TSys>>,

View file

@ -43,9 +43,11 @@ use crate::workspace::WorkspaceResolvePkgJsonFolderError;
use crate::workspace::WorkspaceResolver;
pub mod cjs;
pub mod display;
pub mod factory;
#[cfg(feature = "graph")]
pub mod graph;
pub mod lockfile;
pub mod npm;
pub mod npmrc;
mod sync;
@ -132,16 +134,12 @@ pub struct NodeAndNpmReqResolver<
>,
}
#[sys_traits::auto_impl]
pub trait DenoResolverSys:
FsCanonicalize + FsMetadata + FsRead + FsReadDir + std::fmt::Debug
{
}
impl<T> DenoResolverSys for T where
T: FsCanonicalize + FsMetadata + FsRead + FsReadDir + std::fmt::Debug
{
}
pub struct DenoResolverOptions<
'a,
TInNpmPackageChecker: InNpmPackageChecker,

View file

@ -3,45 +3,37 @@
use std::collections::HashSet;
use std::path::PathBuf;
use deno_config::deno_json::ConfigFile;
use anyhow::Context;
use anyhow::Error as AnyError;
use deno_config::workspace::Workspace;
use deno_core::anyhow::Context;
use deno_core::error::AnyError;
use deno_core::parking_lot::Mutex;
use deno_core::parking_lot::MutexGuard;
use deno_core::serde_json;
use deno_error::JsErrorBox;
use deno_lockfile::Lockfile;
use deno_lockfile::NpmPackageInfoProvider;
use deno_lockfile::WorkspaceMemberConfig;
use deno_package_json::PackageJsonDepValue;
use deno_path_util::fs::atomic_write_file_with_retries;
use deno_runtime::deno_node::PackageJson;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::npm::NpmPackageReqReference;
use indexmap::IndexMap;
use crate::args::deno_json::import_map_deps;
use crate::args::DenoSubcommand;
use crate::args::InstallFlags;
use crate::cache;
use crate::sys::CliSys;
use crate::Flags;
use node_resolver::PackageJson;
use parking_lot::Mutex;
use parking_lot::MutexGuard;
#[derive(Debug)]
pub struct CliLockfileReadFromPathOptions {
pub struct LockfileReadFromPathOptions {
pub file_path: PathBuf,
pub frozen: bool,
/// Causes the lockfile to only be read from, but not written to.
pub skip_write: bool,
}
#[derive(Debug)]
pub struct CliLockfile {
sys: CliSys,
lockfile: Mutex<Lockfile>,
pub filename: PathBuf,
frozen: bool,
skip_write: bool,
#[sys_traits::auto_impl]
pub trait LockfileSys:
deno_path_util::fs::AtomicWriteFileWithRetriesSys
+ sys_traits::FsRead
+ std::fmt::Debug
{
}
pub struct Guard<'a, T> {
@ -62,8 +54,18 @@ impl<T> std::ops::DerefMut for Guard<'_, T> {
}
}
#[derive(Debug)]
pub struct LockfileFlags {
pub no_lock: bool,
pub frozen_lockfile: Option<bool>,
pub lock: Option<PathBuf>,
pub skip_write: bool,
pub no_config: bool,
pub no_npm: bool,
}
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum AtomicWriteFileWithRetriesError {
pub enum LockfileWriteError {
#[class(inherit)]
#[error(transparent)]
Changed(JsErrorBox),
@ -72,7 +74,16 @@ pub enum AtomicWriteFileWithRetriesError {
Io(#[source] std::io::Error),
}
impl CliLockfile {
#[derive(Debug)]
pub struct LockfileLock<TSys: LockfileSys> {
sys: TSys,
lockfile: Mutex<Lockfile>,
pub filename: PathBuf,
frozen: bool,
skip_write: bool,
}
impl<TSys: LockfileSys> LockfileLock<TSys> {
/// Get the inner deno_lockfile::Lockfile.
pub fn lock(&self) -> Guard<Lockfile> {
Guard {
@ -91,40 +102,39 @@ impl CliLockfile {
self.lockfile.lock().overwrite
}
pub fn write_if_changed(
&self,
) -> Result<(), AtomicWriteFileWithRetriesError> {
pub fn write_if_changed(&self) -> Result<(), LockfileWriteError> {
if self.skip_write {
return Ok(());
}
self
.error_if_changed()
.map_err(AtomicWriteFileWithRetriesError::Changed)?;
.map_err(LockfileWriteError::Changed)?;
let mut lockfile = self.lockfile.lock();
let Some(bytes) = lockfile.resolve_write_bytes() else {
return Ok(()); // nothing to do
};
// do an atomic write to reduce the chance of multiple deno
// processes corrupting the file
const CACHE_PERM: u32 = 0o644;
atomic_write_file_with_retries(
&self.sys,
&lockfile.filename,
&bytes,
cache::CACHE_PERM,
CACHE_PERM,
)
.map_err(AtomicWriteFileWithRetriesError::Io)?;
.map_err(LockfileWriteError::Io)?;
lockfile.has_content_changed = false;
Ok(())
}
pub async fn discover(
sys: &CliSys,
flags: &Flags,
sys: TSys,
flags: LockfileFlags,
workspace: &Workspace,
maybe_external_import_map: Option<&serde_json::Value>,
api: &(dyn NpmPackageInfoProvider + Send + Sync),
) -> Result<Option<CliLockfile>, AnyError> {
) -> Result<Option<Self>, AnyError> {
fn pkg_json_deps(
maybe_pkg_json: Option<&PackageJson>,
) -> HashSet<JsrDepPackageReq> {
@ -150,24 +160,12 @@ impl CliLockfile {
})
.collect()
}
fn deno_json_deps(
maybe_deno_json: Option<&ConfigFile>,
) -> HashSet<JsrDepPackageReq> {
maybe_deno_json
.map(crate::args::deno_json::deno_json_deps)
.unwrap_or_default()
}
if flags.no_lock
|| matches!(
flags.subcommand,
DenoSubcommand::Install(InstallFlags::Global(..))
| DenoSubcommand::Uninstall(_)
)
{
if flags.no_lock {
return Ok(None);
}
let file_path = match flags.lock {
Some(ref lock) => PathBuf::from(lock),
Some(path) => path,
None => match workspace.resolve_lockfile_path()? {
Some(path) => path,
None => return Ok(None),
@ -183,10 +181,10 @@ impl CliLockfile {
});
let lockfile = Self::read_from_path(
sys,
CliLockfileReadFromPathOptions {
LockfileReadFromPathOptions {
file_path,
frozen,
skip_write: flags.internal.lockfile_skip_write,
skip_write: flags.skip_write,
},
api,
)
@ -198,7 +196,11 @@ impl CliLockfile {
dependencies: if let Some(map) = maybe_external_import_map {
import_map_deps(map)
} else {
deno_json_deps(root_folder.deno_json.as_deref())
root_folder
.deno_json
.as_deref()
.map(deno_json_deps)
.unwrap_or_default()
},
},
members: workspace
@ -220,7 +222,11 @@ impl CliLockfile {
{
let config = WorkspaceMemberConfig {
package_json_deps: pkg_json_deps(folder.pkg_json.as_deref()),
dependencies: deno_json_deps(folder.deno_json.as_deref()),
dependencies: folder
.deno_json
.as_deref()
.map(deno_json_deps)
.unwrap_or_default(),
};
if config.package_json_deps.is_empty()
&& config.dependencies.is_empty()
@ -284,18 +290,18 @@ impl CliLockfile {
};
lockfile.set_workspace_config(deno_lockfile::SetWorkspaceConfigOptions {
no_npm: flags.no_npm,
no_config: flags.config_flag == super::ConfigFlag::Disabled,
no_config: flags.no_config,
config,
});
Ok(Some(lockfile))
}
pub async fn read_from_path(
sys: &CliSys,
opts: CliLockfileReadFromPathOptions,
sys: TSys,
opts: LockfileReadFromPathOptions,
api: &(dyn deno_lockfile::NpmPackageInfoProvider + Send + Sync),
) -> Result<CliLockfile, AnyError> {
let lockfile = match std::fs::read_to_string(&opts.file_path) {
) -> Result<LockfileLock<TSys>, AnyError> {
let lockfile = match sys.fs_read_to_string(&opts.file_path) {
Ok(text) => {
Lockfile::new(
deno_lockfile::NewLockfileOptions {
@ -316,8 +322,8 @@ impl CliLockfile {
});
}
};
Ok(CliLockfile {
sys: sys.clone(),
Ok(LockfileLock {
sys,
filename: lockfile.filename.clone(),
lockfile: Mutex::new(lockfile),
frozen: opts.frozen,
@ -331,10 +337,12 @@ impl CliLockfile {
}
let lockfile = self.lockfile.lock();
if lockfile.has_content_changed {
let contents =
std::fs::read_to_string(&lockfile.filename).unwrap_or_default();
let contents = self
.sys
.fs_read_to_string(&lockfile.filename)
.unwrap_or_default();
let new_contents = lockfile.as_json_string();
let diff = crate::util::diff::diff(&contents, &new_contents);
let diff = crate::display::diff(&contents, &new_contents);
// has an extra newline at the end
let diff = diff.trim_end();
Err(JsErrorBox::generic(format!("The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}")))
@ -343,3 +351,97 @@ impl CliLockfile {
}
}
}
/// Collects every JSR/npm dependency requirement referenced by an import
/// map's "imports" and "scopes" entries.
fn import_map_deps(
  import_map: &serde_json::Value,
) -> HashSet<JsrDepPackageReq> {
  let top_level = imports_values(import_map.get("imports"));
  let scoped = scope_values(import_map.get("scopes"));
  values_to_set(top_level.into_iter().chain(scoped))
}
/// Collects every JSR/npm dependency requirement referenced by a deno.json
/// config: its "imports"/"scopes" maps plus any specifiers reachable via
/// compilerOptions (jsxImportSource, jsxImportSourceTypes, types).
fn deno_json_deps(
  config: &deno_config::deno_json::ConfigFile,
) -> HashSet<JsrDepPackageReq> {
  let values = imports_values(config.json.imports.as_ref())
    .into_iter()
    .chain(scope_values(config.json.scopes.as_ref()));
  let mut set = values_to_set(values);

  if let Some(serde_json::Value::Object(compiler_options)) =
    &config.json.compiler_options
  {
    // add jsxImportSource
    if let Some(serde_json::Value::String(value)) =
      compiler_options.get("jsxImportSource")
    {
      if let Some(dep_req) = value_to_dep_req(value) {
        set.insert(dep_req);
      }
    }
    // add jsxImportSourceTypes
    if let Some(serde_json::Value::String(value)) =
      compiler_options.get("jsxImportSourceTypes")
    {
      if let Some(dep_req) = value_to_dep_req(value) {
        set.insert(dep_req);
      }
    }
    // add the dependencies in the types array
    if let Some(serde_json::Value::Array(types)) = compiler_options.get("types")
    {
      for value in types {
        if let serde_json::Value::String(value) = value {
          if let Some(dep_req) = value_to_dep_req(value) {
            set.insert(dep_req);
          }
        }
      }
    }
  }

  set
}
/// Returns the string values of a JSON object (an import map "imports"
/// block); non-object input or non-string values yield nothing.
fn imports_values(value: Option<&serde_json::Value>) -> Vec<&String> {
  match value.and_then(|v| v.as_object()) {
    Some(obj) => obj
      .values()
      .filter_map(|v| match v {
        serde_json::Value::String(text) => Some(text),
        _ => None,
      })
      .collect(),
    None => Vec::new(),
  }
}
/// Flattens the string values of every nested imports object inside a
/// "scopes" JSON object; non-object input yields nothing.
fn scope_values(value: Option<&serde_json::Value>) -> Vec<&String> {
  match value.and_then(|v| v.as_object()) {
    Some(obj) => obj
      .values()
      .flat_map(|scope| imports_values(Some(scope)))
      .collect(),
    None => Vec::new(),
  }
}
/// Parses each specifier string into a dependency requirement, silently
/// skipping anything that is neither a `jsr:` nor an `npm:` reference.
fn values_to_set<'a>(
  values: impl Iterator<Item = &'a String>,
) -> HashSet<JsrDepPackageReq> {
  values
    .filter_map(|value| value_to_dep_req(value))
    .collect()
}
/// Parses a specifier as a `jsr:` reference first, then as `npm:`;
/// returns `None` when it is neither.
fn value_to_dep_req(value: &str) -> Option<JsrDepPackageReq> {
  JsrPackageReqReference::from_str(value)
    .ok()
    .map(|req_ref| JsrDepPackageReq::jsr(req_ref.into_inner().req))
    .or_else(|| {
      NpmPackageReqReference::from_str(value)
        .ok()
        .map(|req_ref| JsrDepPackageReq::npm(req_ref.into_inner().req))
    })
}

View file

@ -4,6 +4,7 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::fmt;
use std::path::Path;
use std::path::PathBuf;
@ -17,6 +18,7 @@ use deno_config::workspace::Workspace;
use deno_config::workspace::WorkspaceDirectory;
use deno_error::JsError;
use deno_media_type::MediaType;
use deno_npm::registry::NpmPackageVersionInfo;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonDepWorkspaceReq;
@ -26,10 +28,14 @@ use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_path_util::url_to_file_path;
use deno_semver::jsr::JsrPackageReqReference;
use deno_semver::package::PackageName;
use deno_semver::package::PackageReq;
use deno_semver::RangeSetOrTag;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use deno_terminal::colors;
use import_map::specifier::SpecifierError;
use import_map::ImportMap;
use import_map::ImportMapDiagnostic;
@ -1672,6 +1678,126 @@ impl ScopedJsxImportSourceConfig {
}
}
/// Patched npm package version infos gathered from the workspace's patch
/// package.jsons, keyed by package name (a name may have multiple versions).
#[derive(Debug, Default)]
pub struct WorkspaceNpmPatchPackages(
  pub HashMap<PackageName, Vec<NpmPackageVersionInfo>>,
);
impl WorkspaceNpmPatchPackages {
  /// Builds the patch-package map from the workspace's patch package.jsons.
  ///
  /// Only active when the `npm-patch` unstable flag is set; otherwise any
  /// configured patch packages are ignored with a warning. Individual
  /// package.jsons missing a name or with an unusable version are skipped
  /// with a warning rather than failing.
  pub fn from_workspace(workspace: &Workspace) -> Self {
    let mut entries: HashMap<PackageName, Vec<NpmPackageVersionInfo>> =
      HashMap::new();
    if workspace.has_unstable("npm-patch") {
      for pkg_json in workspace.patch_pkg_jsons() {
        let Some(name) = pkg_json.name.as_ref() else {
          log::warn!(
            "{} Patch package ignored because package.json was missing name field.\n    at {}",
            colors::yellow("Warning"),
            pkg_json.path.display(),
          );
          continue;
        };
        match pkg_json_to_version_info(pkg_json) {
          Ok(version_info) => {
            let entry = entries.entry(PackageName::from_str(name)).or_default();
            entry.push(version_info);
          }
          Err(err) => {
            log::warn!(
              "{} {}\n    at {}",
              colors::yellow("Warning"),
              err,
              pkg_json.path.display(),
            );
          }
        }
      }
    } else if workspace.patch_pkg_jsons().next().is_some() {
      // patch packages configured but the unstable flag is absent
      log::warn!(
        "{} {}\n    at {}",
        colors::yellow("Warning"),
        "Patching npm packages is only supported when setting \"unstable\": [\"npm-patch\"] in the root deno.json",
        workspace
          .root_deno_json()
          .map(|d| d.specifier.to_string())
          .unwrap_or_else(|| workspace.root_dir().to_string()),
      );
    }
    Self(entries)
  }
}
/// Reasons a patch package.json cannot be converted into version info;
/// the messages double as user-facing warnings.
#[derive(Debug, Error)]
enum PkgJsonToVersionInfoError {
  #[error(
    "Patch package ignored because package.json was missing version field."
  )]
  VersionMissing,
  #[error("Patch package ignored because package.json version field could not be parsed.")]
  VersionInvalid {
    #[source]
    source: deno_semver::npm::NpmVersionParseError,
  },
}
/// Builds npm-registry-style version info from a local package.json so
/// that a patch package can participate in npm resolution like a
/// published package would.
fn pkg_json_to_version_info(
  pkg_json: &deno_package_json::PackageJson,
) -> Result<NpmPackageVersionInfo, PkgJsonToVersionInfoError> {
  // converts an optional dependency map into owned stack strings
  fn to_dep_map(
    deps: Option<&IndexMap<String, String>>,
  ) -> HashMap<StackString, StackString> {
    match deps {
      Some(deps) => deps
        .iter()
        .map(|(k, v)| (StackString::from_str(k), StackString::from_str(v)))
        .collect(),
      None => HashMap::new(),
    }
  }

  // converts a string slice (e.g. the "os"/"cpu" constraint arrays)
  fn to_small_strings(values: &[String]) -> Vec<SmallStackString> {
    values
      .iter()
      .map(|value| SmallStackString::from_str(value))
      .collect()
  }

  let version_text = pkg_json
    .version
    .as_ref()
    .ok_or(PkgJsonToVersionInfoError::VersionMissing)?;
  let version = Version::parse_from_npm(version_text)
    .map_err(|source| PkgJsonToVersionInfoError::VersionInvalid { source })?;
  Ok(NpmPackageVersionInfo {
    version,
    // patch packages are local, so there is no registry tarball
    dist: None,
    // silently drop a malformed bin entry rather than erroring
    bin: pkg_json
      .bin
      .as_ref()
      .and_then(|v| serde_json::from_value(v.clone()).ok()),
    dependencies: to_dep_map(pkg_json.dependencies.as_ref()),
    optional_dependencies: to_dep_map(pkg_json.optional_dependencies.as_ref()),
    peer_dependencies: to_dep_map(pkg_json.peer_dependencies.as_ref()),
    peer_dependencies_meta: pkg_json
      .peer_dependencies_meta
      .clone()
      .and_then(|m| serde_json::from_value(m).ok())
      .unwrap_or_default(),
    os: pkg_json
      .os
      .as_deref()
      .map(to_small_strings)
      .unwrap_or_default(),
    cpu: pkg_json
      .cpu
      .as_deref()
      .map(to_small_strings)
      .unwrap_or_default(),
    scripts: pkg_json
      .scripts
      .as_ref()
      .map(|scripts| {
        scripts
          .iter()
          .map(|(k, v)| (SmallStackString::from_str(k), v.clone()))
          .collect()
      })
      .unwrap_or_default(),
    // not worth increasing memory for showing a deprecated
    // message for patched packages
    deprecated: None,
  })
}
#[cfg(test)]
mod test {
use std::path::Path;
@ -1680,6 +1806,7 @@ mod test {
use deno_config::workspace::WorkspaceDirectory;
use deno_config::workspace::WorkspaceDiscoverOptions;
use deno_config::workspace::WorkspaceDiscoverStart;
use deno_npm::registry::NpmPeerDependencyMeta;
use deno_path_util::url_from_directory_path;
use deno_path_util::url_from_file_path;
use deno_semver::VersionReq;
@ -2849,4 +2976,100 @@ mod test {
)
.unwrap()
}
#[test]
fn test_pkg_json_to_version_info() {
  // helper: parse package.json text and run the conversion under test
  fn convert(
    text: &str,
  ) -> Result<NpmPackageVersionInfo, PkgJsonToVersionInfoError> {
    let pkg_json = deno_package_json::PackageJson::load_from_string(
      PathBuf::from("package.json"),
      text,
    )
    .unwrap();
    pkg_json_to_version_info(&pkg_json)
  }

  // fully populated package.json: every supported field should be
  // carried over into the resulting version info
  assert_eq!(
    convert(
      r#"{
  "name": "pkg",
  "version": "1.0.0",
  "bin": "./bin.js",
  "dependencies": {
    "my-dep": "1"
  },
  "optionalDependencies": {
    "optional-dep": "~1"
  },
  "peerDependencies": {
    "my-peer-dep": "^2"
  },
  "peerDependenciesMeta": {
    "my-peer-dep": {
      "optional": true
    }
  },
  "os": ["win32"],
  "cpu": ["x86_64"],
  "scripts": {
    "script": "testing",
    "postInstall": "testing2"
  },
  "deprecated": "ignored for now"
}"#
    )
    .unwrap(),
    NpmPackageVersionInfo {
      version: Version::parse_from_npm("1.0.0").unwrap(),
      dist: None,
      bin: Some(deno_npm::registry::NpmPackageVersionBinEntry::String(
        "./bin.js".to_string()
      )),
      dependencies: HashMap::from([(
        StackString::from_static("my-dep"),
        StackString::from_static("1")
      )]),
      optional_dependencies: HashMap::from([(
        StackString::from_static("optional-dep"),
        StackString::from_static("~1")
      )]),
      peer_dependencies: HashMap::from([(
        StackString::from_static("my-peer-dep"),
        StackString::from_static("^2")
      )]),
      peer_dependencies_meta: HashMap::from([(
        StackString::from_static("my-peer-dep"),
        NpmPeerDependencyMeta { optional: true }
      )]),
      os: vec![SmallStackString::from_static("win32")],
      cpu: vec![SmallStackString::from_static("x86_64")],
      scripts: HashMap::from([
        (
          SmallStackString::from_static("script"),
          "testing".to_string(),
        ),
        (
          SmallStackString::from_static("postInstall"),
          "testing2".to_string(),
        )
      ]),
      // we don't bother ever setting this because we don't store it in deno_package_json
      deprecated: None,
    }
  );

  // missing "version" field maps to the dedicated error variant
  match convert("{}").unwrap_err() {
    PkgJsonToVersionInfoError::VersionMissing => {
      // ok
    }
    _ => unreachable!(),
  }
  // malformed "version" surfaces the underlying semver parse error
  match convert(r#"{ "version": "1.0.~" }"#).unwrap_err() {
    PkgJsonToVersionInfoError::VersionInvalid { source: err } => {
      assert_eq!(err.to_string(), "Invalid npm version");
    }
    _ => unreachable!(),
  }
}
}

View file

@ -83,7 +83,7 @@ pub struct NpmCacheHttpClientBytesResponse {
}
#[async_trait::async_trait(?Send)]
pub trait NpmCacheHttpClient: Send + Sync + 'static {
pub trait NpmCacheHttpClient: std::fmt::Debug + Send + Sync + 'static {
async fn download_with_retries_on_any_tokio_runtime(
&self,
url: Url,
@ -142,6 +142,7 @@ impl NpmCacheSetting {
}
}
#[sys_traits::auto_impl]
pub trait NpmCacheSys:
FsCanonicalize
+ FsCreateDirAll
@ -158,30 +159,11 @@ pub trait NpmCacheSys:
+ Send
+ Sync
+ Clone
+ std::fmt::Debug
+ 'static
{
}
impl<T> NpmCacheSys for T where
T: FsCanonicalize
+ FsCreateDirAll
+ FsHardLink
+ FsMetadata
+ FsOpen
+ FsRead
+ FsReadDir
+ FsRemoveDirAll
+ FsRemoveFile
+ FsRename
+ ThreadSleep
+ SystemRandom
+ Send
+ Sync
+ Clone
+ 'static
{
}
/// Stores a single copy of npm packages in a cache.
#[derive(Debug)]
pub struct NpmCache<TSys: NpmCacheSys> {

View file

@ -0,0 +1,54 @@
# Copyright 2018-2025 the Deno authors. MIT license.

[package]
name = "deno_npm_installer"
version = "0.0.1"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "Installer of npm packages used in Deno"

[lib]
# crate root lives next to the manifest rather than in src/
path = "lib.rs"

[dependencies]
anyhow.workspace = true
async-trait.workspace = true
bincode.workspace = true
boxed_error.workspace = true
capacity_builder.workspace = true
deno_config.workspace = true
deno_error.workspace = true
deno_lockfile.workspace = true
deno_npm.workspace = true
deno_npm_cache.workspace = true
deno_package_json.workspace = true
deno_path_util.workspace = true
deno_resolver = { workspace = true, features = ["sync"] }
deno_semver.workspace = true
deno_terminal.workspace = true
deno_unsync.workspace = true
fs3.workspace = true
futures.workspace = true
log.workspace = true
parking_lot.workspace = true
pathdiff.workspace = true
rustc-hash.workspace = true
serde.workspace = true
serde_json.workspace = true
sys_traits.workspace = true
thiserror.workspace = true
tokio.workspace = true
tokio-util.workspace = true
twox-hash.workspace = true
url.workspace = true

# Windows-only dependencies (junction points and Win32 error codes /
# symlink handling).
[target.'cfg(windows)'.dependencies]
junction.workspace = true
winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", "shlobj", "tlhelp32", "winbase", "winerror", "winsock2"] }

# Only used by the test suite.
[dev-dependencies]
sys_traits = { workspace = true, features = ["memory", "real", "serde_json"] }
test_util.workspace = true

View file

@ -0,0 +1,6 @@
# deno_npm_installer
[![crates](https://img.shields.io/crates/v/deno_npm_installer.svg)](https://crates.io/crates/deno_npm_installer)
[![docs](https://docs.rs/deno_npm_installer/badge.svg)](https://docs.rs/deno_npm_installer)
Installer for npm packages.

View file

@ -460,6 +460,10 @@ fn symlink_bin_entry<'a>(
let link = bin_node_modules_dir_path.join(bin_name);
let original = package_path.join(bin_script);
fn relative_path(from: &Path, to: &Path) -> Option<PathBuf> {
pathdiff::diff_paths(to, from)
}
let found = make_executable_if_exists(&original).map_err(|source| {
BinEntriesError::SetUpBin {
name: bin_name.to_string(),
@ -478,8 +482,7 @@ fn symlink_bin_entry<'a>(
}
let original_relative =
crate::util::path::relative_path(bin_node_modules_dir_path, &original)
.unwrap_or(original);
relative_path(bin_node_modules_dir_path, &original).unwrap_or(original);
if let Err(err) = symlink(&original_relative, &link) {
if err.kind() == io::ErrorKind::AlreadyExists {

View file

@ -3,31 +3,13 @@
use std::path::Path;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::parking_lot::RwLock;
use deno_error::JsErrorBox;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::NpmPackageExtraInfo;
use deno_npm::NpmResolutionPackage;
use deno_resolver::workspace::WorkspaceNpmPatchPackages;
use deno_semver::package::PackageNv;
pub use deno_task_executor::DenoTaskLifeCycleScriptsExecutor;
use super::PackageCaching;
use crate::npm::CliNpmCache;
use crate::npm::WorkspaceNpmPatchPackages;
pub mod bin_entries;
mod deno_task_executor;
pub mod lifecycle_scripts;
/// Part of the resolution that interacts with the file system.
#[async_trait(?Send)]
pub trait NpmPackageFsInstaller: std::fmt::Debug + Send + Sync {
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
) -> Result<(), JsErrorBox>;
}
use parking_lot::RwLock;
pub struct CachedNpmPackageExtraInfoProvider {
inner: Arc<NpmPackageExtraInfoProvider>,
@ -64,34 +46,6 @@ impl CachedNpmPackageExtraInfoProvider {
}
}
pub struct NpmPackageExtraInfoProvider {
npm_cache: Arc<CliNpmCache>,
npm_registry_info_provider: Arc<dyn NpmRegistryApi + Send + Sync>,
workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
}
impl std::fmt::Debug for NpmPackageExtraInfoProvider {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("NpmPackageExtraInfoProvider")
.field("npm_cache", &self.npm_cache)
.finish()
}
}
impl NpmPackageExtraInfoProvider {
pub fn new(
npm_cache: Arc<CliNpmCache>,
npm_registry_info_provider: Arc<dyn NpmRegistryApi + Send + Sync>,
workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
) -> Self {
Self {
npm_cache,
npm_registry_info_provider,
workspace_patch_packages,
}
}
}
#[derive(Debug, Clone, Copy, Default)]
pub struct ExpectedExtraInfo {
pub deprecated: bool,
@ -109,6 +63,29 @@ impl ExpectedExtraInfo {
}
}
/// Provides the "extra" info for an npm package, sourced from the
/// registry API and the workspace's patched packages.
pub struct NpmPackageExtraInfoProvider {
  // queried for package info coming from the npm registry
  npm_registry_info_provider: Arc<dyn NpmRegistryApi + Send + Sync>,
  // patch packages declared in the workspace
  workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
}

impl std::fmt::Debug for NpmPackageExtraInfoProvider {
  // manual impl: the trait-object field has no `Debug` bound, so we
  // just print the type name
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.debug_struct("NpmPackageExtraInfoProvider").finish()
  }
}

impl NpmPackageExtraInfoProvider {
  pub fn new(
    npm_registry_info_provider: Arc<dyn NpmRegistryApi + Send + Sync>,
    workspace_patch_packages: Arc<WorkspaceNpmPatchPackages>,
  ) -> Self {
    Self {
      npm_registry_info_provider,
      workspace_patch_packages,
    }
  }
}
impl NpmPackageExtraInfoProvider {
pub async fn get_package_extra_info(
&self,
@ -171,12 +148,11 @@ impl NpmPackageExtraInfoProvider {
) -> Result<NpmPackageExtraInfo, JsErrorBox> {
let package_json_path = package_path.join("package.json");
let extra_info: NpmPackageExtraInfo =
deno_core::unsync::spawn_blocking(move || {
deno_unsync::spawn_blocking(move || {
let package_json = std::fs::read_to_string(&package_json_path)
.map_err(JsErrorBox::from_err)?;
let extra_info: NpmPackageExtraInfo =
deno_core::serde_json::from_str(&package_json)
.map_err(JsErrorBox::from_err)?;
serde_json::from_str(&package_json).map_err(JsErrorBox::from_err)?;
Ok::<_, JsErrorBox>(extra_info)
})

View file

@ -0,0 +1,274 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use crate::Reporter;
/// State held while the flag owns the file lock; dropping it releases
/// the lock and stops the background keep-alive thread.
struct LaxSingleProcessFsFlagInner {
  // path of the lock file (kept for log messages)
  file_path: PathBuf,
  // open handle that holds the advisory file lock
  fs_file: std::fs::File,
  // cancelled on drop to stop the thread refreshing the ".lock.poll" file
  finished_token: Arc<tokio_util::sync::CancellationToken>,
}
impl Drop for LaxSingleProcessFsFlagInner {
  fn drop(&mut self) {
    // stop the background thread that keeps refreshing the poll file
    self.finished_token.cancel();
    // then release the advisory lock; failure is only worth a debug log
    match fs3::FileExt::unlock(&self.fs_file) {
      Ok(()) => {}
      Err(err) => {
        log::debug!(
          "Failed releasing lock for {}. {:#}",
          self.file_path.display(),
          err
        );
      }
    }
  }
}
/// A file system based flag that will attempt to synchronize multiple
/// processes so they go one after the other. In scenarios where
/// synchronization cannot be achieved, it will allow the current process
/// to proceed.
///
/// This should only be used in places where it's ideal for multiple
/// processes to not update something on the file system at the same time,
/// but it's not that big of a deal.
pub struct LaxSingleProcessFsFlag(
  // `None` means the lock could not be acquired (or reliably waited on)
  // and the process proceeds without it
  #[allow(dead_code)] Option<LaxSingleProcessFsFlagInner>,
);
impl LaxSingleProcessFsFlag {
  /// Attempts to acquire an exclusive advisory lock on `file_path`,
  /// waiting for other processes to release it. Proceeds without the
  /// lock (returns `Self(None)`) when the lock file cannot be opened,
  /// the previous holder appears dead (its poll file stopped updating),
  /// or too many consecutive polling errors occur. `long_wait_message`
  /// is surfaced through `reporter` once the wait exceeds ~1 second.
  pub async fn lock(
    file_path: PathBuf,
    reporter: &impl Reporter,
    long_wait_message: &str,
  ) -> Self {
    log::debug!("Acquiring file lock at {}", file_path.display());
    use fs3::FileExt;
    // sibling file the lock holder continually touches to prove liveness
    let last_updated_path = file_path.with_extension("lock.poll");
    let start_instant = std::time::Instant::now();
    let open_result = std::fs::OpenOptions::new()
      .read(true)
      .write(true)
      .create(true)
      .truncate(false)
      .open(&file_path);

    match open_result {
      Ok(fs_file) => {
        let mut pb_update_guard = None;
        let mut error_count = 0;
        // give up after 10 consecutive polling errors
        while error_count < 10 {
          let lock_result = fs_file.try_lock_exclusive();
          let poll_file_update_ms = 100;
          match lock_result {
            Ok(_) => {
              log::debug!("Acquired file lock at {}", file_path.display());
              let _ignore = std::fs::write(&last_updated_path, "");
              let token = Arc::new(tokio_util::sync::CancellationToken::new());

              // Spawn a blocking task that will continually update a file
              // signalling the lock is alive. This is a fail safe for when
              // a file lock is never released. For example, on some operating
              // systems, if a process does not release the lock (say it's
              // killed), then the OS may release it at an indeterminate time
              //
              // This uses a blocking task because we use a single threaded
              // runtime and this is time sensitive so we don't want it to update
              // at the whims of whatever is occurring on the runtime thread.
              deno_unsync::spawn_blocking({
                let token = token.clone();
                let last_updated_path = last_updated_path.clone();
                move || {
                  let mut i = 0;
                  while !token.is_cancelled() {
                    i += 1;
                    let _ignore =
                      std::fs::write(&last_updated_path, i.to_string());
                    std::thread::sleep(Duration::from_millis(
                      poll_file_update_ms,
                    ));
                  }
                }
              });

              return Self(Some(LaxSingleProcessFsFlagInner {
                file_path,
                fs_file,
                finished_token: token,
              }));
            }
            Err(_) => {
              // show a message if it's been a while
              if pb_update_guard.is_none()
                && start_instant.elapsed().as_millis() > 1_000
              {
                let guard = reporter.on_blocking(long_wait_message);
                pb_update_guard = Some(guard);
              }

              // sleep for a little bit
              tokio::time::sleep(Duration::from_millis(20)).await;

              // Poll the last updated path to check if it's stopped updating,
              // which is an indication that the file lock is claimed, but
              // was never properly released.
              match std::fs::metadata(&last_updated_path)
                .and_then(|p| p.modified())
              {
                Ok(last_updated_time) => {
                  let current_time = std::time::SystemTime::now();
                  match current_time.duration_since(last_updated_time) {
                    Ok(duration) => {
                      if duration.as_millis()
                        > (poll_file_update_ms * 2) as u128
                      {
                        // the other process hasn't updated this file in a long time
                        // so maybe it was killed and the operating system hasn't
                        // released the file lock yet
                        return Self(None);
                      } else {
                        error_count = 0; // reset
                      }
                    }
                    Err(_) => {
                      // system clock moved backwards; count as an error
                      error_count += 1;
                    }
                  }
                }
                Err(_) => {
                  // poll file missing/unreadable; count as an error
                  error_count += 1;
                }
              }
            }
          }
        }
        drop(pb_update_guard); // explicit for clarity
        // too many consecutive errors: proceed without holding the lock
        Self(None)
      }
      Err(err) => {
        log::debug!(
          "Failed to open file lock at {}. {:#}",
          file_path.display(),
          err
        );
        Self(None) // let the process through
      }
    }
  }
}
#[cfg(test)]
mod test {
  use std::sync::Arc;
  use std::time::Duration;

  use parking_lot::Mutex;
  use test_util::TempDir;
  use tokio::sync::Notify;

  use super::*;
  use crate::LogReporter;

  // Two tasks contend for the same flag; `Notify` signals pin down the
  // handoff order so we can assert which write happened while each task
  // held the lock.
  #[tokio::test]
  async fn lax_fs_lock() {
    let temp_dir = TempDir::new();
    let lock_path = temp_dir.path().join("file.lock");
    let signal1 = Arc::new(Notify::new());
    let signal2 = Arc::new(Notify::new());
    let signal3 = Arc::new(Notify::new());
    let signal4 = Arc::new(Notify::new());
    tokio::spawn({
      let lock_path = lock_path.clone();
      let signal1 = signal1.clone();
      let signal2 = signal2.clone();
      let signal3 = signal3.clone();
      let signal4 = signal4.clone();
      let temp_dir = temp_dir.clone();
      async move {
        let flag = LaxSingleProcessFsFlag::lock(
          lock_path.to_path_buf(),
          &LogReporter,
          "waiting",
        )
        .await;
        signal1.notify_one();
        signal2.notified().await;
        tokio::time::sleep(Duration::from_millis(10)).await; // give the other thread time to acquire the lock
        temp_dir.write("file.txt", "update1");
        signal3.notify_one();
        signal4.notified().await;
        drop(flag);
      }
    });
    let signal5 = Arc::new(Notify::new());
    tokio::spawn({
      let temp_dir = temp_dir.clone();
      let signal5 = signal5.clone();
      async move {
        signal1.notified().await;
        signal2.notify_one();
        // blocks until the first task drops its flag
        let flag = LaxSingleProcessFsFlag::lock(
          lock_path.to_path_buf(),
          &LogReporter,
          "waiting",
        )
        .await;
        temp_dir.write("file.txt", "update2");
        signal5.notify_one();
        drop(flag);
      }
    });
    signal3.notified().await;
    assert_eq!(temp_dir.read_to_string("file.txt"), "update1");
    signal4.notify_one();
    signal5.notified().await;
    assert_eq!(temp_dir.read_to_string("file.txt"), "update2");
  }

  // Ten tasks race for the flag; if the lock serializes them correctly,
  // the read-modify-write of the output file never interleaves and the
  // file content matches the order in which tasks acquired the lock.
  #[tokio::test]
  async fn lax_fs_lock_ordered() {
    let temp_dir = TempDir::new();
    let lock_path = temp_dir.path().join("file.lock");
    let output_path = temp_dir.path().join("output");
    let expected_order = Arc::new(Mutex::new(Vec::new()));
    let count = 10;
    let mut tasks = Vec::with_capacity(count);

    std::fs::write(&output_path, "").unwrap();

    for i in 0..count {
      let lock_path = lock_path.clone();
      let output_path = output_path.clone();
      let expected_order = expected_order.clone();
      tasks.push(tokio::spawn(async move {
        let flag = LaxSingleProcessFsFlag::lock(
          lock_path.to_path_buf(),
          &LogReporter,
          "waiting",
        )
        .await;
        expected_order.lock().push(i.to_string());
        // be extremely racy
        let mut output = std::fs::read_to_string(&output_path).unwrap();
        if !output.is_empty() {
          output.push('\n');
        }
        output.push_str(&i.to_string());
        std::fs::write(&output_path, output).unwrap();
        drop(flag);
      }));
    }

    futures::future::join_all(tasks).await;
    let expected_output = expected_order.lock().join("\n");
    assert_eq!(
      std::fs::read_to_string(output_path).unwrap(),
      expected_output
    );
  }
}

View file

@ -0,0 +1,187 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::io::Error;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use sys_traits::FsCreateDirAll;
use sys_traits::FsDirEntry;
use sys_traits::FsSymlinkDir;
/// File system capabilities required by `clone_dir_recursive` — copy
/// support plus file removal and thread sleep.
#[sys_traits::auto_impl]
pub trait CloneDirRecursiveSys:
  CopyDirRecursiveSys + sys_traits::FsRemoveFile + sys_traits::ThreadSleep
{
}
/// Clones a directory to another directory. The exact method
/// is not guaranteed - it may be a hardlink, copy, or other platform-specific
/// operation.
///
/// Note: Does not handle symlinks.
pub fn clone_dir_recursive<TSys: CloneDirRecursiveSys>(
  sys: &TSys,
  from: &Path,
  to: &Path,
) -> Result<(), CopyDirRecursiveError> {
  if cfg!(target_vendor = "apple") {
    if let Some(parent) = to.parent() {
      sys.fs_create_dir_all(parent)?;
    }
    // Try to clone the whole directory
    match sys.fs_clone_file(from, to) {
      Ok(()) => {}
      Err(err) => {
        // an existing destination or unsupported fs are expected
        // outcomes, so only log the unexpected failures
        let is_expected_failure = matches!(
          err.kind(),
          std::io::ErrorKind::AlreadyExists | std::io::ErrorKind::Unsupported
        );
        if !is_expected_failure {
          log::debug!(
            "Failed to clone dir {:?} to {:?} via clonefile: {}",
            from,
            to,
            err
          );
        }
        // clonefile won't overwrite existing files, so if the dir exists
        // we need to handle it recursively.
        copy_dir_recursive(sys, from, to)?;
      }
    }
    return Ok(());
  }
  // non-apple: prefer hard links, falling back to a plain copy
  match deno_npm_cache::hard_link_dir_recursive(sys, from, to) {
    Ok(()) => Ok(()),
    Err(e) => {
      log::debug!("Failed to hard link dir {:?} to {:?}: {}", from, to, e);
      copy_dir_recursive(sys, from, to)?;
      Ok(())
    }
  }
}
/// Errors from `copy_dir_recursive`, carrying the path(s) involved so
/// failures deep in a recursive copy point at the offending entry.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CopyDirRecursiveError {
  /// Failed creating the destination directory.
  #[class(inherit)]
  #[error("Creating {path}")]
  Creating {
    path: PathBuf,
    #[source]
    #[inherit]
    source: Error,
  },
  /// Failed reading the source directory's entries.
  #[class(inherit)]
  #[error("Reading {path}")]
  Reading {
    path: PathBuf,
    #[source]
    #[inherit]
    source: Error,
  },
  /// A nested directory copy failed (boxed to keep the enum small).
  #[class(inherit)]
  #[error("Dir {from} to {to}")]
  Dir {
    from: PathBuf,
    to: PathBuf,
    #[source]
    #[inherit]
    source: Box<Self>,
  },
  /// Copying an individual file failed.
  #[class(inherit)]
  #[error("Copying {from} to {to}")]
  Copying {
    from: PathBuf,
    to: PathBuf,
    #[source]
    #[inherit]
    source: Error,
  },
  /// Any other I/O error (e.g. iterating dir entries).
  #[class(inherit)]
  #[error(transparent)]
  Other(#[from] Error),
}
/// File system capabilities required by `copy_dir_recursive`.
#[sys_traits::auto_impl]
pub trait CopyDirRecursiveSys:
  sys_traits::FsCopy
  + sys_traits::FsCloneFile
  + sys_traits::FsCreateDir
  + sys_traits::FsHardLink
  + sys_traits::FsReadDir
{
}
/// Copies a directory to another directory.
///
/// Note: Does not handle symlinks.
pub fn copy_dir_recursive<TSys: CopyDirRecursiveSys>(
  sys: &TSys,
  from: &Path,
  to: &Path,
) -> Result<(), CopyDirRecursiveError> {
  sys
    .fs_create_dir_all(to)
    .map_err(|source| CopyDirRecursiveError::Creating {
      path: to.to_path_buf(),
      source,
    })?;
  let entries =
    sys
      .fs_read_dir(from)
      .map_err(|source| CopyDirRecursiveError::Reading {
        path: from.to_path_buf(),
        source,
      })?;
  for entry in entries {
    let entry = entry?;
    let entry_type = entry.file_type()?;
    let source_path = from.join(entry.file_name());
    let dest_path = to.join(entry.file_name());
    if entry_type.is_dir() {
      // recurse, boxing the nested error to record the failing subtree
      copy_dir_recursive(sys, &source_path, &dest_path).map_err(|source| {
        CopyDirRecursiveError::Dir {
          from: source_path.to_path_buf(),
          to: dest_path.to_path_buf(),
          source: Box::new(source),
        }
      })?;
    } else if entry_type.is_file() {
      sys.fs_copy(&source_path, &dest_path).map_err(|source| {
        CopyDirRecursiveError::Copying {
          from: source_path.to_path_buf(),
          to: dest_path.to_path_buf(),
          source,
        }
      })?;
    }
    // other entry kinds (e.g. symlinks) are intentionally skipped
  }
  Ok(())
}
/// Creates a directory symlink `newpath` pointing at `oldpath`,
/// enriching any error message with both endpoints. On Windows, the
/// "privilege not held" and "invalid function" OS errors are remapped
/// to `PermissionDenied`.
pub fn symlink_dir<TSys: sys_traits::BaseFsSymlinkDir>(
  sys: &TSys,
  oldpath: &Path,
  newpath: &Path,
) -> Result<(), Error> {
  let describe = |err: Error, kind: Option<ErrorKind>| {
    let kind = kind.unwrap_or_else(|| err.kind());
    Error::new(
      kind,
      format!(
        "{}, symlink '{}' -> '{}'",
        err,
        oldpath.display(),
        newpath.display()
      ),
    )
  };
  match sys.fs_symlink_dir(oldpath, newpath) {
    Ok(()) => Ok(()),
    Err(err) => {
      #[cfg(windows)]
      {
        if let Some(code) = err.raw_os_error() {
          if code as u32 == winapi::shared::winerror::ERROR_PRIVILEGE_NOT_HELD
            || code as u32 == winapi::shared::winerror::ERROR_INVALID_FUNCTION
          {
            return Err(describe(err, Some(ErrorKind::PermissionDenied)));
          }
        }
      }
      Err(describe(err, None))
    }
  }
}

View file

@ -5,32 +5,39 @@ use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use async_trait::async_trait;
use deno_core::futures::stream::FuturesUnordered;
use deno_core::futures::StreamExt;
use deno_error::JsErrorBox;
use deno_lib::util::hash::FastInsecureHasher;
use deno_npm::NpmResolutionPackage;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::NpmCache;
use deno_npm_cache::NpmCacheHttpClient;
use deno_npm_cache::NpmCacheSys;
use deno_npm_cache::TarballCache;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_terminal::colors;
use futures::stream::FuturesUnordered;
use futures::StreamExt;
use super::common::NpmPackageFsInstaller;
use super::PackageCaching;
use crate::args::LifecycleScriptsConfig;
use crate::colors;
use crate::npm::CliNpmCache;
use crate::npm::CliNpmTarballCache;
use crate::lifecycle_scripts::LifecycleScripts;
use crate::lifecycle_scripts::LifecycleScriptsStrategy;
use crate::LifecycleScriptsConfig;
use crate::NpmPackageFsInstaller;
use crate::PackageCaching;
/// Resolves packages from the global npm cache.
pub struct GlobalNpmPackageInstaller {
cache: Arc<CliNpmCache>,
tarball_cache: Arc<CliNpmTarballCache>,
pub struct GlobalNpmPackageInstaller<
THttpClient: NpmCacheHttpClient,
TSys: NpmCacheSys,
> {
cache: Arc<NpmCache<TSys>>,
tarball_cache: Arc<TarballCache<THttpClient, TSys>>,
resolution: Arc<NpmResolutionCell>,
lifecycle_scripts: LifecycleScriptsConfig,
system_info: NpmSystemInfo,
}
impl std::fmt::Debug for GlobalNpmPackageInstaller {
impl<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys> std::fmt::Debug
for GlobalNpmPackageInstaller<THttpClient, TSys>
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("GlobalNpmPackageInstaller")
.field("cache", &self.cache)
@ -42,10 +49,12 @@ impl std::fmt::Debug for GlobalNpmPackageInstaller {
}
}
impl GlobalNpmPackageInstaller {
impl<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys>
GlobalNpmPackageInstaller<THttpClient, TSys>
{
pub fn new(
cache: Arc<CliNpmCache>,
tarball_cache: Arc<CliNpmTarballCache>,
cache: Arc<NpmCache<TSys>>,
tarball_cache: Arc<TarballCache<THttpClient, TSys>>,
resolution: Arc<NpmResolutionCell>,
lifecycle_scripts: LifecycleScriptsConfig,
system_info: NpmSystemInfo,
@ -58,10 +67,34 @@ impl GlobalNpmPackageInstaller {
system_info,
}
}
async fn cache_packages(
&self,
packages: &[NpmResolutionPackage],
) -> Result<(), deno_npm_cache::EnsurePackageError> {
let mut futures_unordered = FuturesUnordered::new();
for package in packages {
if let Some(dist) = &package.dist {
futures_unordered.push(async move {
self
.tarball_cache
.ensure_package(&package.id.nv, dist)
.await
});
}
}
while let Some(result) = futures_unordered.next().await {
// surface the first error
result?;
}
Ok(())
}
}
#[async_trait(?Send)]
impl NpmPackageFsInstaller for GlobalNpmPackageInstaller {
#[async_trait::async_trait(?Send)]
impl<THttpClient: NpmCacheHttpClient, TSys: NpmCacheSys> NpmPackageFsInstaller
for GlobalNpmPackageInstaller<THttpClient, TSys>
{
async fn cache_packages<'a>(
&self,
caching: PackageCaching<'a>,
@ -75,7 +108,8 @@ impl NpmPackageFsInstaller for GlobalNpmPackageInstaller {
.subset(&reqs)
.all_system_packages_partitioned(&self.system_info),
};
cache_packages(&package_partitions.packages, &self.tarball_cache)
self
.cache_packages(&package_partitions.packages)
.await
.map_err(JsErrorBox::from_err)?;
@ -87,14 +121,13 @@ impl NpmPackageFsInstaller for GlobalNpmPackageInstaller {
.map_err(JsErrorBox::from_err)?;
}
let mut lifecycle_scripts =
super::common::lifecycle_scripts::LifecycleScripts::new(
&self.lifecycle_scripts,
GlobalLifecycleScripts::new(
self.cache.as_ref(),
&self.lifecycle_scripts.root_dir,
),
);
let mut lifecycle_scripts = LifecycleScripts::new(
&self.lifecycle_scripts,
GlobalLifecycleScripts::new(
self.cache.as_ref(),
&self.lifecycle_scripts.root_dir,
),
);
// For the global cache, we don't run scripts so we just care that there _are_
// scripts. Kind of hacky, but avoids fetching the "extra" info from the registry.
@ -119,33 +152,15 @@ impl NpmPackageFsInstaller for GlobalNpmPackageInstaller {
}
}
async fn cache_packages(
packages: &[NpmResolutionPackage],
tarball_cache: &Arc<CliNpmTarballCache>,
) -> Result<(), deno_npm_cache::EnsurePackageError> {
let mut futures_unordered = FuturesUnordered::new();
for package in packages {
if let Some(dist) = &package.dist {
futures_unordered.push(async move {
tarball_cache.ensure_package(&package.id.nv, dist).await
});
}
}
while let Some(result) = futures_unordered.next().await {
// surface the first error
result?;
}
Ok(())
}
struct GlobalLifecycleScripts<'a> {
cache: &'a CliNpmCache,
struct GlobalLifecycleScripts<'a, TSys: NpmCacheSys> {
cache: &'a NpmCache<TSys>,
path_hash: u64,
}
impl<'a> GlobalLifecycleScripts<'a> {
fn new(cache: &'a CliNpmCache, root_dir: &Path) -> Self {
let mut hasher = FastInsecureHasher::new_without_deno_version();
impl<'a, TSys: NpmCacheSys> GlobalLifecycleScripts<'a, TSys> {
fn new(cache: &'a NpmCache<TSys>, root_dir: &Path) -> Self {
use std::hash::Hasher;
let mut hasher = twox_hash::XxHash64::default();
hasher.write(root_dir.to_string_lossy().as_bytes());
let path_hash = hasher.finish();
Self { cache, path_hash }
@ -159,8 +174,8 @@ impl<'a> GlobalLifecycleScripts<'a> {
}
}
impl super::common::lifecycle_scripts::LifecycleScriptsStrategy
for GlobalLifecycleScripts<'_>
impl<TSys: NpmCacheSys> LifecycleScriptsStrategy
for GlobalLifecycleScripts<'_, TSys>
{
fn can_run_scripts(&self) -> bool {
false

View file

@ -3,48 +3,44 @@
use std::path::PathBuf;
use std::sync::Arc;
use deno_core::parking_lot::Mutex;
use deno_error::JsError;
use deno_error::JsErrorBox;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions;
use deno_resolver::lockfile::LockfileLock;
use deno_resolver::lockfile::LockfileSys;
use deno_resolver::npm::managed::NpmResolutionCell;
use parking_lot::Mutex;
use thiserror::Error;
use super::WorkspaceNpmPatchPackages;
use crate::args::CliLockfile;
use crate::sys::CliSys;
pub type CliManagedNpmResolverCreateOptions =
ManagedNpmResolverCreateOptions<CliSys>;
#[derive(Debug, Clone)]
pub enum CliNpmResolverManagedSnapshotOption {
ResolveFromLockfile(Arc<CliLockfile>),
pub enum NpmResolverManagedSnapshotOption<TSys: LockfileSys> {
ResolveFromLockfile(Arc<LockfileLock<TSys>>),
Specified(Option<ValidSerializedNpmResolutionSnapshot>),
}
#[derive(Debug)]
enum SyncState {
Pending(Option<CliNpmResolverManagedSnapshotOption>),
enum SyncState<TSys: LockfileSys> {
Pending(Option<NpmResolverManagedSnapshotOption<TSys>>),
Err(ResolveSnapshotError),
Success,
}
#[derive(Debug)]
pub struct NpmResolutionInitializer {
pub struct NpmResolutionInitializer<TSys: LockfileSys> {
npm_resolution: Arc<NpmResolutionCell>,
patch_packages: Arc<WorkspaceNpmPatchPackages>,
queue: tokio::sync::Mutex<()>,
sync_state: Mutex<SyncState>,
sync_state: Mutex<SyncState<TSys>>,
}
impl NpmResolutionInitializer {
impl<TSys: LockfileSys> NpmResolutionInitializer<TSys> {
pub fn new(
npm_resolution: Arc<NpmResolutionCell>,
patch_packages: Arc<WorkspaceNpmPatchPackages>,
snapshot_option: CliNpmResolverManagedSnapshotOption,
snapshot_option: NpmResolverManagedSnapshotOption<TSys>,
) -> Self {
Self {
npm_resolution,
@ -124,13 +120,13 @@ pub struct ResolveSnapshotError {
source: SnapshotFromLockfileError,
}
fn resolve_snapshot(
snapshot: CliNpmResolverManagedSnapshotOption,
fn resolve_snapshot<TSys: LockfileSys>(
snapshot: NpmResolverManagedSnapshotOption<TSys>,
patch_packages: &WorkspaceNpmPatchPackages,
) -> Result<Option<ValidSerializedNpmResolutionSnapshot>, ResolveSnapshotError>
{
match snapshot {
CliNpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
NpmResolverManagedSnapshotOption::ResolveFromLockfile(lockfile) => {
if !lockfile.overwrite() {
let snapshot = snapshot_from_lockfile(lockfile.clone(), patch_packages)
.map_err(|source| ResolveSnapshotError {
@ -142,7 +138,7 @@ fn resolve_snapshot(
Ok(None)
}
}
CliNpmResolverManagedSnapshotOption::Specified(snapshot) => Ok(snapshot),
NpmResolverManagedSnapshotOption::Specified(snapshot) => Ok(snapshot),
}
}
@ -153,38 +149,15 @@ pub enum SnapshotFromLockfileError {
SnapshotFromLockfile(#[from] deno_npm::resolution::SnapshotFromLockfileError),
}
pub(crate) struct DefaultTarballUrl;
impl deno_npm::resolution::DefaultTarballUrlProvider for DefaultTarballUrl {
fn default_tarball_url(
&self,
nv: &deno_semver::package::PackageNv,
) -> String {
let scope = nv.scope();
let package_name = if let Some(scope) = scope {
nv.name
.strip_prefix(scope)
.unwrap_or(&nv.name)
.trim_start_matches('/')
} else {
&nv.name
};
format!(
"https://registry.npmjs.org/{}/-/{}-{}.tgz",
nv.name, package_name, nv.version
)
}
}
fn snapshot_from_lockfile(
lockfile: Arc<CliLockfile>,
fn snapshot_from_lockfile<TSys: LockfileSys>(
lockfile: Arc<LockfileLock<TSys>>,
patch_packages: &WorkspaceNpmPatchPackages,
) -> Result<ValidSerializedNpmResolutionSnapshot, SnapshotFromLockfileError> {
let snapshot = deno_npm::resolution::snapshot_from_lockfile(
deno_npm::resolution::SnapshotFromLockfileParams {
patch_packages: &patch_packages.0,
lockfile: &lockfile.lock(),
default_tarball_url: &DefaultTarballUrl,
default_tarball_url: Default::default(),
},
)?;

View file

@ -4,41 +4,49 @@ use std::borrow::Cow;
use std::path::PathBuf;
use std::sync::Arc;
pub use common::DenoTaskLifeCycleScriptsExecutor;
use deno_core::unsync::sync::AtomicFlag;
use deno_error::JsErrorBox;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use deno_npm::NpmSystemInfo;
use deno_npm_cache::NpmCache;
use deno_npm_cache::NpmCacheHttpClient;
use deno_resolver::lockfile::LockfileLock;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_runtime::colors;
use deno_resolver::workspace::WorkspaceNpmPatchPackages;
use deno_semver::package::PackageReq;
pub use local::SetupCache;
mod bin_entries;
mod extra_info;
mod flag;
mod fs;
mod global;
pub mod initializer;
pub mod lifecycle_scripts;
mod local;
pub mod package_json;
pub mod process_state;
pub mod resolution;
pub use bin_entries::BinEntries;
pub use bin_entries::BinEntriesError;
use deno_terminal::colors;
use deno_unsync::sync::AtomicFlag;
use rustc_hash::FxHashSet;
pub use self::common::lifecycle_scripts::LifecycleScriptsExecutor;
pub use self::common::lifecycle_scripts::NullLifecycleScriptsExecutor;
use self::common::NpmPackageExtraInfoProvider;
pub use self::common::NpmPackageFsInstaller;
pub use self::extra_info::CachedNpmPackageExtraInfoProvider;
pub use self::extra_info::ExpectedExtraInfo;
pub use self::extra_info::NpmPackageExtraInfoProvider;
use self::global::GlobalNpmPackageInstaller;
use self::initializer::NpmResolutionInitializer;
use self::lifecycle_scripts::LifecycleScriptsExecutor;
use self::local::LocalNpmInstallSys;
use self::local::LocalNpmPackageInstaller;
pub use self::resolution::AddPkgReqsResult;
pub use self::resolution::NpmResolutionInstaller;
use super::CliNpmCache;
use super::CliNpmTarballCache;
use super::NpmResolutionInitializer;
use super::WorkspaceNpmPatchPackages;
use crate::args::CliLockfile;
use crate::args::LifecycleScriptsConfig;
use crate::args::NpmInstallDepsProvider;
use crate::args::PackageJsonDepValueParseWithLocationError;
use crate::sys::CliSys;
use crate::util::progress_bar::ProgressBar;
mod common;
mod global;
mod local;
mod resolution;
pub use self::local::LocalSetupCache;
use self::package_json::NpmInstallDepsProvider;
use self::package_json::PackageJsonDepValueParseWithLocationError;
use self::resolution::AddPkgReqsResult;
use self::resolution::NpmResolutionInstaller;
use self::resolution::NpmResolutionInstallerSys;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PackageCaching<'a> {
@ -46,34 +54,105 @@ pub enum PackageCaching<'a> {
All,
}
#[derive(Debug, Clone, Eq, PartialEq, Default)]
/// The set of npm packages that are allowed to run lifecycle scripts.
pub enum PackagesAllowedScripts {
  /// Every package may run its lifecycle scripts.
  All,
  /// Only the listed packages may run their lifecycle scripts.
  Some(Vec<String>),
  /// No package may run lifecycle scripts (the default).
  #[default]
  None,
}
/// Info needed to run NPM lifecycle scripts
#[derive(Clone, Debug, Eq, PartialEq, Default)]
pub struct LifecycleScriptsConfig {
  /// Which packages are permitted to run their lifecycle scripts.
  pub allowed: PackagesAllowedScripts,
  // assumes this is the cwd the install command was started from,
  // used as the initial cwd for script processes — TODO confirm
  pub initial_cwd: PathBuf,
  // NOTE(review): presumably the workspace/root directory the scripts
  // run relative to — verify against callers
  pub root_dir: PathBuf,
  /// Part of an explicit `deno install`
  pub explicit_install: bool,
}
/// Receives progress notifications during npm installation so the embedder
/// can surface them (for example as log lines or a progress bar).
pub trait Reporter: std::fmt::Debug + Send + Sync + Clone + 'static {
  /// Value returned by the `on_*` notifications; implementations may use
  /// it to scope the reported operation (dropped when it ends).
  type Guard;
  /// Value returned by [`Reporter::clear_guard`].
  type ClearGuard;
  /// Called when an operation is blocking on `message`.
  fn on_blocking(&self, message: &str) -> Self::Guard;
  /// Called when initialization work described by `message` begins.
  fn on_initializing(&self, message: &str) -> Self::Guard;
  // NOTE(review): presumably suppresses/clears rendered output while the
  // returned guard is held — confirm with implementations
  fn clear_guard(&self) -> Self::ClearGuard;
}
#[derive(Debug, Clone)]
pub struct LogReporter;
impl Reporter for LogReporter {
type Guard = ();
type ClearGuard = ();
fn on_blocking(&self, message: &str) -> Self::Guard {
log::info!("{} {}", deno_terminal::colors::cyan("Blocking"), message);
}
fn on_initializing(&self, message: &str) -> Self::Guard {
log::info!("{} {}", deno_terminal::colors::green("Initialize"), message);
}
fn clear_guard(&self) -> Self::ClearGuard {}
}
/// Part of the resolution that interacts with the file system.
#[async_trait::async_trait(?Send)]
pub(crate) trait NpmPackageFsInstaller:
  std::fmt::Debug + Send + Sync
{
  /// Caches the requested packages (all of them, or the subset described
  /// by `caching`) to the file system.
  // NOTE(review): implementations appear to include global-cache and
  // local node_modules installers — confirm before relying on specifics
  async fn cache_packages<'a>(
    &self,
    caching: PackageCaching<'a>,
  ) -> Result<(), JsErrorBox>;
}
/// Marker trait combining every system capability `NpmInstaller` needs
/// (resolution installer + local install file system access).
/// `#[sys_traits::auto_impl]` implements it for any type satisfying the
/// supertraits.
#[sys_traits::auto_impl]
pub trait NpmInstallerSys:
  NpmResolutionInstallerSys + LocalNpmInstallSys
{
}
#[derive(Debug)]
pub struct NpmInstaller {
pub struct NpmInstaller<
TNpmCacheHttpClient: NpmCacheHttpClient,
TSys: NpmInstallerSys,
> {
fs_installer: Arc<dyn NpmPackageFsInstaller>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
npm_resolution_initializer: Arc<NpmResolutionInitializer>,
npm_resolution_installer: Arc<NpmResolutionInstaller>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_resolution_initializer: Arc<NpmResolutionInitializer<TSys>>,
npm_resolution_installer:
Arc<NpmResolutionInstaller<TNpmCacheHttpClient, TSys>>,
maybe_lockfile: Option<Arc<LockfileLock<TSys>>>,
npm_resolution: Arc<NpmResolutionCell>,
top_level_install_flag: AtomicFlag,
cached_reqs: tokio::sync::Mutex<FxHashSet<PackageReq>>,
}
impl NpmInstaller {
impl<TNpmCacheHttpClient: NpmCacheHttpClient, TSys: NpmInstallerSys>
NpmInstaller<TNpmCacheHttpClient, TSys>
{
#[allow(clippy::too_many_arguments)]
pub fn new(
pub fn new<TReporter: Reporter>(
lifecycle_scripts_executor: Arc<dyn LifecycleScriptsExecutor>,
npm_cache: Arc<CliNpmCache>,
npm_cache: Arc<NpmCache<TSys>>,
npm_install_deps_provider: Arc<NpmInstallDepsProvider>,
npm_registry_info_provider: Arc<
dyn deno_npm::registry::NpmRegistryApi + Send + Sync,
>,
npm_resolution: Arc<NpmResolutionCell>,
npm_resolution_initializer: Arc<NpmResolutionInitializer>,
npm_resolution_installer: Arc<NpmResolutionInstaller>,
progress_bar: &ProgressBar,
sys: CliSys,
tarball_cache: Arc<CliNpmTarballCache>,
maybe_lockfile: Option<Arc<CliLockfile>>,
npm_resolution_initializer: Arc<NpmResolutionInitializer<TSys>>,
npm_resolution_installer: Arc<
NpmResolutionInstaller<TNpmCacheHttpClient, TSys>,
>,
reporter: &TReporter,
sys: TSys,
tarball_cache: Arc<deno_npm_cache::TarballCache<TNpmCacheHttpClient, TSys>>,
maybe_lockfile: Option<Arc<LockfileLock<TSys>>>,
maybe_node_modules_path: Option<PathBuf>,
lifecycle_scripts: LifecycleScriptsConfig,
system_info: NpmSystemInfo,
@ -85,12 +164,11 @@ impl NpmInstaller {
lifecycle_scripts_executor,
npm_cache.clone(),
Arc::new(NpmPackageExtraInfoProvider::new(
npm_cache,
npm_registry_info_provider,
workspace_patch_packages,
)),
npm_install_deps_provider.clone(),
progress_bar.clone(),
reporter.clone(),
npm_resolution.clone(),
sys,
tarball_cache,

View file

@ -5,7 +5,7 @@ use std::collections::HashMap;
use std::path::Path;
use std::path::PathBuf;
use deno_core::error::AnyError;
use anyhow::Error as AnyError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmPackageExtraInfo;
use deno_npm::NpmResolutionPackage;
@ -13,9 +13,9 @@ use deno_semver::package::PackageNv;
use deno_semver::SmallStackString;
use deno_semver::Version;
use super::CachedNpmPackageExtraInfoProvider;
use crate::args::LifecycleScriptsConfig;
use crate::util::progress_bar::ProgressBar;
use crate::CachedNpmPackageExtraInfoProvider;
use crate::LifecycleScriptsConfig;
use crate::PackagesAllowedScripts;
pub struct PackageWithScript<'a> {
pub package: &'a NpmResolutionPackage,
@ -27,7 +27,6 @@ pub struct LifecycleScriptsExecutorOptions<'a> {
pub init_cwd: &'a Path,
pub process_state: &'a str,
pub root_node_modules_dir_path: &'a Path,
pub progress_bar: &'a ProgressBar,
pub on_ran_pkg_scripts:
&'a dyn Fn(&NpmResolutionPackage) -> std::io::Result<()>,
pub snapshot: &'a NpmResolutionSnapshot,
@ -125,7 +124,6 @@ impl<'a> LifecycleScripts<'a> {
if !self.strategy.can_run_scripts() {
return false;
}
use crate::args::PackagesAllowedScripts;
match &self.config.allowed {
PackagesAllowedScripts::All => true,
// TODO: make this more correct

File diff suppressed because it is too large Load diff

View file

@ -4,8 +4,6 @@ use std::path::PathBuf;
use std::sync::Arc;
use deno_config::workspace::Workspace;
use deno_core::serde_json;
use deno_core::url::Url;
use deno_package_json::PackageJsonDepValue;
use deno_package_json::PackageJsonDepValueParseError;
use deno_package_json::PackageJsonDepWorkspaceReq;
@ -16,7 +14,9 @@ use deno_semver::package::PackageReq;
use deno_semver::StackString;
use deno_semver::Version;
use deno_semver::VersionReq;
use serde_json;
use thiserror::Error;
use url::Url;
#[derive(Debug)]
pub struct InstallNpmRemotePkg {

View file

@ -0,0 +1,46 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::path::Path;
use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot;
use serde::Deserialize;
use serde::Serialize;
/// How the npm resolution of the current process was derived.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum NpmProcessStateKind {
  /// A managed resolution captured as a serialized snapshot.
  Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot),
  /// "Bring your own node_modules" mode; no snapshot is carried.
  Byonm,
}
/// The serialized npm process state which can be written to a file and then
/// the FD or path can be passed to a spawned deno process via the
/// `DENO_DONT_USE_INTERNAL_NODE_COMPAT_STATE_FD` env var.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NpmProcessState {
  /// How the resolution was derived (snapshot or byonm).
  pub kind: NpmProcessStateKind,
  /// Lossy string form of the local `node_modules` directory path, when
  /// one exists.
  pub local_node_modules_path: Option<String>,
}
impl NpmProcessState {
  /// Creates process state for a managed resolution, optionally recording
  /// the local `node_modules` directory path.
  pub fn new_managed(
    snapshot: ValidSerializedNpmResolutionSnapshot,
    node_modules_path: Option<&Path>,
  ) -> Self {
    let local_node_modules_path =
      node_modules_path.map(|path| path.to_string_lossy().into_owned());
    Self {
      kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()),
      local_node_modules_path,
    }
  }

  /// Creates process state for a resolution backed by a local
  /// `node_modules` directory.
  pub fn new_local(
    snapshot: ValidSerializedNpmResolutionSnapshot,
    node_modules_path: &Path,
  ) -> Self {
    Self::new_managed(snapshot, Some(node_modules_path))
  }

  /// Serializes the state to a JSON string for handing to a spawned
  /// process.
  pub fn as_serialized(&self) -> String {
    serde_json::to_string(self).unwrap()
  }
}

View file

@ -0,0 +1,307 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::sync::Arc;
use capacity_builder::StringBuilder;
use deno_error::JsErrorBox;
use deno_lockfile::NpmPackageDependencyLockfileInfo;
use deno_lockfile::NpmPackageLockfileInfo;
use deno_npm::registry::NpmPackageInfo;
use deno_npm::registry::NpmRegistryApi;
use deno_npm::registry::NpmRegistryPackageInfoLoadError;
use deno_npm::resolution::AddPkgReqsOptions;
use deno_npm::resolution::DefaultTarballUrlProvider;
use deno_npm::resolution::NpmResolutionError;
use deno_npm::resolution::NpmResolutionSnapshot;
use deno_npm::NpmResolutionPackage;
use deno_npm_cache::NpmCacheHttpClient;
use deno_npm_cache::NpmCacheSys;
use deno_npm_cache::RegistryInfoProvider;
use deno_resolver::display::DisplayTreeNode;
use deno_resolver::lockfile::LockfileLock;
use deno_resolver::lockfile::LockfileSys;
use deno_resolver::npm::managed::NpmResolutionCell;
use deno_resolver::workspace::WorkspaceNpmPatchPackages;
use deno_semver::jsr::JsrDepPackageReq;
use deno_semver::package::PackageNv;
use deno_semver::package::PackageReq;
use deno_semver::SmallStackString;
use deno_semver::StackString;
use deno_semver::VersionReq;
use deno_terminal::colors;
use deno_unsync::sync::TaskQueue;
/// Outcome of adding package requirements to the npm resolution.
pub struct AddPkgReqsResult {
  /// Results from adding the individual packages.
  ///
  /// The indexes of the results correspond to the indexes of the provided
  /// package requirements.
  pub results: Vec<Result<PackageNv, NpmResolutionError>>,
  /// The final result of resolving and caching all the package requirements.
  pub dependencies_result: Result<(), JsErrorBox>,
}
/// System capabilities the resolution installer needs: lockfile access plus
/// the npm cache's requirements. Auto-implemented via
/// `#[sys_traits::auto_impl]`.
#[sys_traits::auto_impl]
pub trait NpmResolutionInstallerSys: LockfileSys + NpmCacheSys {}
/// Updates the npm resolution with the provided package requirements.
#[derive(Debug)]
pub struct NpmResolutionInstaller<
  TNpmCacheHttpClient: NpmCacheHttpClient,
  TSys: NpmResolutionInstallerSys,
> {
  // provides (and caches) npm registry package information
  registry_info_provider: Arc<RegistryInfoProvider<TNpmCacheHttpClient, TSys>>,
  // shared cell holding the current resolution snapshot
  resolution: Arc<NpmResolutionCell>,
  // when present, successful resolutions are written back to this lockfile
  maybe_lockfile: Option<Arc<LockfileLock<TSys>>>,
  // workspace packages that patch npm packages during resolution
  patch_packages: Arc<WorkspaceNpmPatchPackages>,
  // serializes snapshot updates so only one resolution runs at a time
  update_queue: TaskQueue,
}
impl<
    TNpmCacheHttpClient: NpmCacheHttpClient,
    TSys: NpmResolutionInstallerSys,
  > NpmResolutionInstaller<TNpmCacheHttpClient, TSys>
{
  pub fn new(
    registry_info_provider: Arc<
      RegistryInfoProvider<TNpmCacheHttpClient, TSys>,
    >,
    resolution: Arc<NpmResolutionCell>,
    maybe_lockfile: Option<Arc<LockfileLock<TSys>>>,
    patch_packages: Arc<WorkspaceNpmPatchPackages>,
  ) -> Self {
    Self {
      registry_info_provider,
      resolution,
      maybe_lockfile,
      patch_packages,
      update_queue: Default::default(),
    }
  }

  /// Loads the registry information for `package_name`, caching it for
  /// subsequent resolutions.
  pub async fn cache_package_info(
    &self,
    package_name: &str,
  ) -> Result<Arc<NpmPackageInfo>, NpmRegistryPackageInfoLoadError> {
    // this will internally cache the package information
    self.registry_info_provider.package_info(package_name).await
  }

  /// Resolves the provided package requirements, storing the resulting
  /// snapshot in the shared resolution cell (and lockfile, when present)
  /// on success.
  pub async fn add_package_reqs(
    &self,
    package_reqs: &[PackageReq],
  ) -> AddPkgReqsResult {
    // only allow one thread in here at a time
    let _snapshot_lock = self.update_queue.acquire().await;
    let result = self.add_package_reqs_to_snapshot(package_reqs).await;
    AddPkgReqsResult {
      results: result.results,
      dependencies_result: match result.dep_graph_result {
        Ok(snapshot) => {
          self.resolution.set_snapshot(snapshot);
          Ok(())
        }
        Err(err) => Err(JsErrorBox::from_err(err)),
      },
    }
  }

  /// Runs npm resolution for `package_reqs` against the current snapshot,
  /// retrying once with force-reloaded registry info when resolution fails.
  async fn add_package_reqs_to_snapshot(
    &self,
    package_reqs: &[PackageReq],
  ) -> deno_npm::resolution::AddPkgReqsResult {
    fn get_types_node_version() -> VersionReq {
      // WARNING: When bumping this version, check if anything needs to be
      // updated in the `setNodeOnlyGlobalNames` call in 99_main_compiler.js
      VersionReq::parse_from_npm("22.9.0 - 22.15.15").unwrap()
    }

    let snapshot = self.resolution.snapshot();
    // fast path: everything requested is already in the snapshot
    if package_reqs
      .iter()
      .all(|req| snapshot.package_reqs().contains_key(req))
    {
      log::debug!("Snapshot already up to date. Skipping npm resolution.");
      return deno_npm::resolution::AddPkgReqsResult {
        results: package_reqs
          .iter()
          .map(|req| Ok(snapshot.package_reqs().get(req).unwrap().clone()))
          .collect(),
        dep_graph_result: Ok(snapshot),
        unmet_peer_diagnostics: Default::default(),
      };
    }

    log::debug!(
      /* this string is used in tests */
      "Running npm resolution."
    );
    let npm_registry_api = self.registry_info_provider.as_npm_registry_api();
    let result = snapshot
      .add_pkg_reqs(
        &npm_registry_api,
        AddPkgReqsOptions {
          package_reqs,
          types_node_version_req: Some(get_types_node_version()),
          patch_packages: &self.patch_packages.0,
        },
      )
      .await;
    let result = match &result.dep_graph_result {
      Err(NpmResolutionError::Resolution(err))
        if npm_registry_api.mark_force_reload() =>
      {
        log::debug!("{err:#}");
        log::debug!("npm resolution failed. Trying again...");

        // try again with forced reloading
        let snapshot = self.resolution.snapshot();
        snapshot
          .add_pkg_reqs(
            &npm_registry_api,
            AddPkgReqsOptions {
              package_reqs,
              types_node_version_req: Some(get_types_node_version()),
              patch_packages: &self.patch_packages.0,
            },
          )
          .await
      }
      _ => result,
    };

    self.registry_info_provider.clear_memory_cache();

    if !result.unmet_peer_diagnostics.is_empty()
      && log::log_enabled!(log::Level::Warn)
    {
      // render each unmet peer diagnostic as a tree under a warning
      // header, nesting the peer under its ancestor chain
      let root_node = DisplayTreeNode {
        text: format!(
          "{} The following peer dependency issues were found:",
          colors::yellow("Warning")
        ),
        children: result
          .unmet_peer_diagnostics
          .iter()
          .map(|diagnostic| {
            let mut node = DisplayTreeNode {
              text: format!(
                "peer {}: resolved to {}",
                diagnostic.dependency, diagnostic.resolved
              ),
              children: Vec::new(),
            };
            for ancestor in &diagnostic.ancestors {
              node = DisplayTreeNode {
                text: ancestor.to_string(),
                children: vec![node],
              };
            }
            node
          })
          .collect(),
      };
      let mut text = String::new();
      _ = root_node.print(&mut text);
      log::warn!("{}", text);
    }

    if let Ok(snapshot) = &result.dep_graph_result {
      self.populate_lockfile_from_snapshot(snapshot);
    }

    result
  }

  /// Writes the packages of a successfully resolved snapshot into the
  /// configured lockfile; no-op when no lockfile is configured.
  fn populate_lockfile_from_snapshot(&self, snapshot: &NpmResolutionSnapshot) {
    // converts a resolved package into its lockfile representation
    fn npm_package_to_lockfile_info(
      pkg: &NpmResolutionPackage,
    ) -> NpmPackageLockfileInfo {
      // regular dependencies, excluding ones also listed as optional
      let dependencies = pkg
        .dependencies
        .iter()
        .filter_map(|(name, id)| {
          if pkg.optional_dependencies.contains(name) {
            None
          } else {
            Some(NpmPackageDependencyLockfileInfo {
              name: name.clone(),
              id: id.as_serialized(),
            })
          }
        })
        .collect();
      let optional_dependencies = pkg
        .optional_dependencies
        .iter()
        .filter_map(|name| {
          let id = pkg.dependencies.get(name)?;
          Some(NpmPackageDependencyLockfileInfo {
            name: name.clone(),
            id: id.as_serialized(),
          })
        })
        .collect();
      let optional_peers = pkg
        .optional_peer_dependencies
        .iter()
        .filter_map(|name| {
          let id = pkg.dependencies.get(name)?;
          Some(NpmPackageDependencyLockfileInfo {
            name: name.clone(),
            id: id.as_serialized(),
          })
        })
        .collect();
      NpmPackageLockfileInfo {
        serialized_id: pkg.id.as_serialized(),
        integrity: pkg.dist.as_ref().and_then(|dist| {
          dist.integrity().for_lockfile().map(|s| s.into_owned())
        }),
        dependencies,
        optional_dependencies,
        os: pkg.system.os.clone(),
        cpu: pkg.system.cpu.clone(),
        tarball: pkg.dist.as_ref().and_then(|dist| {
          // Omit the tarball URL if it's the standard NPM registry URL
          // (fixed local name typo: was `tarbal_url_provider`)
          let tarball_url_provider =
            deno_npm::resolution::NpmRegistryDefaultTarballUrlProvider;
          if dist.tarball
            == tarball_url_provider.default_tarball_url(&pkg.id.nv)
          {
            None
          } else {
            Some(StackString::from_str(&dist.tarball))
          }
        }),
        deprecated: pkg.is_deprecated,
        bin: pkg.has_bin,
        scripts: pkg.has_scripts,
        optional_peers,
      }
    }

    let Some(lockfile) = &self.maybe_lockfile else {
      return;
    };
    let mut lockfile = lockfile.lock();
    for (package_req, nv) in snapshot.package_reqs() {
      let id = &snapshot.resolve_package_from_deno_module(nv).unwrap().id;
      lockfile.insert_package_specifier(
        JsrDepPackageReq::npm(package_req.clone()),
        {
          StringBuilder::<SmallStackString>::build(|builder| {
            builder.append(&id.nv.version);
            builder.append(&id.peer_dependencies);
          })
          .unwrap()
        },
      );
    }
    for package in snapshot.all_packages_for_every_system() {
      lockfile.insert_npm_package(npm_package_to_lockfile_info(package));
    }
  }
}

View file

@ -0,0 +1,4 @@
This crate is very much a work in progress. Remaining work:

- Use `MaybeArc` in some places
- Use `sys_traits`

View file

@ -627,15 +627,15 @@ fn lock_file_missing_top_level_package() {
assert!(!output.status.success());
let stderr = String::from_utf8(output.stderr).unwrap();
assert_eq!(
stderr,
test_util::assertions::assert_wildcard_match(
&stderr,
concat!(
"error: failed reading lockfile 'deno.lock'\n",
"error: failed reading lockfile '[WILDLINE]deno.lock'\n",
"\n",
"Caused by:\n",
" 0: The lockfile is corrupt. Remove the lockfile to regenerate it.\n",
" 1: Could not find 'cowsay@1.5.0' in the list of packages.\n"
)
),
);
}
@ -1141,7 +1141,7 @@ fn reload_info_not_found_cache_but_exists_remote() {
.args("run --cached-only main.ts")
.run();
output.assert_matches_text(concat!(
"error: failed reading lockfile '[WILDCARD]deno.lock'\n",
"error: failed reading lockfile '[WILDLINE]deno.lock'\n",
"\n",
"Caused by:\n",
" 0: Could not find '@denotest/esm-basic@1.0.0' specified in the lockfile.\n",