refactor: extract out sloppy imports resolution from CLI crate (#25920)

This is slow progress towards creating a `deno_resolver` crate.

Waiting on:

* https://github.com/denoland/deno/pull/25918
* https://github.com/denoland/deno/pull/25916
This commit is contained in:
David Sherret 2024-09-28 19:17:48 -04:00 committed by GitHub
parent 3138478f66
commit 5faf769ac6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 665 additions and 527 deletions

24
resolvers/deno/Cargo.toml Normal file
View file

@ -0,0 +1,24 @@
# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
[package]
name = "deno_resolver"
version = "0.0.1"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "Deno resolution algorithm"
[lib]
path = "lib.rs"
[features]
[dependencies]
deno_media_type.workspace = true
deno_path_util.workspace = true
url.workspace = true
[dev-dependencies]
test_util.workspace = true

3
resolvers/deno/README.md Normal file
View file

@ -0,0 +1,3 @@
# deno_resolver
Deno resolution algorithm.

3
resolvers/deno/lib.rs Normal file
View file

@ -0,0 +1,3 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
pub mod sloppy_imports;

View file

@ -0,0 +1,511 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::path::Path;
use std::path::PathBuf;
use deno_media_type::MediaType;
use deno_path_util::url_to_file_path;
use url::Url;
/// Kind of entry found on the file system when probing a path.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SloppyImportsFsEntry {
  /// A regular file.
  File,
  /// A directory.
  Dir,
}
/// A successful sloppy-import resolution, carrying the corrected specifier
/// and the kind of correction that was applied.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SloppyImportsResolution {
  /// Ex. `./file.js` to `./file.ts`
  JsToTs(Url),
  /// Ex. `./file` to `./file.ts`
  NoExtension(Url),
  /// Ex. `./dir` to `./dir/index.ts`
  Directory(Url),
}
impl SloppyImportsResolution {
  /// Borrows the resolved specifier, whichever variant this is.
  pub fn as_specifier(&self) -> &Url {
    match self {
      Self::JsToTs(specifier)
      | Self::NoExtension(specifier)
      | Self::Directory(specifier) => specifier,
    }
  }

  /// Consumes the resolution and returns the resolved specifier.
  pub fn into_specifier(self) -> Url {
    match self {
      Self::JsToTs(specifier)
      | Self::NoExtension(specifier)
      | Self::Directory(specifier) => specifier,
    }
  }

  /// Returns a "Maybe ..." suggestion suitable for a diagnostic hint.
  pub fn as_suggestion_message(&self) -> String {
    format!("Maybe {}", self.as_base_message())
  }

  /// Returns the suggestion as a capitalized sentence ending with a
  /// period, suitable for a quick-fix label.
  pub fn as_quick_fix_message(&self) -> String {
    let message = self.as_base_message();
    let mut chars = message.chars();
    let mut result = String::with_capacity(message.len() + 1);
    if let Some(first) = chars.next() {
      // `to_uppercase` may yield multiple chars for some code points
      result.extend(first.to_uppercase());
    }
    result.push_str(chars.as_str());
    result.push('.');
    result
  }

  /// Lower-case description of what the user should change; shared by the
  /// suggestion and quick-fix messages.
  fn as_base_message(&self) -> String {
    match self {
      Self::JsToTs(specifier) => {
        let media_type = MediaType::from_specifier(specifier);
        format!("change the extension to '{}'", media_type.as_ts_extension())
      }
      Self::NoExtension(specifier) => {
        let media_type = MediaType::from_specifier(specifier);
        format!("add a '{}' extension", media_type.as_ts_extension())
      }
      Self::Directory(specifier) => {
        let path = specifier.path();
        let file_name = match path.rsplit_once('/') {
          Some((_, file_name)) => file_name,
          None => path,
        };
        format!("specify path to '{}' file in directory instead", file_name)
      }
    }
  }
}
/// The kind of resolution currently being done.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SloppyImportsResolutionMode {
  /// Resolving for code that will be executed.
  Execution,
  /// Resolving for code that will be used for type information.
  Types,
}

impl SloppyImportsResolutionMode {
  /// Returns `true` when resolving for type information.
  pub fn is_types(&self) -> bool {
    matches!(self, SloppyImportsResolutionMode::Types)
  }
}
/// Minimal file system abstraction used by the sloppy imports resolver to
/// probe for candidate files without performing real I/O itself.
pub trait SloppyImportResolverFs {
  /// Returns the kind of entry at `path`, or `None` if nothing exists there.
  fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry>;

  /// Returns `true` only when `path` exists and is a regular file.
  fn is_file(&self, path: &Path) -> bool {
    self.stat_sync(path) == Some(SloppyImportsFsEntry::File)
  }
}
/// Resolver that probes the file system (through `Fs`) to fix up "sloppy"
/// imports: specifiers that omit an extension, use a `.js`-style extension
/// for a TypeScript file, or point at a directory.
#[derive(Debug)]
pub struct SloppyImportsResolver<Fs: SloppyImportResolverFs> {
  // file system abstraction used for all probing
  fs: Fs,
}
impl<Fs: SloppyImportResolverFs> SloppyImportsResolver<Fs> {
  /// Creates a new resolver backed by the given file system implementation.
  pub fn new(fs: Fs) -> Self {
    Self { fs }
  }

  /// Attempts to resolve a "sloppy" import for `specifier`.
  ///
  /// Probes the file system for alternative files (e.g. `./a.ts` for
  /// `./a.js`, `./a.ts` for `./a`, or `./dir/index.ts` for `./dir`) and
  /// returns the first candidate that exists as a file. Returns `None`
  /// when the specifier is not a `file:` URL, the specifier already
  /// resolves as-is (for execution), or no candidate matches.
  pub fn resolve(
    &self,
    specifier: &Url,
    mode: SloppyImportsResolutionMode,
  ) -> Option<SloppyImportsResolution> {
    // Why each candidate path is being probed; maps to the variant of
    // `SloppyImportsResolution` returned on a hit.
    #[derive(Clone, Copy)]
    enum SloppyImportsResolutionReason {
      JsToTs,
      NoExtension,
      Directory,
    }

    /// Strips the media type's extension from `path`, or returns the path
    /// unchanged when the media type is unknown (i.e. has no recognized
    /// extension). Returns `None` when the path doesn't actually end with
    /// the media type's extension.
    fn path_without_ext(
      path: &Path,
      media_type: MediaType,
    ) -> Option<Cow<str>> {
      let old_path_str = path.to_string_lossy();
      match media_type {
        MediaType::Unknown => Some(old_path_str),
        _ => old_path_str
          .strip_suffix(media_type.as_ts_extension())
          .map(|s| Cow::Owned(s.to_string())),
      }
    }

    /// Builds candidate paths by appending each probe media type's
    /// extension to `path_no_ext`, skipping the original media type.
    fn media_types_to_paths(
      path_no_ext: &str,
      original_media_type: MediaType,
      probe_media_type_types: Vec<MediaType>,
      reason: SloppyImportsResolutionReason,
    ) -> Vec<(PathBuf, SloppyImportsResolutionReason)> {
      probe_media_type_types
        .into_iter()
        .filter(|media_type| *media_type != original_media_type)
        .map(|media_type| {
          (
            PathBuf::from(format!(
              "{}{}",
              path_no_ext,
              media_type.as_ts_extension()
            )),
            reason,
          )
        })
        .collect::<Vec<_>>()
    }

    if specifier.scheme() != "file" {
      return None;
    }

    let path = url_to_file_path(specifier).ok()?;

    let probe_paths: Vec<(PathBuf, SloppyImportsResolutionReason)> =
      match self.fs.stat_sync(&path) {
        Some(SloppyImportsFsEntry::File) => {
          if mode.is_types() {
            let media_type = MediaType::from_specifier(specifier);
            // attempt to resolve the .d.ts file before the .js file
            let probe_media_type_types = match media_type {
              MediaType::JavaScript => {
                vec![MediaType::Dts, MediaType::JavaScript]
              }
              MediaType::Mjs => {
                vec![MediaType::Dmts, MediaType::Dts, MediaType::Mjs]
              }
              MediaType::Cjs => {
                vec![MediaType::Dcts, MediaType::Dts, MediaType::Cjs]
              }
              _ => return None,
            };
            let path_no_ext = path_without_ext(&path, media_type)?;
            media_types_to_paths(
              &path_no_ext,
              media_type,
              probe_media_type_types,
              SloppyImportsResolutionReason::JsToTs,
            )
          } else {
            // the file exists exactly as specified, so for execution there
            // is nothing to fix up
            return None;
          }
        }
        entry @ None | entry @ Some(SloppyImportsFsEntry::Dir) => {
          let media_type = MediaType::from_specifier(specifier);
          let probe_media_type_types = match media_type {
            MediaType::JavaScript => (
              if mode.is_types() {
                vec![MediaType::TypeScript, MediaType::Tsx, MediaType::Dts]
              } else {
                vec![MediaType::TypeScript, MediaType::Tsx]
              },
              SloppyImportsResolutionReason::JsToTs,
            ),
            MediaType::Jsx => {
              (vec![MediaType::Tsx], SloppyImportsResolutionReason::JsToTs)
            }
            MediaType::Mjs => (
              if mode.is_types() {
                vec![MediaType::Mts, MediaType::Dmts, MediaType::Dts]
              } else {
                vec![MediaType::Mts]
              },
              SloppyImportsResolutionReason::JsToTs,
            ),
            MediaType::Cjs => (
              if mode.is_types() {
                vec![MediaType::Cts, MediaType::Dcts, MediaType::Dts]
              } else {
                vec![MediaType::Cts]
              },
              SloppyImportsResolutionReason::JsToTs,
            ),
            MediaType::TypeScript
            | MediaType::Mts
            | MediaType::Cts
            | MediaType::Dts
            | MediaType::Dmts
            | MediaType::Dcts
            | MediaType::Tsx
            | MediaType::Json
            | MediaType::Wasm
            | MediaType::TsBuildInfo
            | MediaType::SourceMap => {
              // nothing to correct for these extensions
              return None;
            }
            MediaType::Unknown => (
              if mode.is_types() {
                vec![
                  MediaType::TypeScript,
                  MediaType::Tsx,
                  MediaType::Mts,
                  MediaType::Dts,
                  MediaType::Dmts,
                  MediaType::Dcts,
                  MediaType::JavaScript,
                  MediaType::Jsx,
                  MediaType::Mjs,
                ]
              } else {
                vec![
                  MediaType::TypeScript,
                  MediaType::JavaScript,
                  MediaType::Tsx,
                  MediaType::Jsx,
                  MediaType::Mts,
                  MediaType::Mjs,
                ]
              },
              SloppyImportsResolutionReason::NoExtension,
            ),
          };
          let mut probe_paths = match path_without_ext(&path, media_type) {
            Some(path_no_ext) => media_types_to_paths(
              &path_no_ext,
              media_type,
              probe_media_type_types.0,
              probe_media_type_types.1,
            ),
            None => vec![],
          };
          if matches!(entry, Some(SloppyImportsFsEntry::Dir)) {
            // try to resolve at the index file, preferring TypeScript (and
            // declaration files when resolving for types) over JavaScript
            let index_file_names: &[&str] = if mode.is_types() {
              &[
                "index.ts",
                "index.mts",
                "index.d.ts",
                "index.d.mts",
                "index.js",
                "index.mjs",
                "index.tsx",
                "index.jsx",
              ]
            } else {
              &[
                "index.ts",
                "index.mts",
                "index.tsx",
                "index.js",
                "index.mjs",
                "index.jsx",
              ]
            };
            probe_paths.extend(index_file_names.iter().map(|file_name| {
              (
                path.join(file_name),
                SloppyImportsResolutionReason::Directory,
              )
            }));
          }
          if probe_paths.is_empty() {
            return None;
          }
          probe_paths
        }
      };

    // return the first candidate that exists as a file
    for (probe_path, reason) in probe_paths {
      if self.fs.is_file(&probe_path) {
        if let Ok(specifier) = Url::from_file_path(probe_path) {
          match reason {
            SloppyImportsResolutionReason::JsToTs => {
              return Some(SloppyImportsResolution::JsToTs(specifier));
            }
            SloppyImportsResolutionReason::NoExtension => {
              return Some(SloppyImportsResolution::NoExtension(specifier));
            }
            SloppyImportsResolutionReason::Directory => {
              return Some(SloppyImportsResolution::Directory(specifier));
            }
          }
        }
      }
    }

    None
  }
}
#[cfg(test)]
mod test {
  use test_util::TestContext;

  use super::*;

  // End-to-end test of sloppy import resolution against a real temp dir.
  #[test]
  fn test_unstable_sloppy_imports() {
    fn resolve(specifier: &Url) -> Option<SloppyImportsResolution> {
      resolve_with_mode(specifier, SloppyImportsResolutionMode::Execution)
    }

    fn resolve_types(specifier: &Url) -> Option<SloppyImportsResolution> {
      resolve_with_mode(specifier, SloppyImportsResolutionMode::Types)
    }

    fn resolve_with_mode(
      specifier: &Url,
      mode: SloppyImportsResolutionMode,
    ) -> Option<SloppyImportsResolution> {
      // trait implementation backed by the real file system
      struct RealSloppyImportsResolverFs;
      impl SloppyImportResolverFs for RealSloppyImportsResolverFs {
        fn stat_sync(&self, path: &Path) -> Option<SloppyImportsFsEntry> {
          let stat = std::fs::metadata(path).ok()?;
          if stat.is_dir() {
            Some(SloppyImportsFsEntry::Dir)
          } else if stat.is_file() {
            Some(SloppyImportsFsEntry::File)
          } else {
            None
          }
        }
      }

      SloppyImportsResolver::new(RealSloppyImportsResolverFs)
        .resolve(specifier, mode)
    }

    let context = TestContext::default();
    let temp_dir = context.temp_dir().path();

    // scenarios like resolving ./example.js to ./example.ts
    for (ext_from, ext_to) in [("js", "ts"), ("js", "tsx"), ("mjs", "mts")] {
      let ts_file = temp_dir.join(format!("file.{}", ext_to));
      ts_file.write("");
      // the target file itself needs no fix-up
      assert_eq!(resolve(&ts_file.url_file()), None);
      assert_eq!(
        resolve(
          &temp_dir
            .url_dir()
            .join(&format!("file.{}", ext_from))
            .unwrap()
        ),
        Some(SloppyImportsResolution::JsToTs(ts_file.url_file())),
      );
      ts_file.remove_file();
    }

    // no extension scenarios
    for ext in ["js", "ts", "js", "tsx", "jsx", "mjs", "mts"] {
      let file = temp_dir.join(format!("file.{}", ext));
      file.write("");
      assert_eq!(
        resolve(
          &temp_dir
            .url_dir()
            .join("file") // no ext
            .unwrap()
        ),
        Some(SloppyImportsResolution::NoExtension(file.url_file()))
      );
      file.remove_file();
    }

    // .ts and .js exists, .js specified (goes to specified)
    {
      let ts_file = temp_dir.join("file.ts");
      ts_file.write("");
      let js_file = temp_dir.join("file.js");
      js_file.write("");
      assert_eq!(resolve(&js_file.url_file()), None);
    }

    // only js exists, .js specified
    {
      let js_only_file = temp_dir.join("js_only.js");
      js_only_file.write("");
      assert_eq!(resolve(&js_only_file.url_file()), None);
      // for types there is no .d.ts to prefer, so still no fix-up
      assert_eq!(resolve_types(&js_only_file.url_file()), None);
    }

    // resolving a directory to an index file
    {
      let routes_dir = temp_dir.join("routes");
      routes_dir.create_dir_all();
      let index_file = routes_dir.join("index.ts");
      index_file.write("");
      assert_eq!(
        resolve(&routes_dir.url_file()),
        Some(SloppyImportsResolution::Directory(index_file.url_file())),
      );
    }

    // both a directory and a file with specifier is present
    // (the sibling file wins over the directory's index file)
    {
      let api_dir = temp_dir.join("api");
      api_dir.create_dir_all();
      let bar_file = api_dir.join("bar.ts");
      bar_file.write("");
      let api_file = temp_dir.join("api.ts");
      api_file.write("");
      assert_eq!(
        resolve(&api_dir.url_file()),
        Some(SloppyImportsResolution::NoExtension(api_file.url_file())),
      );
    }
  }

  // Checks the user-facing message text for each resolution variant.
  #[test]
  fn test_sloppy_import_resolution_suggestion_message() {
    // directory
    assert_eq!(
      SloppyImportsResolution::Directory(
        Url::parse("file:///dir/index.js").unwrap()
      )
      .as_suggestion_message(),
      "Maybe specify path to 'index.js' file in directory instead"
    );
    // no ext
    assert_eq!(
      SloppyImportsResolution::NoExtension(
        Url::parse("file:///dir/index.mjs").unwrap()
      )
      .as_suggestion_message(),
      "Maybe add a '.mjs' extension"
    );
    // js to ts
    assert_eq!(
      SloppyImportsResolution::JsToTs(
        Url::parse("file:///dir/index.mts").unwrap()
      )
      .as_suggestion_message(),
      "Maybe change the extension to '.mts'"
    );
  }
}

32
resolvers/node/Cargo.toml Normal file
View file

@ -0,0 +1,32 @@
# Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
[package]
name = "node_resolver"
version = "0.7.0"
authors.workspace = true
edition.workspace = true
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "Node.js module resolution algorithm used in Deno"
[lib]
path = "lib.rs"
[features]
sync = ["deno_package_json/sync"]
[dependencies]
anyhow.workspace = true
async-trait.workspace = true
deno_media_type.workspace = true
deno_package_json.workspace = true
futures.workspace = true
lazy-regex.workspace = true
once_cell.workspace = true
path-clean = "=0.1.0"
regex.workspace = true
serde_json.workspace = true
thiserror.workspace = true
tokio.workspace = true
url.workspace = true

6
resolvers/node/README.md Normal file
View file

@ -0,0 +1,6 @@
# Node Resolver
[![crates](https://img.shields.io/crates/v/node_resolver.svg)](https://crates.io/crates/node_resolver)
[![docs](https://docs.rs/node_resolver/badge.svg)](https://docs.rs/node_resolver)
Provides Node.js compatible resolution for the Deno project.

654
resolvers/node/analyze.rs Normal file
View file

@ -0,0 +1,654 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::collections::BTreeSet;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
use futures::future::LocalBoxFuture;
use futures::stream::FuturesUnordered;
use futures::FutureExt;
use futures::StreamExt;
use once_cell::sync::Lazy;
use anyhow::Context;
use anyhow::Error as AnyError;
use url::Url;
use crate::env::NodeResolverEnv;
use crate::package_json::load_pkg_json;
use crate::path::to_file_specifier;
use crate::resolution::NodeResolverRc;
use crate::NodeModuleKind;
use crate::NodeResolutionMode;
use crate::NpmResolverRc;
use crate::PathClean;
/// Result of analyzing a file that was expected to be CommonJS.
#[derive(Debug, Clone)]
pub enum CjsAnalysis {
  /// File was found to be an ES module and the translator should
  /// load the code as ESM.
  Esm(String),
  /// File is CommonJS; carries the discovered exports and reexports.
  Cjs(CjsAnalysisExports),
}
/// Exports and reexports discovered by analyzing a CommonJS file.
#[derive(Debug, Clone)]
pub struct CjsAnalysisExports {
  // names directly exported by the module
  pub exports: Vec<String>,
  // specifiers the module re-exports from
  pub reexports: Vec<String>,
}
/// Code analyzer for CJS and ESM files.
#[async_trait::async_trait(?Send)]
pub trait CjsCodeAnalyzer {
  /// Analyzes CommonJs code for exports and reexports, which is
  /// then used to determine the wrapper ESM module exports.
  ///
  /// Note that the source is provided by the caller when the caller
  /// already has it. If the source is needed by the implementation,
  /// then it can use the provided source, or otherwise load it if
  /// necessary.
  ///
  /// Returns [`CjsAnalysis::Esm`] when the file turns out to be an
  /// ES module rather than CommonJS.
  async fn analyze_cjs(
    &self,
    specifier: &Url,
    maybe_source: Option<String>,
  ) -> Result<CjsAnalysis, AnyError>;
}
/// Translates CommonJS modules into equivalent wrapper ES modules
/// (see [`NodeCodeTranslator::translate_cjs_to_esm`]).
pub struct NodeCodeTranslator<
  TCjsCodeAnalyzer: CjsCodeAnalyzer,
  TNodeResolverEnv: NodeResolverEnv,
> {
  // analyzer used to discover exports/reexports in CJS sources
  cjs_code_analyzer: TCjsCodeAnalyzer,
  // environment (fs access, package.json reading)
  env: TNodeResolverEnv,
  // resolver used for package.json `exports`-based resolution
  node_resolver: NodeResolverRc<TNodeResolverEnv>,
  // resolver for locating npm package folders
  npm_resolver: NpmResolverRc,
}
impl<TCjsCodeAnalyzer: CjsCodeAnalyzer, TNodeResolverEnv: NodeResolverEnv>
  NodeCodeTranslator<TCjsCodeAnalyzer, TNodeResolverEnv>
{
  /// Creates a new translator from its collaborating analyzer,
  /// environment and resolvers.
  pub fn new(
    cjs_code_analyzer: TCjsCodeAnalyzer,
    env: TNodeResolverEnv,
    node_resolver: NodeResolverRc<TNodeResolverEnv>,
    npm_resolver: NpmResolverRc,
  ) -> Self {
    Self {
      cjs_code_analyzer,
      env,
      node_resolver,
      npm_resolver,
    }
  }

  /// Translates given CJS module into ESM. This function will perform static
  /// analysis on the file to find defined exports and reexports.
  ///
  /// For all discovered reexports the analysis will be performed recursively.
  ///
  /// If successful a source code for equivalent ES module is returned.
  pub async fn translate_cjs_to_esm(
    &self,
    entry_specifier: &Url,
    source: Option<String>,
  ) -> Result<String, AnyError> {
    let mut temp_var_count = 0;

    let analysis = self
      .cjs_code_analyzer
      .analyze_cjs(entry_specifier, source)
      .await?;

    let analysis = match analysis {
      // already an ES module — return its source unchanged
      CjsAnalysis::Esm(source) => return Ok(source),
      CjsAnalysis::Cjs(analysis) => analysis,
    };

    // the generated wrapper loads the CJS module through a `require`
    // created from the wrapper module's own URL
    let mut source = vec![
      r#"import {createRequire as __internalCreateRequire} from "node:module";
const require = __internalCreateRequire(import.meta.url);"#
        .to_string(),
    ];

    // use a BTreeSet to make the output deterministic for v8's code cache
    let mut all_exports = analysis.exports.into_iter().collect::<BTreeSet<_>>();

    if !analysis.reexports.is_empty() {
      let mut errors = Vec::new();
      self
        .analyze_reexports(
          entry_specifier,
          analysis.reexports,
          &mut all_exports,
          &mut errors,
        )
        .await;

      // surface errors afterwards in a deterministic way
      if !errors.is_empty() {
        errors.sort_by_cached_key(|e| e.to_string());
        return Err(errors.remove(0));
      }
    }

    // escape the file path for embedding in a double-quoted JS string
    source.push(format!(
      "const mod = require(\"{}\");",
      entry_specifier
        .to_file_path()
        .unwrap()
        .to_str()
        .unwrap()
        .replace('\\', "\\\\")
        .replace('\'', "\\\'")
        .replace('\"', "\\\"")
    ));

    // `default` is handled separately below via `export default mod;`
    for export in &all_exports {
      if export.as_str() != "default" {
        add_export(
          &mut source,
          export,
          &format!("mod[\"{}\"]", escape_for_double_quote_string(export)),
          &mut temp_var_count,
        );
      }
    }

    source.push("export default mod;".to_string());

    let translated_source = source.join("\n");
    Ok(translated_source)
  }

  /// Recursively analyzes the re-exported modules of `entry_specifier`,
  /// accumulating their (non-default) exports into `all_exports`.
  async fn analyze_reexports<'a>(
    &'a self,
    entry_specifier: &url::Url,
    reexports: Vec<String>,
    all_exports: &mut BTreeSet<String>,
    // this goes through the modules concurrently, so collect
    // the errors in order to be deterministic
    errors: &mut Vec<anyhow::Error>,
  ) {
    // result of analyzing a single re-exported module
    struct Analysis {
      reexport_specifier: url::Url,
      referrer: url::Url,
      analysis: CjsAnalysis,
    }

    type AnalysisFuture<'a> = LocalBoxFuture<'a, Result<Analysis, AnyError>>;

    // guards against cycles and repeated work
    let mut handled_reexports: HashSet<Url> = HashSet::default();
    handled_reexports.insert(entry_specifier.clone());
    let mut analyze_futures: FuturesUnordered<AnalysisFuture<'a>> =
      FuturesUnordered::new();
    let cjs_code_analyzer = &self.cjs_code_analyzer;
    let mut handle_reexports =
      |referrer: url::Url,
       reexports: Vec<String>,
       analyze_futures: &mut FuturesUnordered<AnalysisFuture<'a>>,
       errors: &mut Vec<anyhow::Error>| {
        // 1. Resolve the re-exports and start a future to analyze each one
        for reexport in reexports {
          let result = self.resolve(
            &reexport,
            &referrer,
            // FIXME(bartlomieju): check if these conditions are okay, probably
            // should be `deno-require`, because `deno` is already used in `esm_resolver.rs`
            &["deno", "node", "require", "default"],
            NodeResolutionMode::Execution,
          );
          let reexport_specifier = match result {
            Ok(Some(specifier)) => specifier,
            Ok(None) => continue,
            Err(err) => {
              errors.push(err);
              continue;
            }
          };
          // skip anything already analyzed (or currently being analyzed)
          if !handled_reexports.insert(reexport_specifier.clone()) {
            continue;
          }
          let referrer = referrer.clone();
          let future = async move {
            let analysis = cjs_code_analyzer
              .analyze_cjs(&reexport_specifier, None)
              .await
              .with_context(|| {
                format!(
                  "Could not load '{}' ({}) referenced from {}",
                  reexport, reexport_specifier, referrer
                )
              })?;
            Ok(Analysis {
              reexport_specifier,
              referrer,
              analysis,
            })
          }
          .boxed_local();
          analyze_futures.push(future);
        }
      };

    handle_reexports(
      entry_specifier.clone(),
      reexports,
      &mut analyze_futures,
      errors,
    );

    while let Some(analysis_result) = analyze_futures.next().await {
      // 2. Look at the analysis result and resolve its exports and re-exports
      let Analysis {
        reexport_specifier,
        referrer,
        analysis,
      } = match analysis_result {
        Ok(analysis) => analysis,
        Err(err) => {
          errors.push(err);
          continue;
        }
      };
      match analysis {
        CjsAnalysis::Esm(_) => {
          // todo(dsherret): support this once supporting requiring ES modules
          errors.push(anyhow::anyhow!(
            "Cannot require ES module '{}' from '{}'",
            reexport_specifier,
            referrer,
          ));
        }
        CjsAnalysis::Cjs(analysis) => {
          // queue this module's own re-exports for analysis too
          if !analysis.reexports.is_empty() {
            handle_reexports(
              reexport_specifier.clone(),
              analysis.reexports,
              &mut analyze_futures,
              errors,
            );
          }
          all_exports.extend(
            analysis
              .exports
              .into_iter()
              .filter(|e| e.as_str() != "default"),
          );
        }
      }
    }
  }

  // todo(dsherret): what is going on here? Isn't this a bunch of duplicate code?
  /// Resolves a `require`d specifier relative to `referrer`, following
  /// CJS-style rules (relative paths, package.json `exports`/`main`,
  /// extension probing, and node_modules ancestor walking).
  fn resolve(
    &self,
    specifier: &str,
    referrer: &Url,
    conditions: &[&str],
    mode: NodeResolutionMode,
  ) -> Result<Option<Url>, AnyError> {
    // absolute specifiers are not handled yet
    if specifier.starts_with('/') {
      todo!();
    }
    let referrer_path = referrer.to_file_path().unwrap();
    if specifier.starts_with("./") || specifier.starts_with("../") {
      if let Some(parent) = referrer_path.parent() {
        return Some(
          self
            .file_extension_probe(parent.join(specifier), &referrer_path)
            .map(|p| to_file_specifier(&p)),
        )
        .transpose();
      } else {
        todo!();
      }
    }

    // We've got a bare specifier or maybe bare_specifier/blah.js"
    let (package_specifier, package_subpath) =
      parse_specifier(specifier).unwrap();

    // a missing package is reported as `Ok(None)`; other resolution
    // failures propagate as errors
    let module_dir = match self
      .npm_resolver
      .resolve_package_folder_from_package(package_specifier.as_str(), referrer)
    {
      Err(err)
        if matches!(
          err.as_kind(),
          crate::errors::PackageFolderResolveErrorKind::PackageNotFound(..)
        ) =>
      {
        return Ok(None);
      }
      other => other,
    }?;

    let package_json_path = module_dir.join("package.json");
    let maybe_package_json =
      load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
    if let Some(package_json) = maybe_package_json {
      // modern packages: resolve through the `exports` field
      if let Some(exports) = &package_json.exports {
        return Some(
          self
            .node_resolver
            .package_exports_resolve(
              &package_json_path,
              &package_subpath,
              exports,
              Some(referrer),
              NodeModuleKind::Esm,
              conditions,
              mode,
            )
            .map_err(AnyError::from),
        )
        .transpose();
      }

      // old school
      if package_subpath != "." {
        let d = module_dir.join(package_subpath);
        if self.env.is_dir_sync(&d) {
          // subdir might have a package.json that specifies the entrypoint
          let package_json_path = d.join("package.json");
          let maybe_package_json =
            load_pkg_json(self.env.pkg_json_fs(), &package_json_path)?;
          if let Some(package_json) = maybe_package_json {
            if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
              return Ok(Some(to_file_specifier(&d.join(main).clean())));
            }
          }

          return Ok(Some(to_file_specifier(&d.join("index.js").clean())));
        }
        return Some(
          self
            .file_extension_probe(d, &referrer_path)
            .map(|p| to_file_specifier(&p)),
        )
        .transpose();
      } else if let Some(main) = package_json.main(NodeModuleKind::Cjs) {
        return Ok(Some(to_file_specifier(&module_dir.join(main).clean())));
      } else {
        return Ok(Some(to_file_specifier(
          &module_dir.join("index.js").clean(),
        )));
      }
    }

    // as a fallback, attempt to resolve it via the ancestor directories
    let mut last = referrer_path.as_path();
    while let Some(parent) = last.parent() {
      if !self.npm_resolver.in_npm_package_at_dir_path(parent) {
        break;
      }
      let path = if parent.ends_with("node_modules") {
        parent.join(specifier)
      } else {
        parent.join("node_modules").join(specifier)
      };
      if let Ok(path) = self.file_extension_probe(path, &referrer_path) {
        return Ok(Some(to_file_specifier(&path)));
      }
      last = parent;
    }

    Err(not_found(specifier, &referrer_path))
  }

  /// Probes for a file at `p`, trying `{p}.js`, a directory `index.js`,
  /// the path itself, and `{p}.json`, in CJS resolution order.
  fn file_extension_probe(
    &self,
    p: PathBuf,
    referrer: &Path,
  ) -> Result<PathBuf, AnyError> {
    let p = p.clean();
    if self.env.exists_sync(&p) {
      let file_name = p.file_name().unwrap();
      // a sibling `{name}.js` file takes precedence over the entry itself
      let p_js =
        p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
      if self.env.is_file_sync(&p_js) {
        return Ok(p_js);
      } else if self.env.is_dir_sync(&p) {
        return Ok(p.join("index.js"));
      } else {
        return Ok(p);
      }
    } else if let Some(file_name) = p.file_name() {
      {
        let p_js =
          p.with_file_name(format!("{}.js", file_name.to_str().unwrap()));
        if self.env.is_file_sync(&p_js) {
          return Ok(p_js);
        }
      }
      {
        let p_json =
          p.with_file_name(format!("{}.json", file_name.to_str().unwrap()));
        if self.env.is_file_sync(&p_json) {
          return Ok(p_json);
        }
      }
    }
    Err(not_found(&p.to_string_lossy(), referrer))
  }
}
// Identifiers that cannot be used as plain `export const <name> = ...;`
// declarations in the generated ESM wrapper; exports with these names are
// instead re-exported through a temp variable (see `add_export`).
// NOTE(review): the list includes non-keywords like `mod` — presumably
// because the wrapper itself declares `const mod` — and `get`/`set`/`eval`
// etc.; confirm before trimming.
static RESERVED_WORDS: Lazy<HashSet<&str>> = Lazy::new(|| {
  HashSet::from([
    "abstract",
    "arguments",
    "async",
    "await",
    "boolean",
    "break",
    "byte",
    "case",
    "catch",
    "char",
    "class",
    "const",
    "continue",
    "debugger",
    "default",
    "delete",
    "do",
    "double",
    "else",
    "enum",
    "eval",
    "export",
    "extends",
    "false",
    "final",
    "finally",
    "float",
    "for",
    "function",
    "get",
    "goto",
    "if",
    "implements",
    "import",
    "in",
    "instanceof",
    "int",
    "interface",
    "let",
    "long",
    "mod",
    "native",
    "new",
    "null",
    "package",
    "private",
    "protected",
    "public",
    "return",
    "set",
    "short",
    "static",
    "super",
    "switch",
    "synchronized",
    "this",
    "throw",
    "throws",
    "transient",
    "true",
    "try",
    "typeof",
    "var",
    "void",
    "volatile",
    "while",
    "with",
    "yield",
  ])
});
/// Appends the ESM source lines that re-export `name` (initialized from
/// `initializer`) to `source`. Names that are reserved words or not valid
/// identifiers are exported through a numbered temp variable using the
/// string-name export form; `temp_var_count` tracks the numbering.
fn add_export(
  source: &mut Vec<String>,
  name: &str,
  initializer: &str,
  temp_var_count: &mut usize,
) {
  fn is_valid_var_decl(name: &str) -> bool {
    // it's ok to be super strict here
    let mut chars = name.chars();
    let Some(first) = chars.next() else {
      return false;
    };
    (first.is_ascii_alphabetic() || first == '_' || first == '$')
      && chars.all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '$')
  }

  // TODO(bartlomieju): Node actually checks if a given export exists in `exports` object,
  // but it might not be necessary here since our analysis is more detailed?
  let needs_temp_var =
    RESERVED_WORDS.contains(name) || !is_valid_var_decl(name);
  if !needs_temp_var {
    source.push(format!("export const {name} = {initializer};"));
    return;
  }

  *temp_var_count += 1;
  // we can't create an identifier with a reserved word or invalid identifier name,
  // so assign it to a temporary variable that won't have a conflict, then re-export
  // it as a string
  source.push(format!(
    "const __deno_export_{temp_var_count}__ = {initializer};"
  ));
  source.push(format!(
    "export {{ __deno_export_{temp_var_count}__ as \"{}\" }};",
    escape_for_double_quote_string(name)
  ));
}
/// Splits a bare specifier like `@scope/pkg/sub/path` into its package name
/// (`@scope/pkg`) and subpath (`./sub/path`).
///
/// Returns `None` for an empty specifier, a scoped specifier without a
/// package name (e.g. `@scope`), or a package name containing `%` or `\`.
/// When the specifier has no subpath the returned subpath is `"."`.
fn parse_specifier(specifier: &str) -> Option<(String, String)> {
  if specifier.is_empty() {
    return None;
  }

  let mut separator_index = specifier.find('/');
  if specifier.starts_with('@') {
    // scoped package: the package name includes the first slash
    // (`@scope/name`), so the subpath starts at the second slash
    match separator_index {
      Some(index) => {
        separator_index =
          specifier[index + 1..].find('/').map(|i| i + index + 1);
      }
      // `@scope` without a package name is invalid
      None => return None,
    }
  }

  let package_name = match separator_index {
    Some(index) => &specifier[..index],
    None => specifier,
  };

  // Package name cannot have percent-encoding or backslash separators.
  // NOTE(review): Node additionally rejects names with a leading `.`; the
  // caller filters out `./`/`../` specifiers before reaching this point,
  // but a bare `.foo` would currently be accepted — confirm intended.
  if package_name.contains(&['%', '\\'][..]) {
    return None;
  }

  let package_subpath = match separator_index {
    // `index` is the byte offset of an ASCII `/`, so slicing is safe
    Some(index) => format!(".{}", &specifier[index..]),
    None => ".".to_string(),
  };
  Some((package_name.to_string(), package_subpath))
}
/// Builds an `ERR_MODULE_NOT_FOUND` error for a module that could not be
/// located relative to `referrer`.
fn not_found(path: &str, referrer: &Path) -> AnyError {
  let referrer = referrer.to_string_lossy();
  let msg = format!(
    "[ERR_MODULE_NOT_FOUND] Cannot find module \"{path}\" imported from \"{referrer}\""
  );
  std::io::Error::new(std::io::ErrorKind::NotFound, msg).into()
}
/// Escapes `"` and `\` in `text` for embedding in a double-quoted JS
/// string, borrowing the input unchanged when no escaping is needed.
fn escape_for_double_quote_string(text: &str) -> Cow<str> {
  // escaping is rare, so scan first and only allocate when required
  let needs_escaping = text.contains(|c| matches!(c, '"' | '\\'));
  if needs_escaping {
    // simple two-pass replace; this path is cold so clarity wins over speed
    Cow::Owned(text.replace('\\', "\\\\").replace('"', "\\\""))
  } else {
    Cow::Borrowed(text)
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_add_export() {
    let mut temp_var_count = 0;
    let mut source = vec![];

    // `static` is a reserved word and `dashed-export`/`3d` are invalid
    // identifiers, so those three go through numbered temp variables
    let exports = vec!["static", "server", "app", "dashed-export", "3d"];
    for export in exports {
      add_export(&mut source, export, "init", &mut temp_var_count);
    }
    assert_eq!(
      source,
      vec![
        "const __deno_export_1__ = init;".to_string(),
        "export { __deno_export_1__ as \"static\" };".to_string(),
        "export const server = init;".to_string(),
        "export const app = init;".to_string(),
        "const __deno_export_2__ = init;".to_string(),
        "export { __deno_export_2__ as \"dashed-export\" };".to_string(),
        "const __deno_export_3__ = init;".to_string(),
        "export { __deno_export_3__ as \"3d\" };".to_string(),
      ]
    )
  }

  #[test]
  fn test_parse_specifier() {
    // scoped package: name spans the first slash, subpath starts after it
    assert_eq!(
      parse_specifier("@some-package/core/actions"),
      Some(("@some-package/core".to_string(), "./actions".to_string()))
    );
  }
}

View file

@ -0,0 +1,48 @@
disallowed-methods = [
{ path = "std::env::current_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::is_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::is_file", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::is_symlink", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::metadata", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::read_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::read_link", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::try_exists", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::exists", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::is_file", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::metadata", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::read_link", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::env::set_current_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::env::temp_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::canonicalize", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::copy", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::create_dir_all", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::create_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::DirBuilder::new", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::hard_link", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::metadata", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::OpenOptions::new", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::read_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::read_link", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::read_to_string", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::read", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::remove_dir_all", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::remove_dir", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::remove_file", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::rename", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::set_permissions", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::symlink_metadata", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::fs::write", reason = "File system operations should be done using NodeResolverFs trait" },
{ path = "std::path::Path::exists", reason = "File system operations should be done using NodeResolverFs trait" },
]
disallowed-types = [
{ path = "std::sync::Arc", reason = "use crate::sync::MaybeArc instead" },
]

39
resolvers/node/env.rs Normal file
View file

@ -0,0 +1,39 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::Path;
use std::path::PathBuf;
use crate::sync::MaybeSend;
use crate::sync::MaybeSync;
/// Result of a `stat` call, reduced to the flags node resolution cares about.
pub struct NodeResolverFsStat {
  pub is_file: bool,
  pub is_dir: bool,
  pub is_symlink: bool,
}
/// Host environment abstraction used by the node resolver so it can run
/// against any backing file system.
pub trait NodeResolverEnv: std::fmt::Debug + MaybeSend + MaybeSync {
  /// Whether the given specifier names a built-in Node module.
  fn is_builtin_node_module(&self, specifier: &str) -> bool;

  /// Canonicalizes `path`, resolving symlinks.
  fn realpath_sync(&self, path: &Path) -> std::io::Result<PathBuf>;

  /// Stats the entry at `path`.
  fn stat_sync(&self, path: &Path) -> std::io::Result<NodeResolverFsStat>;

  /// Whether anything exists at `path`.
  fn exists_sync(&self, path: &Path) -> bool;

  /// Convenience helper: true when `path` stats successfully as a file.
  fn is_file_sync(&self, path: &Path) -> bool {
    self.stat_sync(path).map_or(false, |stat| stat.is_file)
  }

  /// Convenience helper: true when `path` stats successfully as a directory.
  fn is_dir_sync(&self, path: &Path) -> bool {
    self.stat_sync(path).map_or(false, |stat| stat.is_dir)
  }

  /// File system used for loading package.json files.
  fn pkg_json_fs(&self) -> &dyn deno_package_json::fs::DenoPkgJsonFs;
}

769
resolvers/node/errors.rs Normal file
View file

@ -0,0 +1,769 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::borrow::Cow;
use std::fmt::Write;
use std::path::PathBuf;
use thiserror::Error;
use url::Url;
use crate::NodeModuleKind;
use crate::NodeResolutionMode;
/// Generates a boxed error wrapper struct (`$name`) around an error kind
/// enum (`$kind_name`).
///
/// Boxing keeps the wrapper pointer-sized so `Result`s stay small, while
/// `as_kind`/`into_kind` expose the inner enum and the blanket `From` impl
/// lets `?` convert from anything the kind enum can be built from.
macro_rules! kinded_err {
  ($name:ident, $kind_name:ident) => {
    #[derive(Error, Debug)]
    #[error(transparent)]
    pub struct $name(pub Box<$kind_name>);
    impl $name {
      pub fn as_kind(&self) -> &$kind_name {
        &self.0
      }
      pub fn into_kind(self) -> $kind_name {
        *self.0
      }
    }
    impl<E> From<E> for $name
    where
      $kind_name: From<E>,
    {
      fn from(err: E) -> Self {
        $name(Box::new($kind_name::from(err)))
      }
    }
  };
}
/// Error codes matching Node.js' resolution error codes (plus one
/// Deno-specific addition), used to prefix user-facing messages.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
pub enum NodeJsErrorCode {
  ERR_INVALID_MODULE_SPECIFIER,
  ERR_INVALID_PACKAGE_CONFIG,
  ERR_INVALID_PACKAGE_TARGET,
  ERR_MODULE_NOT_FOUND,
  ERR_PACKAGE_IMPORT_NOT_DEFINED,
  ERR_PACKAGE_PATH_NOT_EXPORTED,
  ERR_UNKNOWN_FILE_EXTENSION,
  ERR_UNSUPPORTED_DIR_IMPORT,
  ERR_UNSUPPORTED_ESM_URL_SCHEME,
  /// Deno specific since Node doesn't support TypeScript.
  ERR_TYPES_NOT_FOUND,
}
impl std::fmt::Display for NodeJsErrorCode {
  /// Displays the code exactly as its stable string form.
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.write_str(self.as_str())
  }
}
impl NodeJsErrorCode {
pub fn as_str(&self) -> &'static str {
use NodeJsErrorCode::*;
match self {
ERR_INVALID_MODULE_SPECIFIER => "ERR_INVALID_MODULE_SPECIFIER",
ERR_INVALID_PACKAGE_CONFIG => "ERR_INVALID_PACKAGE_CONFIG",
ERR_INVALID_PACKAGE_TARGET => "ERR_INVALID_PACKAGE_TARGET",
ERR_MODULE_NOT_FOUND => "ERR_MODULE_NOT_FOUND",
ERR_PACKAGE_IMPORT_NOT_DEFINED => "ERR_PACKAGE_IMPORT_NOT_DEFINED",
ERR_PACKAGE_PATH_NOT_EXPORTED => "ERR_PACKAGE_PATH_NOT_EXPORTED",
ERR_UNKNOWN_FILE_EXTENSION => "ERR_UNKNOWN_FILE_EXTENSION",
ERR_UNSUPPORTED_DIR_IMPORT => "ERR_UNSUPPORTED_DIR_IMPORT",
ERR_UNSUPPORTED_ESM_URL_SCHEME => "ERR_UNSUPPORTED_ESM_URL_SCHEME",
ERR_TYPES_NOT_FOUND => "ERR_TYPES_NOT_FOUND",
}
}
}
/// Implemented by every resolution error so callers can retrieve the
/// Node.js-compatible error code for it.
pub trait NodeJsErrorCoded {
  fn code(&self) -> NodeJsErrorCode;
}
// Error resolving a package subpath when the referrer is a Deno module.
kinded_err!(
  ResolvePkgSubpathFromDenoModuleError,
  ResolvePkgSubpathFromDenoModuleErrorKind
);
impl NodeJsErrorCoded for ResolvePkgSubpathFromDenoModuleError {
  fn code(&self) -> NodeJsErrorCode {
    use ResolvePkgSubpathFromDenoModuleErrorKind::*;
    // Delegate to whichever underlying error occurred.
    match self.as_kind() {
      PackageSubpathResolve(e) => e.code(),
      UrlToNodeResolution(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum ResolvePkgSubpathFromDenoModuleErrorKind {
  #[error(transparent)]
  PackageSubpathResolve(#[from] PackageSubpathResolveError),
  #[error(transparent)]
  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
}
// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError
/// A module specifier that is syntactically or semantically invalid
/// (ERR_INVALID_MODULE_SPECIFIER).
#[derive(Debug, Clone, Error)]
#[error(
  "[{}] Invalid module '{}' {}{}",
  self.code(),
  request,
  reason,
  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default()
)]
pub struct InvalidModuleSpecifierError {
  /// The specifier that was requested.
  pub request: String,
  /// Why the specifier was rejected.
  pub reason: Cow<'static, str>,
  /// Module that imported the invalid specifier, when known.
  pub maybe_referrer: Option<String>,
}
impl NodeJsErrorCoded for InvalidModuleSpecifierError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_INVALID_MODULE_SPECIFIER
  }
}
// Error from the legacy (main-field based) resolution path.
kinded_err!(LegacyResolveError, LegacyResolveErrorKind);
#[derive(Debug, Error)]
pub enum LegacyResolveErrorKind {
  #[error(transparent)]
  TypesNotFound(#[from] TypesNotFoundError),
  #[error(transparent)]
  ModuleNotFound(#[from] ModuleNotFoundError),
}
impl NodeJsErrorCoded for LegacyResolveError {
  fn code(&self) -> NodeJsErrorCode {
    // Delegate to the underlying error's code.
    match self.as_kind() {
      LegacyResolveErrorKind::TypesNotFound(e) => e.code(),
      LegacyResolveErrorKind::ModuleNotFound(e) => e.code(),
    }
  }
}
// Error resolving the folder of an npm package from a referrer.
kinded_err!(PackageFolderResolveError, PackageFolderResolveErrorKind);
/// The requested package could not be located from the referrer.
#[derive(Debug, Error)]
#[error(
  "Could not find package '{}' from referrer '{}'{}.",
  package_name,
  referrer,
  referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default()
)]
pub struct PackageNotFoundError {
  pub package_name: String,
  pub referrer: Url,
  /// Extra information about the referrer.
  pub referrer_extra: Option<String>,
}
impl NodeJsErrorCoded for PackageNotFoundError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
  }
}
/// The referrer itself could not be mapped to an npm package.
#[derive(Debug, Error)]
#[error(
  "Could not find referrer npm package '{}'{}.",
  referrer,
  referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default()
)]
pub struct ReferrerNotFoundError {
  pub referrer: Url,
  /// Extra information about the referrer.
  pub referrer_extra: Option<String>,
}
impl NodeJsErrorCoded for ReferrerNotFoundError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
  }
}
/// I/O failure while resolving a package folder.
#[derive(Debug, Error)]
#[error("Failed resolving '{package_name}' from referrer '{referrer}'.")]
pub struct PackageFolderResolveIoError {
  pub package_name: String,
  pub referrer: Url,
  #[source]
  pub source: std::io::Error,
}
impl NodeJsErrorCoded for PackageFolderResolveIoError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
  }
}
impl NodeJsErrorCoded for PackageFolderResolveError {
  fn code(&self) -> NodeJsErrorCode {
    // All variants surface as ERR_MODULE_NOT_FOUND, but delegate anyway
    // so a future variant with a different code stays correct.
    match self.as_kind() {
      PackageFolderResolveErrorKind::PackageNotFound(e) => e.code(),
      PackageFolderResolveErrorKind::ReferrerNotFound(e) => e.code(),
      PackageFolderResolveErrorKind::Io(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum PackageFolderResolveErrorKind {
  #[error(transparent)]
  PackageNotFound(#[from] PackageNotFoundError),
  #[error(transparent)]
  ReferrerNotFound(#[from] ReferrerNotFoundError),
  #[error(transparent)]
  Io(#[from] PackageFolderResolveIoError),
}
// Error resolving a subpath within a package.
kinded_err!(PackageSubpathResolveError, PackageSubpathResolveErrorKind);
impl NodeJsErrorCoded for PackageSubpathResolveError {
  fn code(&self) -> NodeJsErrorCode {
    match self.as_kind() {
      PackageSubpathResolveErrorKind::PkgJsonLoad(e) => e.code(),
      PackageSubpathResolveErrorKind::Exports(e) => e.code(),
      PackageSubpathResolveErrorKind::LegacyResolve(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum PackageSubpathResolveErrorKind {
  #[error(transparent)]
  PkgJsonLoad(#[from] PackageJsonLoadError),
  // NOTE(review): `Exports` and `LegacyResolve` deliberately omit
  // `#[from]`, presumably to force explicit wrapping at call sites —
  // confirm before adding the conversions.
  #[error(transparent)]
  Exports(PackageExportsResolveError),
  #[error(transparent)]
  LegacyResolve(LegacyResolveError),
}
/// An "exports"/"imports" target string did not resolve to an existing
/// module (ERR_MODULE_NOT_FOUND).
#[derive(Debug, Error)]
#[error(
  "Target '{}' not found from '{}'{}{}.",
  target,
  pkg_json_path.display(),
  maybe_referrer.as_ref().map(|r|
    format!(
      " from{} referrer {}",
      match referrer_kind {
        NodeModuleKind::Esm => "",
        NodeModuleKind::Cjs => " cjs",
      },
      r
    )
  ).unwrap_or_default(),
  match mode {
    NodeResolutionMode::Execution => "",
    NodeResolutionMode::Types => " for types",
  }
)]
pub struct PackageTargetNotFoundError {
  pub pkg_json_path: PathBuf,
  pub target: String,
  pub maybe_referrer: Option<Url>,
  pub referrer_kind: NodeModuleKind,
  pub mode: NodeResolutionMode,
}
impl NodeJsErrorCoded for PackageTargetNotFoundError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
  }
}
// Error resolving a single "exports"/"imports" target value.
kinded_err!(PackageTargetResolveError, PackageTargetResolveErrorKind);
impl NodeJsErrorCoded for PackageTargetResolveError {
  fn code(&self) -> NodeJsErrorCode {
    match self.as_kind() {
      PackageTargetResolveErrorKind::NotFound(e) => e.code(),
      PackageTargetResolveErrorKind::InvalidPackageTarget(e) => e.code(),
      PackageTargetResolveErrorKind::InvalidModuleSpecifier(e) => e.code(),
      PackageTargetResolveErrorKind::PackageResolve(e) => e.code(),
      PackageTargetResolveErrorKind::TypesNotFound(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum PackageTargetResolveErrorKind {
  #[error(transparent)]
  NotFound(#[from] PackageTargetNotFoundError),
  #[error(transparent)]
  InvalidPackageTarget(#[from] InvalidPackageTargetError),
  #[error(transparent)]
  InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
  #[error(transparent)]
  PackageResolve(#[from] PackageResolveError),
  #[error(transparent)]
  TypesNotFound(#[from] TypesNotFoundError),
}
// Error resolving through a package.json "exports" field.
kinded_err!(PackageExportsResolveError, PackageExportsResolveErrorKind);
impl NodeJsErrorCoded for PackageExportsResolveError {
  fn code(&self) -> NodeJsErrorCode {
    match self.as_kind() {
      PackageExportsResolveErrorKind::PackagePathNotExported(e) => e.code(),
      PackageExportsResolveErrorKind::PackageTargetResolve(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum PackageExportsResolveErrorKind {
  #[error(transparent)]
  PackagePathNotExported(#[from] PackagePathNotExportedError),
  #[error(transparent)]
  PackageTargetResolve(#[from] PackageTargetResolveError),
}
/// No type declarations could be found for a module
/// (Deno-specific ERR_TYPES_NOT_FOUND).
#[derive(Debug, Error)]
#[error(
  "[{}] Could not find types for '{}'{}",
  self.code(),
  self.0.code_specifier,
  self.0.maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(),
)]
pub struct TypesNotFoundError(pub Box<TypesNotFoundErrorData>);
#[derive(Debug)]
pub struct TypesNotFoundErrorData {
  /// The code module whose types were looked up.
  pub code_specifier: Url,
  pub maybe_referrer: Option<Url>,
}
impl NodeJsErrorCoded for TypesNotFoundError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_TYPES_NOT_FOUND
  }
}
/// Wraps a `deno_package_json` load failure with the Node.js
/// ERR_INVALID_PACKAGE_CONFIG code prefix.
#[derive(Debug, Error)]
#[error(
  "[{}] Invalid package config. {}",
  self.code(),
  self.0
)]
pub struct PackageJsonLoadError(
  #[source]
  #[from]
  pub deno_package_json::PackageJsonLoadError,
);
impl NodeJsErrorCoded for PackageJsonLoadError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_INVALID_PACKAGE_CONFIG
  }
}
// Error finding/loading the closest package.json for a path.
kinded_err!(ClosestPkgJsonError, ClosestPkgJsonErrorKind);
impl NodeJsErrorCoded for ClosestPkgJsonError {
  fn code(&self) -> NodeJsErrorCode {
    match self.as_kind() {
      ClosestPkgJsonErrorKind::CanonicalizingDir(e) => e.code(),
      ClosestPkgJsonErrorKind::Load(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum ClosestPkgJsonErrorKind {
  #[error(transparent)]
  CanonicalizingDir(#[from] CanonicalizingPkgJsonDirError),
  #[error(transparent)]
  Load(#[from] PackageJsonLoadError),
}
/// I/O failure canonicalizing the directory that should contain the
/// package.json being searched for.
#[derive(Debug, Error)]
#[error("[{}] Failed canonicalizing package.json directory '{}'.", self.code(), dir_path.display())]
pub struct CanonicalizingPkgJsonDirError {
  pub dir_path: PathBuf,
  #[source]
  pub source: std::io::Error,
}
impl NodeJsErrorCoded for CanonicalizingPkgJsonDirError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
  }
}
/// A TypeScript file was encountered inside an npm package, which is
/// not supported (surfaces as ERR_UNKNOWN_FILE_EXTENSION).
#[derive(Debug, Error)]
#[error("TypeScript files are not supported in npm packages: {specifier}")]
pub struct TypeScriptNotSupportedInNpmError {
  pub specifier: Url,
}
impl NodeJsErrorCoded for TypeScriptNotSupportedInNpmError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_UNKNOWN_FILE_EXTENSION
  }
}
// Error converting a resolved URL into a NodeResolution.
kinded_err!(UrlToNodeResolutionError, UrlToNodeResolutionErrorKind);
impl NodeJsErrorCoded for UrlToNodeResolutionError {
  fn code(&self) -> NodeJsErrorCode {
    match self.as_kind() {
      UrlToNodeResolutionErrorKind::TypeScriptNotSupported(e) => e.code(),
      UrlToNodeResolutionErrorKind::ClosestPkgJson(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum UrlToNodeResolutionErrorKind {
  #[error(transparent)]
  TypeScriptNotSupported(#[from] TypeScriptNotSupportedInNpmError),
  #[error(transparent)]
  ClosestPkgJson(#[from] ClosestPkgJsonError),
}
// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError
/// A `#import` specifier was not defined in the package's "imports"
/// field (ERR_PACKAGE_IMPORT_NOT_DEFINED).
#[derive(Debug, Error)]
#[error(
  "[{}] Package import specifier \"{}\" is not defined{}{}",
  self.code(),
  name,
  package_json_path.as_ref().map(|p| format!(" in package {}", p.display())).unwrap_or_default(),
  maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(),
)]
pub struct PackageImportNotDefinedError {
  pub name: String,
  pub package_json_path: Option<PathBuf>,
  pub maybe_referrer: Option<Url>,
}
impl NodeJsErrorCoded for PackageImportNotDefinedError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_PACKAGE_IMPORT_NOT_DEFINED
  }
}
// Error resolving through a package.json "imports" field.
kinded_err!(PackageImportsResolveError, PackageImportsResolveErrorKind);
#[derive(Debug, Error)]
pub enum PackageImportsResolveErrorKind {
  // NOTE(review): no `#[from]` here, unlike the other variants —
  // presumably intentional to force explicit wrapping; confirm.
  #[error(transparent)]
  ClosestPkgJson(ClosestPkgJsonError),
  #[error(transparent)]
  InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
  #[error(transparent)]
  NotDefined(#[from] PackageImportNotDefinedError),
  #[error(transparent)]
  Target(#[from] PackageTargetResolveError),
}
// Implemented on the kind enum (not the boxed wrapper) here.
impl NodeJsErrorCoded for PackageImportsResolveErrorKind {
  fn code(&self) -> NodeJsErrorCode {
    match self {
      Self::ClosestPkgJson(e) => e.code(),
      Self::InvalidModuleSpecifier(e) => e.code(),
      Self::NotDefined(e) => e.code(),
      Self::Target(e) => e.code(),
    }
  }
}
// Error resolving a bare package specifier.
kinded_err!(PackageResolveError, PackageResolveErrorKind);
impl NodeJsErrorCoded for PackageResolveError {
  fn code(&self) -> NodeJsErrorCode {
    match self.as_kind() {
      PackageResolveErrorKind::ClosestPkgJson(e) => e.code(),
      PackageResolveErrorKind::InvalidModuleSpecifier(e) => e.code(),
      PackageResolveErrorKind::PackageFolderResolve(e) => e.code(),
      PackageResolveErrorKind::ExportsResolve(e) => e.code(),
      PackageResolveErrorKind::SubpathResolve(e) => e.code(),
    }
  }
}
#[derive(Debug, Error)]
pub enum PackageResolveErrorKind {
  #[error(transparent)]
  ClosestPkgJson(#[from] ClosestPkgJsonError),
  #[error(transparent)]
  InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError),
  #[error(transparent)]
  PackageFolderResolve(#[from] PackageFolderResolveError),
  #[error(transparent)]
  ExportsResolve(#[from] PackageExportsResolveError),
  #[error(transparent)]
  SubpathResolve(#[from] PackageSubpathResolveError),
}
/// Joining a relative specifier onto its base URL failed.
#[derive(Debug, Error)]
#[error("Failed joining '{path}' from '{base}'.")]
pub struct NodeResolveRelativeJoinError {
  pub path: String,
  pub base: Url,
  #[source]
  pub source: url::ParseError,
}
/// Resolving a specifier relative to a `data:` URL referrer failed.
#[derive(Debug, Error)]
#[error("Failed resolving specifier from data url referrer.")]
pub struct DataUrlReferrerError {
  #[source]
  pub source: url::ParseError,
}
// Top-level error for a node resolution attempt.
kinded_err!(NodeResolveError, NodeResolveErrorKind);
#[derive(Debug, Error)]
pub enum NodeResolveErrorKind {
  #[error(transparent)]
  RelativeJoin(#[from] NodeResolveRelativeJoinError),
  #[error(transparent)]
  PackageImportsResolve(#[from] PackageImportsResolveError),
  #[error(transparent)]
  UnsupportedEsmUrlScheme(#[from] UnsupportedEsmUrlSchemeError),
  #[error(transparent)]
  DataUrlReferrer(#[from] DataUrlReferrerError),
  #[error(transparent)]
  PackageResolve(#[from] PackageResolveError),
  #[error(transparent)]
  TypesNotFound(#[from] TypesNotFoundError),
  #[error(transparent)]
  FinalizeResolution(#[from] FinalizeResolutionError),
  #[error(transparent)]
  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
}
// Error during the final validation step of a resolved URL.
kinded_err!(FinalizeResolutionError, FinalizeResolutionErrorKind);
#[derive(Debug, Error)]
pub enum FinalizeResolutionErrorKind {
  #[error(transparent)]
  InvalidModuleSpecifierError(#[from] InvalidModuleSpecifierError),
  #[error(transparent)]
  ModuleNotFound(#[from] ModuleNotFoundError),
  #[error(transparent)]
  UnsupportedDirImport(#[from] UnsupportedDirImportError),
}
impl NodeJsErrorCoded for FinalizeResolutionError {
  fn code(&self) -> NodeJsErrorCode {
    match self.as_kind() {
      FinalizeResolutionErrorKind::InvalidModuleSpecifierError(e) => e.code(),
      FinalizeResolutionErrorKind::ModuleNotFound(e) => e.code(),
      FinalizeResolutionErrorKind::UnsupportedDirImport(e) => e.code(),
    }
  }
}
/// The resolved module (or package) does not exist
/// (ERR_MODULE_NOT_FOUND).
#[derive(Debug, Error)]
#[error(
  "[{}] Cannot find {} '{}'{}",
  self.code(),
  typ,
  specifier,
  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default()
)]
pub struct ModuleNotFoundError {
  pub specifier: Url,
  pub maybe_referrer: Option<Url>,
  /// What kind of thing was being looked up (ex. "module", "package").
  pub typ: &'static str,
}
impl NodeJsErrorCoded for ModuleNotFoundError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_MODULE_NOT_FOUND
  }
}
/// A directory was imported as an ES module
/// (ERR_UNSUPPORTED_DIR_IMPORT).
#[derive(Debug, Error)]
#[error(
  "[{}] Directory import '{}' is not supported resolving ES modules{}",
  self.code(),
  dir_url,
  maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default(),
)]
pub struct UnsupportedDirImportError {
  pub dir_url: Url,
  pub maybe_referrer: Option<Url>,
}
impl NodeJsErrorCoded for UnsupportedDirImportError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_UNSUPPORTED_DIR_IMPORT
  }
}
/// An invalid "exports"/"imports" target value in a package.json
/// (ERR_INVALID_PACKAGE_TARGET). Display is hand-written because the
/// message shape varies with `sub_path`/`is_import`.
#[derive(Debug)]
pub struct InvalidPackageTargetError {
  pub pkg_json_path: PathBuf,
  pub sub_path: String,
  pub target: String,
  /// True when the target came from "imports" rather than "exports".
  pub is_import: bool,
  pub maybe_referrer: Option<Url>,
}
impl std::error::Error for InvalidPackageTargetError {}
impl std::fmt::Display for InvalidPackageTargetError {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    // Non-empty "exports" targets (not "imports") must start with "./";
    // when they don't, a hint is appended below.
    let rel_error = !self.is_import
      && !self.target.is_empty()
      && !self.target.starts_with("./");
    f.write_char('[')?;
    f.write_str(self.code().as_str())?;
    f.write_char(']')?;
    if self.sub_path == "." {
      // The "." subpath is the main export; it can't come from "imports".
      assert!(!self.is_import);
      write!(
        f,
        " Invalid \"exports\" main target {} defined in the package config {}",
        self.target,
        self.pkg_json_path.display()
      )?;
    } else {
      let ie = if self.is_import { "imports" } else { "exports" };
      write!(
        f,
        " Invalid \"{}\" target {} defined for '{}' in the package config {}",
        ie,
        self.target,
        self.sub_path,
        self.pkg_json_path.display()
      )?;
    };
    if let Some(referrer) = &self.maybe_referrer {
      write!(f, " imported from '{}'", referrer)?;
    }
    if rel_error {
      write!(f, "; target must start with \"./\"")?;
    }
    Ok(())
  }
}
impl NodeJsErrorCoded for InvalidPackageTargetError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_INVALID_PACKAGE_TARGET
  }
}
/// The requested subpath is not declared by the package's "exports"
/// field (ERR_PACKAGE_PATH_NOT_EXPORTED). Display is hand-written; its
/// exact output is pinned by the tests below.
#[derive(Debug)]
pub struct PackagePathNotExportedError {
  pub pkg_json_path: PathBuf,
  pub subpath: String,
  pub maybe_referrer: Option<Url>,
  /// Whether execution code or type declarations were being resolved.
  pub mode: NodeResolutionMode,
}
impl NodeJsErrorCoded for PackagePathNotExportedError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_PACKAGE_PATH_NOT_EXPORTED
  }
}
impl std::error::Error for PackagePathNotExportedError {}
impl std::fmt::Display for PackagePathNotExportedError {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    f.write_char('[')?;
    f.write_str(self.code().as_str())?;
    f.write_char(']')?;
    let types_msg = match self.mode {
      NodeResolutionMode::Execution => String::new(),
      NodeResolutionMode::Types => " for types".to_string(),
    };
    if self.subpath == "." {
      // "." means the package's main export.
      write!(
        f,
        " No \"exports\" main defined{} in '{}'",
        types_msg,
        self.pkg_json_path.display()
      )?;
    } else {
      write!(
        f,
        " Package subpath '{}' is not defined{} by \"exports\" in '{}'",
        self.subpath,
        types_msg,
        self.pkg_json_path.display()
      )?;
    };
    if let Some(referrer) = &self.maybe_referrer {
      write!(f, " imported from '{}'", referrer)?;
    }
    Ok(())
  }
}
/// An import specifier used a URL scheme other than `file:` or `data:`
/// (ERR_UNSUPPORTED_ESM_URL_SCHEME).
///
/// Fix: the Windows hint previously read "absolute path must be valid
/// file:// URLS." — corrected to match Node.js' actual wording
/// ("absolute paths must be valid file:// URLs.").
#[derive(Debug, Clone, Error)]
#[error(
  "[{}] Only file and data URLs are supported by the default ESM loader.{} Received protocol '{}'",
  self.code(),
  // NOTE(review): the `len() == 2` check presumably targets drive-letter
  // style specifiers mis-parsed as URLs on Windows — confirm.
  if cfg!(windows) && url_scheme.len() == 2 { " On Windows, absolute paths must be valid file:// URLs."} else { "" },
  url_scheme
)]
pub struct UnsupportedEsmUrlSchemeError {
  /// The offending URL scheme (ex. "http").
  pub url_scheme: String,
}
impl NodeJsErrorCoded for UnsupportedEsmUrlSchemeError {
  fn code(&self) -> NodeJsErrorCode {
    NodeJsErrorCode::ERR_UNSUPPORTED_ESM_URL_SCHEME
  }
}
/// Errors that can occur resolving a package.json "bin" export.
#[derive(Debug, Error)]
pub enum ResolvePkgJsonBinExportError {
  #[error(transparent)]
  PkgJsonLoad(#[from] PackageJsonLoadError),
  #[error("Failed resolving binary export. '{}' did not exist", pkg_json_path.display())]
  MissingPkgJson { pkg_json_path: PathBuf },
  #[error("Failed resolving binary export. {message}")]
  InvalidBinProperty { message: String },
  #[error(transparent)]
  UrlToNodeResolution(#[from] UrlToNodeResolutionError),
}
/// Errors that can occur enumerating a package's binary commands.
#[derive(Debug, Error)]
pub enum ResolveBinaryCommandsError {
  #[error(transparent)]
  PkgJsonLoad(#[from] PackageJsonLoadError),
  #[error("'{}' did not have a name", pkg_json_path.display())]
  MissingPkgJsonName { pkg_json_path: PathBuf },
}
#[cfg(test)]
mod test {
  use super::*;

  // Pins the exact Display output of PackagePathNotExportedError in
  // types-resolution mode for both the subpath and main-export shapes.
  #[test]
  fn types_resolution_package_path_not_exported() {
    // package.json path is platform dependent, so build the expectation
    // with the platform separator.
    let separator_char = if cfg!(windows) { '\\' } else { '/' };
    assert_eq!(
      PackagePathNotExportedError {
        pkg_json_path: PathBuf::from("test_path").join("package.json"),
        subpath: "./jsx-runtime".to_string(),
        maybe_referrer: None,
        mode: NodeResolutionMode::Types
      }.to_string(),
      format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] Package subpath './jsx-runtime' is not defined for types by \"exports\" in 'test_path{separator_char}package.json'")
    );
    assert_eq!(
      PackagePathNotExportedError {
        pkg_json_path: PathBuf::from("test_path").join("package.json"),
        subpath: ".".to_string(),
        maybe_referrer: None,
        mode: NodeResolutionMode::Types
      }.to_string(),
      format!("[ERR_PACKAGE_PATH_NOT_EXPORTED] No \"exports\" main defined for types in 'test_path{separator_char}package.json'")
    );
  }
}

27
resolvers/node/lib.rs Normal file
View file

@ -0,0 +1,27 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
#![deny(clippy::print_stderr)]
#![deny(clippy::print_stdout)]

// Public modules.
pub mod analyze;
pub mod env;
pub mod errors;
// Private modules; selected items are re-exported below.
mod npm;
mod package_json;
mod path;
mod resolution;
mod sync;

// Re-exports forming the crate's public API surface.
pub use deno_package_json::PackageJson;
pub use npm::NpmResolver;
pub use npm::NpmResolverRc;
pub use package_json::load_pkg_json;
pub use package_json::PackageJsonThreadLocalCache;
pub use path::PathClean;
pub use resolution::parse_npm_pkg_name;
pub use resolution::NodeModuleKind;
pub use resolution::NodeResolution;
pub use resolution::NodeResolutionMode;
pub use resolution::NodeResolver;
pub use resolution::DEFAULT_CONDITIONS;
pub use resolution::REQUIRE_CONDITIONS;

41
resolvers/node/npm.rs Normal file
View file

@ -0,0 +1,41 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::Path;
use std::path::PathBuf;
use url::Url;
use crate::errors;
use crate::path::PathClean;
use crate::sync::MaybeSend;
use crate::sync::MaybeSync;
/// Reference-counted handle to an `NpmResolver` (`Arc` or `Rc` depending
/// on the "sync" feature; see the `sync` module).
#[allow(clippy::disallowed_types)]
pub type NpmResolverRc = crate::sync::MaybeArc<dyn NpmResolver>;
/// Implemented by npm package resolvers so node resolution can locate
/// package folders and classify specifiers.
pub trait NpmResolver: std::fmt::Debug + MaybeSend + MaybeSync {
  /// Resolves an npm package folder path from an npm package referrer.
  fn resolve_package_folder_from_package(
    &self,
    specifier: &str,
    referrer: &Url,
  ) -> Result<PathBuf, errors::PackageFolderResolveError>;

  /// Whether the specifier points inside an npm package.
  fn in_npm_package(&self, specifier: &Url) -> bool;

  /// Whether the given directory path lies inside an npm package.
  /// Returns false when the path cannot be expressed as a URL.
  fn in_npm_package_at_dir_path(&self, path: &Path) -> bool {
    Url::from_directory_path(path.to_path_buf().clean())
      .map_or(false, |specifier| self.in_npm_package(&specifier))
  }

  /// Whether the given file path lies inside an npm package.
  /// Returns false when the path cannot be expressed as a URL.
  fn in_npm_package_at_file_path(&self, path: &Path) -> bool {
    Url::from_file_path(path.to_path_buf().clean())
      .map_or(false, |specifier| self.in_npm_package(&specifier))
  }
}

View file

@ -0,0 +1,53 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use deno_package_json::PackageJson;
use deno_package_json::PackageJsonRc;
use std::cell::RefCell;
use std::collections::HashMap;
use std::io::ErrorKind;
use std::path::Path;
use std::path::PathBuf;
use crate::errors::PackageJsonLoadError;
// use a thread local cache so that workers have their own distinct cache
thread_local! {
  // Maps a package.json path to its parsed, shared representation.
  static CACHE: RefCell<HashMap<PathBuf, PackageJsonRc>> = RefCell::new(HashMap::new());
}
/// Marker type implementing `PackageJsonCache` over the thread local
/// `CACHE` above.
pub struct PackageJsonThreadLocalCache;
impl PackageJsonThreadLocalCache {
  /// Clears the cache for the current thread only.
  pub fn clear() {
    CACHE.with(|cache| cache.borrow_mut().clear());
  }
}
// Cache reads/writes go through the current thread's CACHE map.
impl deno_package_json::PackageJsonCache for PackageJsonThreadLocalCache {
  fn get(&self, path: &Path) -> Option<PackageJsonRc> {
    CACHE.with(|cache| cache.borrow().get(path).cloned())
  }
  fn set(&self, path: PathBuf, package_json: PackageJsonRc) {
    CACHE.with(|cache| cache.borrow_mut().insert(path, package_json));
  }
}
/// Helper to load a package.json file using the thread local cache
/// in node_resolver.
///
/// Returns `Ok(None)` when no package.json exists at `path`; any other
/// load failure is wrapped in a `PackageJsonLoadError`.
pub fn load_pkg_json(
  fs: &dyn deno_package_json::fs::DenoPkgJsonFs,
  path: &Path,
) -> Result<Option<PackageJsonRc>, PackageJsonLoadError> {
  match PackageJson::load_from_path(path, fs, Some(&PackageJsonThreadLocalCache))
  {
    Ok(pkg_json) => Ok(Some(pkg_json)),
    // A missing file is an expected outcome, not an error.
    Err(deno_package_json::PackageJsonLoadError::Io { source, .. })
      if source.kind() == ErrorKind::NotFound =>
    {
      Ok(None)
    }
    Err(err) => Err(PackageJsonLoadError(err)),
  }
}

179
resolvers/node/path.rs Normal file
View file

@ -0,0 +1,179 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
use std::path::Component;
use std::path::Path;
use std::path::PathBuf;
use url::Url;
/// Extension to path_clean::PathClean
pub trait PathClean<T> {
  fn clean(&self) -> T;
}
impl PathClean<PathBuf> for PathBuf {
  /// Normalizes the path, working around `path_clean`'s handling of
  /// Windows-style `\.` / `\..` segments.
  fn clean(&self) -> PathBuf {
    // Returns false when the cleaned string still contains a `\.` or
    // `\..` segment (followed by a separator or end of string), which
    // means path_clean did not fully normalize it.
    fn is_clean_path(path: &Path) -> bool {
      let path = path.to_string_lossy();
      let mut current_index = 0;
      while let Some(index) = path[current_index..].find("\\.") {
        let trailing_index = index + current_index + 2;
        let mut trailing_chars = path[trailing_index..].chars();
        match trailing_chars.next() {
          Some('.') => match trailing_chars.next() {
            Some('/') | Some('\\') | None => {
              return false;
            }
            _ => {}
          },
          Some('/') | Some('\\') => {
            return false;
          }
          _ => {}
        }
        current_index = trailing_index;
      }
      true
    }

    let path = path_clean::PathClean::clean(self);
    if cfg!(windows) && !is_clean_path(&path) {
      // temporary workaround because path_clean::PathClean::clean is
      // not good enough on windows
      let mut components = Vec::new();
      for component in path.components() {
        match component {
          Component::CurDir => {
            // skip
          }
          Component::ParentDir => {
            // Pop the previous normal component; anything else means the
            // path escapes its root, which is treated as a bug here.
            let maybe_last_component = components.pop();
            if !matches!(maybe_last_component, Some(Component::Normal(_))) {
              panic!("Error normalizing: {}", path.display());
            }
          }
          Component::Normal(_) | Component::RootDir | Component::Prefix(_) => {
            components.push(component);
          }
        }
      }
      components.into_iter().collect::<PathBuf>()
    } else {
      path
    }
  }
}
/// Converts an absolute file path into a `file:` URL, panicking when the
/// path cannot be represented as one.
pub(crate) fn to_file_specifier(path: &Path) -> Url {
  Url::from_file_path(path)
    .unwrap_or_else(|()| panic!("Invalid path: {}", path.display()))
}
// todo(dsherret): we have the below code also in deno_core and it
// would be good to somehow re-use it in both places (we don't want
// to create a dependency on deno_core here)

/// No-op on non-Windows platforms (UNC prefixes only exist on Windows).
#[cfg(not(windows))]
#[inline]
pub fn strip_unc_prefix(path: PathBuf) -> PathBuf {
  path
}

/// Strips the unc prefix (ex. \\?\) from Windows paths.
#[cfg(windows)]
pub fn strip_unc_prefix(path: PathBuf) -> PathBuf {
  use std::path::Component;
  use std::path::Prefix;

  let mut components = path.components();
  match components.next() {
    Some(Component::Prefix(prefix)) => {
      match prefix.kind() {
        // \\?\device
        Prefix::Verbatim(device) => {
          let mut path = PathBuf::new();
          path.push(format!(r"\\{}\", device.to_string_lossy()));
          path.extend(components.filter(|c| !matches!(c, Component::RootDir)));
          path
        }
        // \\?\c:\path
        Prefix::VerbatimDisk(_) => {
          let mut path = PathBuf::new();
          path.push(prefix.as_os_str().to_string_lossy().replace(r"\\?\", ""));
          path.extend(components);
          path
        }
        // \\?\UNC\hostname\share_name\path
        Prefix::VerbatimUNC(hostname, share_name) => {
          let mut path = PathBuf::new();
          path.push(format!(
            r"\\{}\{}\",
            hostname.to_string_lossy(),
            share_name.to_string_lossy()
          ));
          path.extend(components.filter(|c| !matches!(c, Component::RootDir)));
          path
        }
        // Not a verbatim prefix; leave the path untouched.
        _ => path,
      }
    }
    _ => path,
  }
}
#[cfg(test)]
mod test {
  // Windows-only: exercises the Windows workaround branch of clean().
  #[cfg(windows)]
  #[test]
  fn test_path_clean() {
    use super::*;

    run_test("C:\\test\\./file.txt", "C:\\test\\file.txt");
    run_test("C:\\test\\../other/file.txt", "C:\\other\\file.txt");
    run_test("C:\\test\\../other\\file.txt", "C:\\other\\file.txt");

    fn run_test(input: &str, expected: &str) {
      assert_eq!(PathBuf::from(input).clean(), PathBuf::from(expected));
    }
  }

  // Windows-only: covers every verbatim prefix shape handled by
  // strip_unc_prefix, plus paths that must pass through unchanged.
  #[cfg(windows)]
  #[test]
  fn test_strip_unc_prefix() {
    use std::path::PathBuf;

    run_test(r"C:\", r"C:\");
    run_test(r"C:\test\file.txt", r"C:\test\file.txt");
    run_test(r"\\?\C:\", r"C:\");
    run_test(r"\\?\C:\test\file.txt", r"C:\test\file.txt");
    run_test(r"\\.\C:\", r"\\.\C:\");
    run_test(r"\\.\C:\Test\file.txt", r"\\.\C:\Test\file.txt");
    run_test(r"\\?\UNC\localhost\", r"\\localhost");
    run_test(r"\\?\UNC\localhost\c$\", r"\\localhost\c$");
    run_test(
      r"\\?\UNC\localhost\c$\Windows\file.txt",
      r"\\localhost\c$\Windows\file.txt",
    );
    run_test(r"\\?\UNC\wsl$\deno.json", r"\\wsl$\deno.json");
    run_test(r"\\?\server1", r"\\server1");
    run_test(r"\\?\server1\e$\", r"\\server1\e$\");
    run_test(
      r"\\?\server1\e$\test\file.txt",
      r"\\server1\e$\test\file.txt",
    );

    fn run_test(input: &str, expected: &str) {
      assert_eq!(
        super::strip_unc_prefix(PathBuf::from(input)),
        PathBuf::from(expected)
      );
    }
  }
}

2023
resolvers/node/resolution.rs Normal file

File diff suppressed because it is too large Load diff

23
resolvers/node/sync.rs Normal file
View file

@ -0,0 +1,23 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
pub use inner::*;

// With the "sync" feature the crate is thread-safe: real `Arc` and the
// std `Send`/`Sync` markers are used.
#[cfg(feature = "sync")]
mod inner {
  #![allow(clippy::disallowed_types)]
  pub use std::sync::Arc as MaybeArc;
  pub use core::marker::Send as MaybeSend;
  pub use core::marker::Sync as MaybeSync;
}

// Without the feature, `Rc` replaces `Arc` and the marker traits are
// no-op blanket impls, so the same bounds compile either way.
#[cfg(not(feature = "sync"))]
mod inner {
  pub use std::rc::Rc as MaybeArc;
  pub trait MaybeSync {}
  impl<T> MaybeSync for T where T: ?Sized {}
  pub trait MaybeSend {}
  impl<T> MaybeSend for T where T: ?Sized {}
}