chore: Rust 1.86.0 (#29435)

Since `rust 1.87.0` reported `undefined symbol:
ring::pbkdf2::PBKDF2_HMAC_SHA1::*` in CI and it was difficult to debug
locally, use `rust 1.86.0` in CI tests to troubleshoot the errors.
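
For reference, the missing symbol belongs to ring's PBKDF2 API. A minimal sketch of the kind of call that exercises `PBKDF2_HMAC_SHA1` (illustrative only, assuming the `ring` crate; not code from this repository):

```rust
use std::num::NonZeroU32;

use ring::pbkdf2;

// Derive a 20-byte key; PBKDF2_HMAC_SHA1 is the algorithm whose symbol
// failed to resolve in CI.
fn derive_key(password: &[u8], salt: &[u8]) -> [u8; 20] {
    let mut out = [0u8; 20];
    pbkdf2::derive(
        pbkdf2::PBKDF2_HMAC_SHA1,
        NonZeroU32::new(100_000).unwrap(),
        salt,
        password,
        &mut out,
    );
    out
}

fn main() {
    let key = derive_key(b"password", b"salt");
    assert_eq!(key.len(), 20);
}
```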
林炳权 2025-05-26 05:40:48 +08:00 committed by GitHub
parent bc8b191b7c
commit cb738ee5da
45 changed files with 138 additions and 133 deletions

View file

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 56;
+const cacheVersion = 57;
 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
@@ -88,7 +88,7 @@ export DEBIAN_FRONTEND=noninteractive
 sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
 # Remove older clang before we install
 sudo apt-get -qq remove \
-'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
+'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'llvm-17*' 'llvm-18*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
 # Install clang-XXX, lld-XXX, and debootstrap.
 echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" |
@@ -105,7 +105,7 @@ ${installPkgsCommand} || echo 'Failed. Trying again.' && sudo apt-get clean && s
 clang-${llvmVersion} -c -o /tmp/memfd_create_shim.o tools/memfd_create_shim.c -fPIC
 echo "Decompressing sysroot..."
-wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
+wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20250207/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
 cd /
 xzcat /tmp/sysroot.tar.xz | sudo tar -x
 sudo mount --rbind /dev /sysroot/dev

View file

@@ -187,8 +187,8 @@ jobs:
 ~/.cargo/registry/index
 ~/.cargo/registry/cache
 ~/.cargo/git/db
-key: '56-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-restore-keys: '56-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
+key: '57-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+restore-keys: '57-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
 if: '!(matrix.skip)'
 - uses: dsherret/rust-toolchain-file@v1
 if: '!(matrix.skip)'
@@ -268,7 +268,7 @@ jobs:
 # to complete.
 sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
 # Remove older clang before we install
-sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
+sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'llvm-17*' 'llvm-18*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
 # Install clang-XXX, lld-XXX, and debootstrap.
 echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" |
@@ -285,7 +285,7 @@ jobs:
 clang-19 -c -o /tmp/memfd_create_shim.o tools/memfd_create_shim.c -fPIC
 echo "Decompressing sysroot..."
-wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
+wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20250207/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
 cd /
 xzcat /tmp/sysroot.tar.xz | sudo tar -x
 sudo mount --rbind /dev /sysroot/dev
@@ -390,7 +390,7 @@ jobs:
 !./target/*/*.zip
 !./target/*/*.tar.gz
 key: never_saved
-restore-keys: '56-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+restore-keys: '57-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
 - name: Apply and update mtime cache
 if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
 uses: ./.github/mtime_cache
@@ -779,7 +779,7 @@ jobs:
 !./target/*/gn_root
 !./target/*/*.zip
 !./target/*/*.tar.gz
-key: '56-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+key: '57-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
 wasm:
 name: build wasm32
 needs:

View file

@@ -5645,10 +5645,9 @@ fn escape_and_split_commas(s: String) -> Result<Vec<String>, clap::Error> {
 } else {
 if current.is_empty() {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Empty values are not allowed"),
-)
+std::io::Error::other(String::from(
+"Empty values are not allowed",
+))
 .into(),
 );
 }
@@ -5659,11 +5658,8 @@ fn escape_and_split_commas(s: String) -> Result<Vec<String>, clap::Error> {
 }
 } else {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Empty values are not allowed"),
-)
-.into(),
+std::io::Error::other(String::from("Empty values are not allowed"))
+.into(),
 );
 }
 } else {
@@ -5673,11 +5669,8 @@ fn escape_and_split_commas(s: String) -> Result<Vec<String>, clap::Error> {
 if current.is_empty() {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Empty values are not allowed"),
-)
-.into(),
+std::io::Error::other(String::from("Empty values are not allowed"))
+.into(),
 );
 }
@@ -6033,18 +6026,15 @@ fn node_modules_and_vendor_dir_arg_parse(
 fn reload_arg_validate(urlstr: String) -> Result<String, clap::Error> {
 if urlstr.is_empty() {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Missing url. Check for extra commas."),
-)
+std::io::Error::other(String::from(
+"Missing url. Check for extra commas.",
+))
 .into(),
 );
 }
 match Url::from_str(&urlstr) {
 Ok(_) => Ok(urlstr),
-Err(e) => {
-Err(std::io::Error::new(std::io::ErrorKind::Other, e.to_string()).into())
-}
+Err(e) => Err(std::io::Error::other(e.to_string()).into()),
 }
 }
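
Note: the recurring pattern in this and later files replaces the two-argument `std::io::Error::new(ErrorKind::Other, ..)` constructor with `std::io::Error::other(..)`, which has been stable since Rust 1.74 and always yields `ErrorKind::Other`. A minimal sketch of the equivalence (illustrative only, not project code):

```rust
use std::io;

// Older spelling: kind passed explicitly.
fn old_style(msg: &str) -> io::Error {
    io::Error::new(io::ErrorKind::Other, msg.to_string())
}

// Newer spelling: same error kind, less ceremony.
fn new_style(msg: &str) -> io::Error {
    io::Error::other(msg.to_string())
}

fn main() {
    assert_eq!(old_style("oops").kind(), new_style("oops").kind());
}
```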

View file

@@ -187,6 +187,7 @@ impl ModuleInfoCacheModuleAnalyzer<'_> {
 }
 }
+#[allow(clippy::result_large_err)]
 pub fn analyze_sync(
 &self,
 specifier: &ModuleSpecifier,
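
Note: `clippy::result_large_err` fires when the `Err` variant of a returned `Result` is big enough that every call pays for moving it around; the updated toolchain appears to flag more such signatures, and this commit opts out with `#[allow]` rather than boxing. A self-contained sketch of the lint and the allow (hypothetical types, not from this codebase):

```rust
// A deliberately large error type; clippy::result_large_err warns when the
// Err variant exceeds its size threshold (128 bytes by default).
pub struct BigError {
    pub context: [u8; 512],
}

// Without the attribute, newer clippy versions may warn on this signature.
#[allow(clippy::result_large_err)]
pub fn parse(input: &str) -> Result<u32, BigError> {
    input
        .parse::<u32>()
        .map_err(|_| BigError { context: [0; 512] })
}

fn main() {
    assert!(parse("42").is_ok());
    assert_eq!(parse("nope").unwrap_err().context.len(), 512);
}
```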

View file

@@ -31,6 +31,7 @@ impl<'a> LazyGraphSourceParser<'a> {
 Self { cache, graph }
 }
+#[allow(clippy::result_large_err)]
 pub fn get_or_parse_source(
 &self,
 module_specifier: &ModuleSpecifier,
@@ -52,6 +53,7 @@ pub struct ParsedSourceCache {
 }
 impl ParsedSourceCache {
+#[allow(clippy::result_large_err)]
 pub fn get_parsed_source_from_js_module(
 &self,
 module: &deno_graph::JsModule,
@@ -66,6 +68,7 @@ impl ParsedSourceCache {
 })
 }
+#[allow(clippy::result_large_err)]
 pub fn remove_or_parse_module(
 &self,
 specifier: &ModuleSpecifier,

View file

@@ -153,6 +153,7 @@ impl Emitter {
 }
 }
+#[allow(clippy::result_large_err)]
 pub fn emit_parsed_source_sync(
 &self,
 specifier: &ModuleSpecifier,
@@ -356,6 +357,7 @@ impl EmitParsedSourceHelper<'_> {
 }
 }
+#[allow(clippy::result_large_err)]
 pub fn transpile(
 parsed_source_cache: &ParsedSourceCache,
 specifier: &ModuleSpecifier,

View file

@@ -573,6 +573,7 @@ impl ModuleGraphCreator {
 self.module_graph_builder.graph_valid(graph)
 }
+#[allow(clippy::result_large_err)]
 fn type_check_graph(
 &self,
 graph: ModuleGraph,

View file

@@ -557,6 +557,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
 }
 }
+#[allow(clippy::result_large_err)]
 pub fn create_main_worker(
 &self,
 mode: WorkerExecutionMode,
@@ -573,6 +574,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
 )
 }
+#[allow(clippy::result_large_err)]
 pub fn create_custom_worker(
 &self,
 mode: WorkerExecutionMode,
@@ -694,6 +696,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
 })
 }
+#[allow(clippy::result_large_err)]
 pub fn resolve_npm_binary_entrypoint(
 &self,
 package_folder: &Path,
@@ -797,26 +800,31 @@ impl LibMainWorker {
 }
 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_load_event(&mut self) -> Result<(), JsError> {
 self.worker.dispatch_load_event()
 }
 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, JsError> {
 self.worker.dispatch_beforeunload_event()
 }
 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_beforeexit_event(&mut self) -> Result<bool, JsError> {
 self.worker.dispatch_process_beforeexit_event()
 }
 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> {
 self.worker.dispatch_unload_event()
 }
 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> {
 self.worker.dispatch_process_exit_event()
 }

View file

@@ -964,7 +964,7 @@ impl Config {
 let mut folders = vec![];
 for root_url in root_urls {
 let root_uri = url_to_uri(&root_url).unwrap();
-let name = root_url.path_segments().and_then(|s| s.last());
+let name = root_url.path_segments().and_then(|mut s| s.next_back());
 let name = name.unwrap_or_default().to_string();
 folders.push((
 Arc::new(root_url),
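
Note: `Url::path_segments()` yields a `DoubleEndedIterator`, so the last segment can be taken with `next_back()` instead of `.last()`, which newer clippy discourages on double-ended iterators (presumably via `double_ended_iterator_last`); the closure needs a `mut` binding because `next_back` takes `&mut self`. An isolated sketch of the same pattern, assuming the `url` crate (illustrative only):

```rust
use url::Url;

fn last_segment(url: &Url) -> Option<String> {
    // next_back() reads from the end directly; .last() on a
    // DoubleEndedIterator would walk the whole iterator to get there.
    url.path_segments()
        .and_then(|mut segments| segments.next_back())
        .map(str::to_string)
}

fn main() {
    let url = Url::parse("https://example.com/a/b/c").unwrap();
    assert_eq!(last_segment(&url).as_deref(), Some("c"));
}
```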

View file

@@ -1942,6 +1942,7 @@ fn parse_and_analyze_module(
 )
 }
+#[allow(clippy::result_large_err)]
 fn parse_source(
 specifier: ModuleSpecifier,
 text: Arc<str>,

View file

@@ -914,7 +914,7 @@ impl Inner {
 let mut root_url = uri_to_url(&root_uri);
 let name = root_url
 .path_segments()
-.and_then(|s| s.last())
+.and_then(|mut s| s.next_back())
 .unwrap_or_default()
 .to_string();
 if !root_url.path().ends_with('/') {
@@ -1976,7 +1976,6 @@ impl Inner {
 }
 #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
 fn resolution_to_hover_text(
 &self,
 resolution: &Resolution,
@@ -4566,7 +4565,6 @@ impl Inner {
 }
 #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
 fn prepare_cache(
 &mut self,
 specifiers: Vec<ModuleSpecifier>,
@@ -4665,7 +4663,6 @@ impl Inner {
 }
 #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
 fn pre_did_change_workspace_folders(
 &mut self,
 params: DidChangeWorkspaceFoldersParams,

View file

@@ -171,7 +171,7 @@ impl TestModule {
 self
 .specifier
 .path_segments()
-.and_then(|s| s.last().map(|s| s.to_string()))
+.and_then(|mut s| s.next_back().map(|s| s.to_string()))
 .unwrap_or_else(|| "<unknown>".to_string())
 }
 }

View file

@@ -614,6 +614,7 @@ impl<TGraphContainer: ModuleGraphContainer>
 })
 }
+#[allow(clippy::result_large_err)]
 fn resolve_referrer(
 &self,
 referrer: &str,
@@ -641,6 +642,7 @@ impl<TGraphContainer: ModuleGraphContainer>
 }
 }
+#[allow(clippy::result_large_err)]
 fn inner_resolve(
 &self,
 raw_specifier: &str,

View file

@@ -205,6 +205,7 @@ pub enum LintError {
 #[op2]
 #[buffer]
+#[allow(clippy::result_large_err)]
 fn op_lint_create_serialized_ast(
 #[string] file_name: &str,
 #[string] source: String,

View file

@@ -56,7 +56,7 @@ pub struct StandaloneData {
 /// then checking for the magic trailer string `d3n0l4nd`. If found,
 /// the bundle is executed. If not, this function exits with `Ok(None)`.
 pub fn extract_standalone(
-cli_args: Cow<Vec<OsString>>,
+cli_args: Cow<[OsString]>,
 ) -> Result<StandaloneData, AnyError> {
 let data = find_section()?;
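
Note: switching the parameter from `Cow<Vec<OsString>>` to `Cow<[OsString]>` lets callers lend a plain slice instead of materializing a `Vec` for the borrowed case, while the owned case still holds a `Vec` (the `ToOwned` type for `[T]`). A minimal illustration with a hypothetical function, not the real `extract_standalone`:

```rust
use std::borrow::Cow;
use std::ffi::OsString;

// Borrowed case needs only a slice; owned case still accepts a Vec,
// since Cow<[T]>::Owned is Vec<T>.
fn count_args(args: Cow<[OsString]>) -> usize {
    args.len()
}

fn main() {
    let owned: Vec<OsString> = std::env::args_os().collect();
    assert_eq!(count_args(Cow::Borrowed(&owned)), owned.len());
    assert_eq!(count_args(Cow::Owned(owned.clone())), owned.len());
}
```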

View file

@@ -935,10 +935,7 @@ impl VfsRoot {
 match entry {
 VfsEntryRef::Symlink(symlink) => {
 if !seen.insert(path.to_path_buf()) {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"circular symlinks",
-));
+return Err(std::io::Error::other("circular symlinks"));
 }
 path = Cow::Owned(symlink.resolve_dest_from_root(&self.root_path));
 }
@@ -1388,10 +1385,9 @@ impl FileBackedVfs {
 VfsEntryRef::Symlink(symlink) => {
 Ok(symlink.resolve_dest_from_root(&self.fs_root.root_path))
 }
-VfsEntryRef::Dir(_) | VfsEntryRef::File(_) => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"not a symlink",
-)),
+VfsEntryRef::Dir(_) | VfsEntryRef::File(_) => {
+Err(std::io::Error::other("not a symlink"))
+}
 }
 }
@@ -1467,20 +1463,14 @@
 match entry {
 VfsEntryRef::Dir(dir) => Ok(dir),
 VfsEntryRef::Symlink(_) => unreachable!(),
-VfsEntryRef::File(_) => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"path is a file",
-)),
+VfsEntryRef::File(_) => Err(std::io::Error::other("path is a file")),
 }
 }
 pub fn file_entry(&self, path: &Path) -> std::io::Result<&VirtualFile> {
 let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?;
 match entry {
-VfsEntryRef::Dir(_) => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"path is a directory",
-)),
+VfsEntryRef::Dir(_) => Err(std::io::Error::other("path is a directory")),
 VfsEntryRef::Symlink(_) => unreachable!(),
 VfsEntryRef::File(file) => Ok(file),
 }

View file

@@ -101,15 +101,15 @@ pub async fn format(
 paths_with_options.paths = if let Some(paths) = &changed_paths {
 if fmt_flags.check {
 // check all files on any changed (https://github.com/denoland/deno/issues/12446)
-files
-.iter()
-.any(|path| {
-canonicalize_path(path)
-.map(|path| paths.contains(&path))
-.unwrap_or(false)
-})
-.then_some(files)
-.unwrap_or_else(|| [].to_vec())
+if files.iter().any(|path| {
+canonicalize_path(path)
+.map(|path| paths.contains(&path))
+.unwrap_or(false)
+}) {
+files
+} else {
+[].to_vec()
+}
 } else {
 files
 .into_iter()
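
Note: the `fmt` and `lint` watch paths drop the `bool::then_some(..).unwrap_or_else(..)` chain in favour of a plain `if`/`else`, which newer clippy tends to flag as obfuscated. A stripped-down sketch of the two spellings, with generic names rather than the project's:

```rust
fn pick_files(changed: bool, files: Vec<String>) -> Vec<String> {
    // Former style, harder to read and moves `files` eagerly:
    // changed.then_some(files).unwrap_or_else(Vec::new)

    // Preferred style after this change:
    if changed { files } else { Vec::new() }
}

fn main() {
    assert!(pick_files(false, vec!["a.rs".into()]).is_empty());
    assert_eq!(pick_files(true, vec!["a.rs".into()]).len(), 1);
}
```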

View file

@@ -148,15 +148,15 @@ async fn lint_with_watch_inner(
 let files = std::mem::take(&mut paths_with_options.paths);
 paths_with_options.paths = if let Some(paths) = &changed_paths {
 // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
-files
-.iter()
-.any(|path| {
-canonicalize_path(path)
-.map(|p| paths.contains(&p))
-.unwrap_or(false)
-})
-.then_some(files)
-.unwrap_or_else(|| [].to_vec())
+if files.iter().any(|path| {
+canonicalize_path(path)
+.map(|p| paths.contains(&p))
+.unwrap_or(false)
+}) {
+files
+} else {
+[].to_vec()
+}
 } else {
 files
 };

View file

@@ -279,7 +279,7 @@ mod test {
 .resolve_lint_rules(Default::default(), None)
 .rules
 .into_iter()
-.filter(|r| r.tags().iter().any(|t| *t == tags::RECOMMENDED))
+.filter(|r| r.tags().contains(&tags::RECOMMENDED))
 .map(|r| r.code().to_string())
 .filter(|n| n != "no-debugger")
 .collect::<Vec<_>>();
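
Note: the test tweak relies on `<[T]>::contains`, which performs the same membership check as `iter().any(|t| *t == x)` when `T: PartialEq`. A tiny sketch with illustrative values:

```rust
fn main() {
    let tags = ["recommended", "fresh"];
    // Equivalent checks; contains() is the more direct spelling.
    assert!(tags.contains(&"recommended"));
    assert!(tags.iter().any(|t| *t == "recommended"));
}
```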

View file

@@ -719,7 +719,7 @@ fn relative_url(
 if resolved.scheme() == "file" {
 let relative = referrer.make_relative(resolved).unwrap();
 if relative.is_empty() {
-let last = resolved.path_segments().unwrap().last().unwrap();
+let last = resolved.path_segments().unwrap().next_back().unwrap();
 format!("./{last}")
 } else if relative.starts_with("../") {
 relative

View file

@@ -59,6 +59,7 @@ pub struct RustylineSyncMessageSender {
 }
 impl RustylineSyncMessageSender {
+#[allow(clippy::result_large_err)]
 pub fn post_message<T: serde::Serialize>(
 &self,
 method: &str,

View file

@@ -1230,6 +1230,7 @@ fn op_respond_inner(state: &mut OpState, args: RespondArgs) {
 state.maybe_response = Some(args);
 }
+#[allow(clippy::large_enum_variant)]
 #[derive(Debug, Error, deno_error::JsError)]
 pub enum ExecError {
 #[class(generic)]
@@ -1390,6 +1391,7 @@ impl deno_core::ExtCodeCache for TscExtCodeCache {
 /// Execute a request on the supplied snapshot, returning a response which
 /// contains information, like any emitted files, diagnostics, statistics and
 /// optionally an updated TypeScript build info.
+#[allow(clippy::result_large_err)]
 pub fn exec(
 request: Request,
 code_cache: Option<Arc<dyn deno_runtime::code_cache::CodeCache>>,
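
Note: `clippy::large_enum_variant` warns when one variant of an enum is much bigger than the others, since every value of the enum then carries that size; this commit silences it with `#[allow]` instead of boxing the big variant. A self-contained sketch of both options, using a hypothetical enum rather than `ExecError`:

```rust
// One oversized variant makes the whole enum large; clippy suggests boxing it.
#[allow(dead_code)]
#[allow(clippy::large_enum_variant)]
enum Message {
    Ping,
    Payload([u8; 1024]),
}

// The fix the lint usually proposes: box the large variant so the enum
// stays small regardless of which variant is held.
#[allow(dead_code)]
enum BoxedMessage {
    Ping,
    Payload(Box<[u8; 1024]>),
}

fn main() {
    assert!(std::mem::size_of::<BoxedMessage>() < std::mem::size_of::<Message>());
}
```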

View file

@@ -140,6 +140,7 @@ impl TypeChecker {
 ///
 /// It is expected that it is determined if a check and/or emit is validated
 /// before the function is called.
+#[allow(clippy::result_large_err)]
 pub fn check(
 &self,
 graph: ModuleGraph,
@@ -174,6 +175,7 @@ impl TypeChecker {
 ///
 /// It is expected that it is determined if a check and/or emit is validated
 /// before the function is called.
+#[allow(clippy::result_large_err)]
 pub fn check_diagnostics(
 &self,
 mut graph: ModuleGraph,
@@ -249,6 +251,7 @@ impl TypeChecker {
 /// Groups the roots based on the compiler options, which includes the
 /// resolved TsConfig and resolved compilerOptions.types
+#[allow(clippy::result_large_err)]
 fn group_roots_by_compiler_options<'a>(
 &'a self,
 graph: &ModuleGraph,
@@ -375,6 +378,7 @@ impl Iterator for DiagnosticsByFolderIterator<'_> {
 }
 }
+#[allow(clippy::large_enum_variant)]
 enum DiagnosticsByFolderIteratorInner<'a> {
 Empty(Arc<ModuleGraph>),
 Real(DiagnosticsByFolderRealIterator<'a>),
@@ -444,6 +448,7 @@ pub fn ambient_modules_to_regex_string(ambient_modules: &[String]) -> String {
 impl<'a> DiagnosticsByFolderRealIterator<'a> {
 #[allow(clippy::too_many_arguments)]
+#[allow(clippy::result_large_err)]
 fn check_diagnostics_in_folder(
 &self,
 group_key: &'a CheckGroupKey<'a>,

View file

@@ -281,8 +281,8 @@ pub enum WatcherRestartMode {
 /// Creates a file watcher.
 ///
 /// - `operation` is the actual operation we want to run every time the watcher detects file
-/// changes. For example, in the case where we would like to bundle, then `operation` would
-/// have the logic for it like bundling the code.
+/// changes. For example, in the case where we would like to bundle, then `operation` would
+/// have the logic for it like bundling the code.
 pub async fn watch_recv<O, F>(
 mut flags: Arc<Flags>,
 print_config: PrintConfig,

View file

@@ -66,7 +66,7 @@ pub fn mapped_specifier_for_tsc(
 && specifier
 .path()
 .split('/')
-.last()
+.next_back()
 .map(|last| last.contains(".d."))
 .unwrap_or(false)
 {

View file

@@ -274,6 +274,7 @@ impl CliMainWorker {
 Ok(Some(coverage_collector))
 }
+#[allow(clippy::result_large_err)]
 pub fn execute_script_static(
 &mut self,
 name: &'static str,

View file

@@ -247,7 +247,7 @@ impl LscBackend {
 let body = http_body_util::BodyDataStream::new(res.into_body())
 .into_stream()
-.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e));
+.map_err(std::io::Error::other);
 let body = CacheResponseResource::lsc(body);
 Ok(Some((meta, Some(body))))
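
Note: besides switching to `Error::other`, this line passes the constructor directly to `map_err` instead of wrapping it in a closure, which clippy generally prefers when the function already has the right shape. A stream-free sketch of the same idea (illustrative only):

```rust
use std::io;

fn parse_port(s: &str) -> Result<u16, io::Error> {
    // `io::Error::other` accepts the ParseIntError directly, so no closure
    // like `|e| io::Error::other(e)` is needed.
    s.parse::<u16>().map_err(io::Error::other)
}

fn main() {
    assert!(parse_port("8080").is_ok());
    assert!(parse_port("not-a-port").is_err());
}
```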

View file

@@ -14,6 +14,7 @@ use hyper_util::client::legacy::connect::dns::Name;
 use tokio::task::JoinHandle;
 use tower::Service;
+#[allow(clippy::large_enum_variant)]
 #[derive(Clone, Debug)]
 pub enum Resolver {
 /// A resolver using blocking `getaddrinfo` calls in a threadpool.
@@ -85,7 +86,7 @@ impl Future for ResolveFut {
 if join_err.is_cancelled() {
 Err(io::Error::new(io::ErrorKind::Interrupted, join_err))
 } else {
-Err(io::Error::new(io::ErrorKind::Other, join_err))
+Err(io::Error::other(join_err))
 }
 })
}) })

View file

@@ -508,6 +508,8 @@ impl FetchPermissions for deno_permissions::PermissionsContainer {
 #[op2(stack_trace)]
 #[serde]
 #[allow(clippy::too_many_arguments)]
+#[allow(clippy::large_enum_variant)]
+#[allow(clippy::result_large_err)]
 pub fn op_fetch<FP>(
 state: &mut OpState,
 #[serde] method: ByteString,
@@ -865,12 +867,12 @@ impl Resource for FetchResponseResource {
 match std::mem::take(&mut *reader) {
 FetchResponseReader::Start(resp) => {
-let stream: BytesStream =
-Box::pin(resp.into_body().into_data_stream().map(|r| {
-r.map_err(|err| {
-std::io::Error::new(std::io::ErrorKind::Other, err)
-})
-}));
+let stream: BytesStream = Box::pin(
+resp
+.into_body()
+.into_data_stream()
+.map(|r| r.map_err(std::io::Error::other)),
+);
 *reader = FetchResponseReader::BodyReader(stream.peekable());
 }
 FetchResponseReader::BodyReader(_) => unreachable!(),
@@ -977,6 +979,7 @@ fn sync_permission_check<'a, P: FetchPermissions + 'static>(
 #[op2(stack_trace)]
 #[smi]
+#[allow(clippy::result_large_err)]
 pub fn op_fetch_custom_client<FP>(
 state: &mut OpState,
 #[serde] args: CreateHttpClientArgs,

View file

@@ -434,6 +434,7 @@ type BoxFuture<T> = Pin<Box<dyn Future<Output = T> + Send>>;
 type BoxError = Box<dyn std::error::Error + Send + Sync>;
 // These variatns are not to be inspected.
+#[allow(clippy::large_enum_variant)]
 pub enum Proxied<T> {
 /// Not proxied
 PassThrough(T),

View file

@@ -169,10 +169,9 @@
 None => Cow::Borrowed(path.as_ref()),
 };
 let lib = Library::open(real_path.as_ref()).map_err(|e| {
-dlopen2::Error::OpeningLibraryError(std::io::Error::new(
-std::io::ErrorKind::Other,
-format_error(e, &real_path),
-))
+dlopen2::Error::OpeningLibraryError(std::io::Error::other(format_error(
+e, &real_path,
+)))
 })?;
 let mut resource = DynamicLibraryResource {
 lib,

View file

@@ -49,7 +49,7 @@ impl TryFrom<NativeType> for libffi::middle::Type {
 libffi::middle::Type::pointer()
 }
 NativeType::Struct(fields) => {
-libffi::middle::Type::structure(match fields.len() > 0 {
+libffi::middle::Type::structure(match !fields.is_empty() {
 true => fields
 .iter()
 .map(|field| field.clone().try_into())
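
Note: `!fields.is_empty()` is the idiomatic replacement for `fields.len() > 0`, which clippy's `len_zero` lint covers. A trivial sketch:

```rust
fn has_fields(fields: &[u32]) -> bool {
    // Same result as `fields.len() > 0`, but reads as intent rather than arithmetic.
    !fields.is_empty()
}

fn main() {
    assert!(has_fields(&[1, 2, 3]));
    assert!(!has_fields(&[]));
}
```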

View file

@@ -983,12 +983,12 @@
 NetError::DnsNotFound(e)
 }
 ResolveErrorKind::Proto(ProtoError { kind, .. })
-if matches!(**kind, ProtoErrorKind::NoConnections { .. }) =>
+if matches!(**kind, ProtoErrorKind::NoConnections) =>
 {
 NetError::DnsNotConnected(e)
 }
 ResolveErrorKind::Proto(ProtoError { kind, .. })
-if matches!(**kind, ProtoErrorKind::Timeout { .. }) =>
+if matches!(**kind, ProtoErrorKind::Timeout) =>
 {
 NetError::DnsTimedOut(e)
 }

View file

@@ -85,6 +85,7 @@ macro_rules! network_stream {
 ( $([$i:ident, $il:ident, $stream:path, $listener:path, $addr:path, $stream_resource:ty]),* ) => {
 /// A raw stream of one of the types handled by this extension.
 #[pin_project::pin_project(project = NetworkStreamProject)]
+#[allow(clippy::large_enum_variant)]
 pub enum NetworkStream {
 $( $i (#[pin] $stream), )*
 }

View file

@@ -622,12 +622,12 @@ impl Resource for NodeHttpResponseResource {
 match std::mem::take(&mut *reader) {
 NodeHttpFetchResponseReader::Start(resp) => {
-let stream: BytesStream =
-Box::pin(resp.into_body().into_data_stream().map(|r| {
-r.map_err(|err| {
-std::io::Error::new(std::io::ErrorKind::Other, err)
-})
-}));
+let stream: BytesStream = Box::pin(
+resp
+.into_body()
+.into_data_stream()
+.map(|r| r.map_err(std::io::Error::other)),
+);
 *reader =
 NodeHttpFetchResponseReader::BodyReader(stream.peekable());
 }

View file

@@ -951,10 +951,7 @@ fn op_spawn_sync(
 })?;
 if let Some(input) = input {
 let mut stdin = child.stdin.take().ok_or_else(|| {
-ProcessError::Io(std::io::Error::new(
-std::io::ErrorKind::Other,
-"stdin is not available",
-))
+ProcessError::Io(std::io::Error::other("stdin is not available"))
 })?;
 stdin.write_all(&input)?;
 stdin.flush()?;

View file

@@ -16,6 +16,7 @@ use tokio::io::AsyncWrite;
 use tokio::io::ReadBuf;
 // TODO(bartlomieju): remove this
+#[allow(clippy::large_enum_variant)]
 pub(crate) enum WsStreamKind {
 Upgraded(TokioIo<Upgraded>),
 Network(NetworkStream),

View file

@@ -120,6 +120,7 @@ pub struct ResolveSnapshotError {
 source: SnapshotFromLockfileError,
 }
+#[allow(clippy::result_large_err)]
 fn resolve_snapshot<TSys: LockfileSys>(
 snapshot: NpmResolverManagedSnapshotOption<TSys>,
 patch_packages: &WorkspaceNpmPatchPackages,

View file

@@ -84,6 +84,7 @@ pub struct SyncFetchScript {
 #[op2]
 #[serde]
+#[allow(clippy::result_large_err)]
 pub fn op_worker_sync_fetch(
 state: &mut OpState,
 #[serde] scripts: Vec<String>,

View file

@@ -126,10 +126,7 @@ fn clear_stdin(
 loop {
 let r = libc::tcflush(STDIN_FD, libc::TCIFLUSH);
 if r != 0 {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"clear_stdin failed (tcflush)",
-));
+return Err(std::io::Error::other("clear_stdin failed (tcflush)"));
 }
 // Initialize timeout for select to be 100ms
@@ -149,10 +146,7 @@ fn clear_stdin(
 // Check if select returned an error
 if r < 0 {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"clear_stdin failed (select)",
-));
+return Err(std::io::Error::other("clear_stdin failed (select)"));
 }
 // Check if select returned due to timeout (stdin is quiescent)
@@ -211,13 +205,10 @@ fn clear_stdin(
 unsafe fn flush_input_buffer(stdin: HANDLE) -> Result<(), std::io::Error> {
 let success = FlushConsoleInputBuffer(stdin);
 if success != TRUE {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-format!(
-"Could not flush the console input buffer: {}",
-std::io::Error::last_os_error()
-),
-));
+return Err(std::io::Error::other(format!(
+"Could not flush the console input buffer: {}",
+std::io::Error::last_os_error()
+)));
 }
 Ok(())
 }
@@ -239,13 +230,10 @@ fn clear_stdin(
 let success =
 WriteConsoleInputW(stdin, &input_record, 1, &mut record_written);
 if success != TRUE {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-format!(
-"Could not emulate enter key press: {}",
-std::io::Error::last_os_error()
-),
-));
+return Err(std::io::Error::other(format!(
+"Could not emulate enter key press: {}",
+std::io::Error::last_os_error()
+)));
 }
 Ok(())
 }
@@ -258,13 +246,10 @@ fn clear_stdin(
 let success =
 PeekConsoleInputW(stdin, buffer.as_mut_ptr(), 1, &mut events_read);
 if success != TRUE {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-format!(
-"Could not peek the console input buffer: {}",
-std::io::Error::last_os_error()
-),
-));
+return Err(std::io::Error::other(format!(
+"Could not peek the console input buffer: {}",
+std::io::Error::last_os_error()
+)));
 }
 Ok(events_read == 0)
 }

View file

@@ -107,6 +107,7 @@ pub enum WebWorkerType {
 /// Events that are sent to host from child
 /// worker.
+#[allow(clippy::large_enum_variant)]
 pub enum WorkerControlEvent {
 TerminalError(CoreError),
 Close,
@@ -167,6 +168,7 @@ pub struct WebWorkerInternalHandle {
 impl WebWorkerInternalHandle {
 /// Post WorkerEvent to parent as a worker
+#[allow(clippy::result_large_err)]
 pub fn post_event(
 &self,
 event: WorkerControlEvent,
@@ -828,6 +830,7 @@ impl WebWorker {
 }
 /// See [JsRuntime::execute_script](deno_core::JsRuntime::execute_script)
+#[allow(clippy::result_large_err)]
 pub fn execute_script(
 &mut self,
 name: &'static str,

View file

@@ -92,6 +92,7 @@ pub(crate) static SIGUSR2_RX: LazyLock<tokio::sync::watch::Receiver<()>> =
 rx
 });
+#[allow(clippy::result_large_err)]
 pub fn import_meta_resolve_callback(
 loader: &dyn ModuleLoader,
 specifier: String,
@@ -798,6 +799,7 @@ impl MainWorker {
 }
 /// See [JsRuntime::execute_script](deno_core::JsRuntime::execute_script)
+#[allow(clippy::result_large_err)]
 pub fn execute_script(
 &mut self,
 script_name: &'static str,
@@ -931,6 +933,7 @@ impl MainWorker {
 /// Dispatches "load" event to the JavaScript runtime.
 ///
 /// Does not poll event loop, and thus not await any of the "load" event handlers.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_load_event(&mut self) -> Result<(), JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -948,6 +951,7 @@ impl MainWorker {
 /// Dispatches "unload" event to the JavaScript runtime.
 ///
 /// Does not poll event loop, and thus not await any of the "unload" event handlers.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -963,6 +967,7 @@ impl MainWorker {
 }
 /// Dispatches process.emit("exit") event for node compat.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -980,6 +985,7 @@ impl MainWorker {
 /// Dispatches "beforeunload" event to the JavaScript runtime. Returns a boolean
 /// indicating if the event was prevented and thus event loop should continue
 /// running.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -997,6 +1003,7 @@ impl MainWorker {
 }
 /// Dispatches process.emit("beforeExit") event for node compat.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_beforeexit_event(&mut self) -> Result<bool, JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);

View file

@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.85.1"
+channel = "1.86.0"
 components = ["rustfmt", "clippy"]

View file

@@ -129,6 +129,6 @@ pub fn get_tls_config(
 Ok(Arc::new(config))
 }
-None => Err(io::Error::new(io::ErrorKind::Other, "Cannot find key")),
+None => Err(io::Error::other("Cannot find key")),
 }
 }

View file

@@ -1423,7 +1423,7 @@ pub fn custom_headers(
 if p.contains("/encoding/") {
 let charset = p
 .split_terminator('/')
-.last()
+.next_back()
 .unwrap()
 .trim_end_matches(".ts");