mirror of https://github.com/denoland/deno.git
synced 2025-07-07 13:25:07 +00:00
chore: Rust 1.86.0 (#29435)
Since `rust 1.87.0` reported `undefined symbol: ring::pbkdf2::PBKDF2_HMAC_SHA1::*` in CI and the failure was difficult to debug locally, use `rust 1.86.0` in CI tests to troubleshoot the errors.
This commit is contained in: parent bc8b191b7c, commit cb738ee5da
45 changed files with 138 additions and 133 deletions
.github/workflows/ci.generate.ts (vendored, 6 changes)

@@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify";
 // Bump this number when you want to purge the cache.
 // Note: the tools/release/01_bump_crate_versions.ts script will update this version
 // automatically via regex, so ensure that this line maintains this format.
-const cacheVersion = 56;
+const cacheVersion = 57;

 const ubuntuX86Runner = "ubuntu-24.04";
 const ubuntuX86XlRunner = "ubuntu-24.04-xl";
@@ -88,7 +88,7 @@ export DEBIAN_FRONTEND=noninteractive
 sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
 # Remove older clang before we install
 sudo apt-get -qq remove \
-'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
+'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'llvm-17*' 'llvm-18*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null

 # Install clang-XXX, lld-XXX, and debootstrap.
 echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-${llvmVersion} main" |
@@ -105,7 +105,7 @@ ${installPkgsCommand} || echo 'Failed. Trying again.' && sudo apt-get clean && s
 clang-${llvmVersion} -c -o /tmp/memfd_create_shim.o tools/memfd_create_shim.c -fPIC

 echo "Decompressing sysroot..."
-wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
+wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20250207/sysroot-\`uname -m\`.tar.xz -O /tmp/sysroot.tar.xz
 cd /
 xzcat /tmp/sysroot.tar.xz | sudo tar -x
 sudo mount --rbind /dev /sysroot/dev
.github/workflows/ci.yml (vendored, 12 changes)

@@ -187,8 +187,8 @@ jobs:
 ~/.cargo/registry/index
 ~/.cargo/registry/cache
 ~/.cargo/git/db
-key: '56-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
-restore-keys: '56-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
+key: '57-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}'
+restore-keys: '57-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-'
 if: '!(matrix.skip)'
 - uses: dsherret/rust-toolchain-file@v1
 if: '!(matrix.skip)'
@@ -268,7 +268,7 @@ jobs:
 # to complete.
 sudo apt-get -qq remove --purge -y man-db > /dev/null 2> /dev/null
 # Remove older clang before we install
-sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null
+sudo apt-get -qq remove 'clang-12*' 'clang-13*' 'clang-14*' 'clang-15*' 'clang-16*' 'clang-17*' 'clang-18*' 'llvm-12*' 'llvm-13*' 'llvm-14*' 'llvm-15*' 'llvm-16*' 'llvm-17*' 'llvm-18*' 'lld-12*' 'lld-13*' 'lld-14*' 'lld-15*' 'lld-16*' 'lld-17*' 'lld-18*' > /dev/null 2> /dev/null

 # Install clang-XXX, lld-XXX, and debootstrap.
 echo "deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-19 main" |
@@ -285,7 +285,7 @@ jobs:
 clang-19 -c -o /tmp/memfd_create_shim.o tools/memfd_create_shim.c -fPIC

 echo "Decompressing sysroot..."
-wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20241030/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
+wget -q https://github.com/denoland/deno_sysroot_build/releases/download/sysroot-20250207/sysroot-`uname -m`.tar.xz -O /tmp/sysroot.tar.xz
 cd /
 xzcat /tmp/sysroot.tar.xz | sudo tar -x
 sudo mount --rbind /dev /sysroot/dev
@@ -390,7 +390,7 @@ jobs:
 !./target/*/*.zip
 !./target/*/*.tar.gz
 key: never_saved
-restore-keys: '56-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
+restore-keys: '57-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-'
 - name: Apply and update mtime cache
 if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))'
 uses: ./.github/mtime_cache
@@ -779,7 +779,7 @@ jobs:
 !./target/*/gn_root
 !./target/*/*.zip
 !./target/*/*.tar.gz
-key: '56-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
+key: '57-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}'
 wasm:
 name: build wasm32
 needs:
@@ -5645,10 +5645,9 @@ fn escape_and_split_commas(s: String) -> Result<Vec<String>, clap::Error> {
 } else {
 if current.is_empty() {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Empty values are not allowed"),
-)
+std::io::Error::other(String::from(
+"Empty values are not allowed",
+))
 .into(),
 );
 }
@@ -5659,11 +5658,8 @@ fn escape_and_split_commas(s: String) -> Result<Vec<String>, clap::Error> {
 }
 } else {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Empty values are not allowed"),
-)
-.into(),
+std::io::Error::other(String::from("Empty values are not allowed"))
+.into(),
 );
 }
 } else {
@@ -5673,11 +5669,8 @@ fn escape_and_split_commas(s: String) -> Result<Vec<String>, clap::Error> {

 if current.is_empty() {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Empty values are not allowed"),
-)
-.into(),
+std::io::Error::other(String::from("Empty values are not allowed"))
+.into(),
 );
 }

@@ -6033,18 +6026,15 @@ fn node_modules_and_vendor_dir_arg_parse(
 fn reload_arg_validate(urlstr: String) -> Result<String, clap::Error> {
 if urlstr.is_empty() {
 return Err(
-std::io::Error::new(
-std::io::ErrorKind::Other,
-String::from("Missing url. Check for extra commas."),
-)
+std::io::Error::other(String::from(
+"Missing url. Check for extra commas.",
+))
 .into(),
 );
 }
 match Url::from_str(&urlstr) {
 Ok(_) => Ok(urlstr),
-Err(e) => {
-Err(std::io::Error::new(std::io::ErrorKind::Other, e.to_string()).into())
-}
+Err(e) => Err(std::io::Error::other(e.to_string()).into()),
 }
 }
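The hunks above are representative of the most common change in this commit: every `std::io::Error::new(std::io::ErrorKind::Other, msg)` call is collapsed into `std::io::Error::other(msg)`. As a hedged aside (not part of the diff), `io::Error::other` has been in the standard library since Rust 1.74 and produces an error of kind `Other`, so the two forms below behave identically; a newer clippy lint in this toolchain is likely what prompted the sweep.

    use std::io;

    // Illustrative sketch only, not taken from the diff: the old and new ways
    // to build an io::Error with a custom message and kind Other.
    fn old_style(msg: &str) -> io::Error {
        io::Error::new(io::ErrorKind::Other, msg.to_string())
    }

    fn new_style(msg: &str) -> io::Error {
        // Stable since Rust 1.74; equivalent to the call above.
        io::Error::other(msg.to_string())
    }

    fn main() {
        let a = old_style("Empty values are not allowed");
        let b = new_style("Empty values are not allowed");
        assert_eq!(a.kind(), io::ErrorKind::Other);
        assert_eq!(b.kind(), io::ErrorKind::Other);
        println!("{a} / {b}");
    }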
cli/cache/module_info.rs (vendored, 1 change)

@@ -187,6 +187,7 @@ impl ModuleInfoCacheModuleAnalyzer<'_> {
 }
 }

+#[allow(clippy::result_large_err)]
 pub fn analyze_sync(
 &self,
 specifier: &ModuleSpecifier,
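Many hunks in this commit only add `#[allow(clippy::result_large_err)]`. A hedged sketch of why that lint fires, using a made-up error type (nothing below is from the Deno codebase): clippy warns when the `Err` variant of a returned `Result` is large, since the whole `Result` is moved around by value, and the commit silences the lint per function rather than boxing the error types.

    // Hypothetical error type, only to show what clippy::result_large_err reacts to:
    // the Err variant is far bigger than the Ok value.
    #[derive(Debug)]
    pub struct BigError {
        pub context: [u8; 512],
        pub message: String,
    }

    // Mirrors the approach taken in the diff: keep the signature and silence the
    // lint, instead of e.g. returning Result<usize, Box<BigError>>.
    #[allow(clippy::result_large_err)]
    pub fn analyze(input: &str) -> Result<usize, BigError> {
        if input.is_empty() {
            return Err(BigError {
                context: [0; 512],
                message: "empty input".to_string(),
            });
        }
        Ok(input.len())
    }

    fn main() {
        println!("{:?}", analyze("hello"));
    }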
cli/cache/parsed_source.rs (vendored, 3 changes)

@@ -31,6 +31,7 @@ impl<'a> LazyGraphSourceParser<'a> {
 Self { cache, graph }
 }

+#[allow(clippy::result_large_err)]
 pub fn get_or_parse_source(
 &self,
 module_specifier: &ModuleSpecifier,
@@ -52,6 +53,7 @@ pub struct ParsedSourceCache {
 }

 impl ParsedSourceCache {
+#[allow(clippy::result_large_err)]
 pub fn get_parsed_source_from_js_module(
 &self,
 module: &deno_graph::JsModule,
@@ -66,6 +68,7 @@ impl ParsedSourceCache {
 })
 }

+#[allow(clippy::result_large_err)]
 pub fn remove_or_parse_module(
 &self,
 specifier: &ModuleSpecifier,
@@ -153,6 +153,7 @@ impl Emitter {
 }
 }

+#[allow(clippy::result_large_err)]
 pub fn emit_parsed_source_sync(
 &self,
 specifier: &ModuleSpecifier,
@@ -356,6 +357,7 @@ impl EmitParsedSourceHelper<'_> {
 }
 }

+#[allow(clippy::result_large_err)]
 pub fn transpile(
 parsed_source_cache: &ParsedSourceCache,
 specifier: &ModuleSpecifier,
@@ -573,6 +573,7 @@ impl ModuleGraphCreator {
 self.module_graph_builder.graph_valid(graph)
 }

+#[allow(clippy::result_large_err)]
 fn type_check_graph(
 &self,
 graph: ModuleGraph,
@@ -557,6 +557,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
 }
 }

+#[allow(clippy::result_large_err)]
 pub fn create_main_worker(
 &self,
 mode: WorkerExecutionMode,
@@ -573,6 +574,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
 )
 }

+#[allow(clippy::result_large_err)]
 pub fn create_custom_worker(
 &self,
 mode: WorkerExecutionMode,
@@ -694,6 +696,7 @@ impl<TSys: DenoLibSys> LibMainWorkerFactory<TSys> {
 })
 }

+#[allow(clippy::result_large_err)]
 pub fn resolve_npm_binary_entrypoint(
 &self,
 package_folder: &Path,
@@ -797,26 +800,31 @@ impl LibMainWorker {
 }

 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_load_event(&mut self) -> Result<(), JsError> {
 self.worker.dispatch_load_event()
 }

 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, JsError> {
 self.worker.dispatch_beforeunload_event()
 }

 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_beforeexit_event(&mut self) -> Result<bool, JsError> {
 self.worker.dispatch_process_beforeexit_event()
 }

 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> {
 self.worker.dispatch_unload_event()
 }

 #[inline]
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> {
 self.worker.dispatch_process_exit_event()
 }
@@ -964,7 +964,7 @@ impl Config {
 let mut folders = vec![];
 for root_url in root_urls {
 let root_uri = url_to_uri(&root_url).unwrap();
-let name = root_url.path_segments().and_then(|s| s.last());
+let name = root_url.path_segments().and_then(|mut s| s.next_back());
 let name = name.unwrap_or_default().to_string();
 folders.push((
 Arc::new(root_url),
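Another recurring pattern: `.last()` on `Url::path_segments()` becomes `.next_back()`. A hedged sketch (the URL below is invented): `path_segments()` yields a `std::str::Split`, which is a `DoubleEndedIterator`, so `next_back()` returns the final segment directly instead of walking the iterator from the front; the newer clippy flags `.last()` on double-ended iterators.

    use url::Url; // assumes the `url` crate, which the surrounding code already uses

    fn main() {
        let root = Url::parse("https://example.com/workspace/project").unwrap();
        // Old form from the diff: consume the iterator from the front.
        let via_last = root.path_segments().and_then(|s| s.last());
        // New form: take the last segment directly from the back.
        let via_next_back = root.path_segments().and_then(|mut s| s.next_back());
        assert_eq!(via_last, via_next_back);
        println!("{:?}", via_next_back); // Some("project")
    }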
@@ -1942,6 +1942,7 @@ fn parse_and_analyze_module(
 )
 }

+#[allow(clippy::result_large_err)]
 fn parse_source(
 specifier: ModuleSpecifier,
 text: Arc<str>,
@@ -914,7 +914,7 @@ impl Inner {
 let mut root_url = uri_to_url(&root_uri);
 let name = root_url
 .path_segments()
-.and_then(|s| s.last())
+.and_then(|mut s| s.next_back())
 .unwrap_or_default()
 .to_string();
 if !root_url.path().ends_with('/') {
@@ -1976,7 +1976,6 @@ impl Inner {
 }

 #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
-
 fn resolution_to_hover_text(
 &self,
 resolution: &Resolution,
@@ -4566,7 +4565,6 @@ impl Inner {
 }

 #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
-
 fn prepare_cache(
 &mut self,
 specifiers: Vec<ModuleSpecifier>,
@@ -4665,7 +4663,6 @@ impl Inner {
 }

 #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))]
-
 fn pre_did_change_workspace_folders(
 &mut self,
 params: DidChangeWorkspaceFoldersParams,
@@ -171,7 +171,7 @@ impl TestModule {
 self
 .specifier
 .path_segments()
-.and_then(|s| s.last().map(|s| s.to_string()))
+.and_then(|mut s| s.next_back().map(|s| s.to_string()))
 .unwrap_or_else(|| "<unknown>".to_string())
 }
 }
@@ -614,6 +614,7 @@ impl<TGraphContainer: ModuleGraphContainer>
 })
 }

+#[allow(clippy::result_large_err)]
 fn resolve_referrer(
 &self,
 referrer: &str,
@@ -641,6 +642,7 @@ impl<TGraphContainer: ModuleGraphContainer>
 }
 }

+#[allow(clippy::result_large_err)]
 fn inner_resolve(
 &self,
 raw_specifier: &str,
@@ -205,6 +205,7 @@ pub enum LintError {

 #[op2]
 #[buffer]
+#[allow(clippy::result_large_err)]
 fn op_lint_create_serialized_ast(
 #[string] file_name: &str,
 #[string] source: String,
@@ -56,7 +56,7 @@ pub struct StandaloneData {
 /// then checking for the magic trailer string `d3n0l4nd`. If found,
 /// the bundle is executed. If not, this function exits with `Ok(None)`.
 pub fn extract_standalone(
-cli_args: Cow<Vec<OsString>>,
+cli_args: Cow<[OsString]>,
 ) -> Result<StandaloneData, AnyError> {
 let data = find_section()?;

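The `extract_standalone` signature change from `Cow<Vec<OsString>>` to `Cow<[OsString]>` is a small API loosening. A hedged sketch of the difference, with an invented function and values: `Cow<'_, [OsString]>` can borrow a plain slice or own a `Vec`, so a caller holding only a slice no longer has to allocate a `Vec` just to satisfy the parameter type.

    use std::borrow::Cow;
    use std::ffi::OsString;

    // Illustrative only: a function that accepts either borrowed or owned args.
    fn arg_count(args: Cow<'_, [OsString]>) -> usize {
        args.len()
    }

    fn main() {
        let owned: Vec<OsString> = vec!["deno".into(), "run".into()];
        let borrowed: &[OsString] = &[OsString::from("deno")];
        assert_eq!(arg_count(Cow::Owned(owned)), 2);
        assert_eq!(arg_count(Cow::Borrowed(borrowed)), 1);
    }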
@@ -935,10 +935,7 @@ impl VfsRoot {
 match entry {
 VfsEntryRef::Symlink(symlink) => {
 if !seen.insert(path.to_path_buf()) {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"circular symlinks",
-));
+return Err(std::io::Error::other("circular symlinks"));
 }
 path = Cow::Owned(symlink.resolve_dest_from_root(&self.root_path));
 }
@@ -1388,10 +1385,9 @@ impl FileBackedVfs {
 VfsEntryRef::Symlink(symlink) => {
 Ok(symlink.resolve_dest_from_root(&self.fs_root.root_path))
 }
-VfsEntryRef::Dir(_) | VfsEntryRef::File(_) => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"not a symlink",
-)),
+VfsEntryRef::Dir(_) | VfsEntryRef::File(_) => {
+Err(std::io::Error::other("not a symlink"))
+}
 }
 }

@@ -1467,20 +1463,14 @@ impl FileBackedVfs {
 match entry {
 VfsEntryRef::Dir(dir) => Ok(dir),
 VfsEntryRef::Symlink(_) => unreachable!(),
-VfsEntryRef::File(_) => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"path is a file",
-)),
+VfsEntryRef::File(_) => Err(std::io::Error::other("path is a file")),
 }
 }

 pub fn file_entry(&self, path: &Path) -> std::io::Result<&VirtualFile> {
 let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?;
 match entry {
-VfsEntryRef::Dir(_) => Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"path is a directory",
-)),
+VfsEntryRef::Dir(_) => Err(std::io::Error::other("path is a directory")),
 VfsEntryRef::Symlink(_) => unreachable!(),
 VfsEntryRef::File(file) => Ok(file),
 }
@@ -101,15 +101,15 @@ pub async fn format(
 paths_with_options.paths = if let Some(paths) = &changed_paths {
 if fmt_flags.check {
 // check all files on any changed (https://github.com/denoland/deno/issues/12446)
-files
-.iter()
-.any(|path| {
-canonicalize_path(path)
-.map(|path| paths.contains(&path))
-.unwrap_or(false)
-})
-.then_some(files)
-.unwrap_or_else(|| [].to_vec())
+if files.iter().any(|path| {
+canonicalize_path(path)
+.map(|path| paths.contains(&path))
+.unwrap_or(false)
+}) {
+files
+} else {
+[].to_vec()
+}
 } else {
 files
 .into_iter()
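In `format` above (and in `lint_with_watch_inner` just below), a `bool::then_some(..).unwrap_or_else(..)` chain becomes a plain `if`/`else`. A hedged sketch of the shape of the refactor with invented helper names; the comment shows the old chain:

    // Sketch only: pick `files` when any of them matches a changed path.
    //
    // Old shape (as removed in the diff):
    //   files.iter().any(|p| changed.contains(p))
    //     .then_some(files)
    //     .unwrap_or_else(|| [].to_vec())
    fn pick_files(files: Vec<String>, changed: &[String]) -> Vec<String> {
        if files.iter().any(|p| changed.contains(p)) {
            files
        } else {
            [].to_vec()
        }
    }

    fn main() {
        let files = vec!["a.ts".to_string(), "b.ts".to_string()];
        let changed = vec!["b.ts".to_string()];
        println!("{:?}", pick_files(files, &changed));
    }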
@@ -148,15 +148,15 @@ async fn lint_with_watch_inner(
 let files = std::mem::take(&mut paths_with_options.paths);
 paths_with_options.paths = if let Some(paths) = &changed_paths {
 // lint all files on any changed (https://github.com/denoland/deno/issues/12446)
-files
-.iter()
-.any(|path| {
-canonicalize_path(path)
-.map(|p| paths.contains(&p))
-.unwrap_or(false)
-})
-.then_some(files)
-.unwrap_or_else(|| [].to_vec())
+if files.iter().any(|path| {
+canonicalize_path(path)
+.map(|p| paths.contains(&p))
+.unwrap_or(false)
+}) {
+files
+} else {
+[].to_vec()
+}
 } else {
 files
 };
@@ -279,7 +279,7 @@ mod test {
 .resolve_lint_rules(Default::default(), None)
 .rules
 .into_iter()
-.filter(|r| r.tags().iter().any(|t| *t == tags::RECOMMENDED))
+.filter(|r| r.tags().contains(&tags::RECOMMENDED))
 .map(|r| r.code().to_string())
 .filter(|n| n != "no-debugger")
 .collect::<Vec<_>>();
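The lint-rules test swaps `.iter().any(|t| *t == tags::RECOMMENDED)` for `.contains(&tags::RECOMMENDED)`. A hedged, self-contained sketch of the equivalence (the tag values here are invented):

    fn main() {
        let tags = ["recommended", "fresh"];
        // Old shape from the diff: explicit iteration and comparison.
        let via_any = tags.iter().any(|t| *t == "recommended");
        // New shape: slices already provide `contains`.
        let via_contains = tags.contains(&"recommended");
        assert_eq!(via_any, via_contains);
        println!("{via_any}");
    }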
@@ -719,7 +719,7 @@ fn relative_url(
 if resolved.scheme() == "file" {
 let relative = referrer.make_relative(resolved).unwrap();
 if relative.is_empty() {
-let last = resolved.path_segments().unwrap().last().unwrap();
+let last = resolved.path_segments().unwrap().next_back().unwrap();
 format!("./{last}")
 } else if relative.starts_with("../") {
 relative
@@ -59,6 +59,7 @@ pub struct RustylineSyncMessageSender {
 }

 impl RustylineSyncMessageSender {
+#[allow(clippy::result_large_err)]
 pub fn post_message<T: serde::Serialize>(
 &self,
 method: &str,
@@ -1230,6 +1230,7 @@ fn op_respond_inner(state: &mut OpState, args: RespondArgs) {
 state.maybe_response = Some(args);
 }

+#[allow(clippy::large_enum_variant)]
 #[derive(Debug, Error, deno_error::JsError)]
 pub enum ExecError {
 #[class(generic)]
@@ -1390,6 +1391,7 @@ impl deno_core::ExtCodeCache for TscExtCodeCache {
 /// Execute a request on the supplied snapshot, returning a response which
 /// contains information, like any emitted files, diagnostics, statistics and
 /// optionally an updated TypeScript build info.
+#[allow(clippy::result_large_err)]
 pub fn exec(
 request: Request,
 code_cache: Option<Arc<dyn deno_runtime::code_cache::CodeCache>>,
@@ -140,6 +140,7 @@ impl TypeChecker {
 ///
 /// It is expected that it is determined if a check and/or emit is validated
 /// before the function is called.
+#[allow(clippy::result_large_err)]
 pub fn check(
 &self,
 graph: ModuleGraph,
@@ -174,6 +175,7 @@ impl TypeChecker {
 ///
 /// It is expected that it is determined if a check and/or emit is validated
 /// before the function is called.
+#[allow(clippy::result_large_err)]
 pub fn check_diagnostics(
 &self,
 mut graph: ModuleGraph,
@@ -249,6 +251,7 @@ impl TypeChecker {

 /// Groups the roots based on the compiler options, which includes the
 /// resolved TsConfig and resolved compilerOptions.types
+#[allow(clippy::result_large_err)]
 fn group_roots_by_compiler_options<'a>(
 &'a self,
 graph: &ModuleGraph,
@@ -375,6 +378,7 @@ impl Iterator for DiagnosticsByFolderIterator<'_> {
 }
 }

+#[allow(clippy::large_enum_variant)]
 enum DiagnosticsByFolderIteratorInner<'a> {
 Empty(Arc<ModuleGraph>),
 Real(DiagnosticsByFolderRealIterator<'a>),
@@ -444,6 +448,7 @@ pub fn ambient_modules_to_regex_string(ambient_modules: &[String]) -> String {

 impl<'a> DiagnosticsByFolderRealIterator<'a> {
 #[allow(clippy::too_many_arguments)]
+#[allow(clippy::result_large_err)]
 fn check_diagnostics_in_folder(
 &self,
 group_key: &'a CheckGroupKey<'a>,
@@ -281,8 +281,8 @@ pub enum WatcherRestartMode {
 /// Creates a file watcher.
 ///
 /// - `operation` is the actual operation we want to run every time the watcher detects file
-/// changes. For example, in the case where we would like to bundle, then `operation` would
-/// have the logic for it like bundling the code.
+///   changes. For example, in the case where we would like to bundle, then `operation` would
+///   have the logic for it like bundling the code.
 pub async fn watch_recv<O, F>(
 mut flags: Arc<Flags>,
 print_config: PrintConfig,
@@ -66,7 +66,7 @@ pub fn mapped_specifier_for_tsc(
 && specifier
 .path()
 .split('/')
-.last()
+.next_back()
 .map(|last| last.contains(".d."))
 .unwrap_or(false)
 {
@@ -274,6 +274,7 @@ impl CliMainWorker {
 Ok(Some(coverage_collector))
 }

+#[allow(clippy::result_large_err)]
 pub fn execute_script_static(
 &mut self,
 name: &'static str,
ext/cache/lscache.rs (vendored, 2 changes)

@@ -247,7 +247,7 @@ impl LscBackend {

 let body = http_body_util::BodyDataStream::new(res.into_body())
 .into_stream()
-.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e));
+.map_err(std::io::Error::other);
 let body = CacheResponseResource::lsc(body);

 Ok(Some((meta, Some(body))))
@@ -14,6 +14,7 @@ use hyper_util::client::legacy::connect::dns::Name;
 use tokio::task::JoinHandle;
 use tower::Service;

+#[allow(clippy::large_enum_variant)]
 #[derive(Clone, Debug)]
 pub enum Resolver {
 /// A resolver using blocking `getaddrinfo` calls in a threadpool.
@@ -85,7 +86,7 @@ impl Future for ResolveFut {
 if join_err.is_cancelled() {
 Err(io::Error::new(io::ErrorKind::Interrupted, join_err))
 } else {
-Err(io::Error::new(io::ErrorKind::Other, join_err))
+Err(io::Error::other(join_err))
 }
 }
 })
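Several enums in this commit (such as `Resolver` above, plus `ExecError`, `Proxied`, `NetworkStream`, `WsStreamKind`, and `WorkerControlEvent` elsewhere in the diff) gain `#[allow(clippy::large_enum_variant)]`. A hedged sketch with a made-up enum of why the lint exists: an enum is at least as large as its biggest variant, so one oversized variant inflates every value, and the commit chooses to acknowledge that rather than box the large payloads.

    // Hypothetical enum, not from the codebase: one variant dwarfs the other.
    #[allow(clippy::large_enum_variant)]
    enum Message {
        Ping,
        Payload([u8; 4096]),
    }

    fn main() {
        // Every Message, even Ping, occupies at least the size of Payload.
        println!("size_of::<Message>() = {}", std::mem::size_of::<Message>());
        let messages = [Message::Ping, Message::Payload([0; 4096])];
        println!("{} messages", messages.len());
    }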
@@ -508,6 +508,8 @@ impl FetchPermissions for deno_permissions::PermissionsContainer {
 #[op2(stack_trace)]
 #[serde]
 #[allow(clippy::too_many_arguments)]
+#[allow(clippy::large_enum_variant)]
+#[allow(clippy::result_large_err)]
 pub fn op_fetch<FP>(
 state: &mut OpState,
 #[serde] method: ByteString,
@@ -865,12 +867,12 @@ impl Resource for FetchResponseResource {

 match std::mem::take(&mut *reader) {
 FetchResponseReader::Start(resp) => {
-let stream: BytesStream =
-Box::pin(resp.into_body().into_data_stream().map(|r| {
-r.map_err(|err| {
-std::io::Error::new(std::io::ErrorKind::Other, err)
-})
-}));
+let stream: BytesStream = Box::pin(
+resp
+.into_body()
+.into_data_stream()
+.map(|r| r.map_err(std::io::Error::other)),
+);
 *reader = FetchResponseReader::BodyReader(stream.peekable());
 }
 FetchResponseReader::BodyReader(_) => unreachable!(),
@@ -977,6 +979,7 @@ fn sync_permission_check<'a, P: FetchPermissions + 'static>(

 #[op2(stack_trace)]
 #[smi]
+#[allow(clippy::result_large_err)]
 pub fn op_fetch_custom_client<FP>(
 state: &mut OpState,
 #[serde] args: CreateHttpClientArgs,
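In the body-stream hunks above, the nested closure around `std::io::Error::new(ErrorKind::Other, err)` becomes `map_err(std::io::Error::other)`: the constructor is handed to `map_err` as a plain function instead of being wrapped in a closure. A hedged, simplified sketch outside any HTTP types:

    use std::io;

    fn main() {
        let results: Vec<Result<u32, String>> = vec![Ok(1), Err("boom".to_string())];

        // io::Error::other is itself a fn(E) -> io::Error for any E convertible
        // into a boxed error, so it can be passed to map_err directly.
        let mapped: Vec<Result<u32, io::Error>> = results
            .into_iter()
            .map(|r| r.map_err(io::Error::other))
            .collect();

        for r in &mapped {
            println!("{r:?}");
        }
    }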
@@ -434,6 +434,7 @@ type BoxFuture<T> = Pin<Box<dyn Future<Output = T> + Send>>;
 type BoxError = Box<dyn std::error::Error + Send + Sync>;

 // These variatns are not to be inspected.
+#[allow(clippy::large_enum_variant)]
 pub enum Proxied<T> {
 /// Not proxied
 PassThrough(T),
@@ -169,10 +169,9 @@ where
 None => Cow::Borrowed(path.as_ref()),
 };
 let lib = Library::open(real_path.as_ref()).map_err(|e| {
-dlopen2::Error::OpeningLibraryError(std::io::Error::new(
-std::io::ErrorKind::Other,
-format_error(e, &real_path),
-))
+dlopen2::Error::OpeningLibraryError(std::io::Error::other(format_error(
+e, &real_path,
+)))
 })?;
 let mut resource = DynamicLibraryResource {
 lib,
@@ -49,7 +49,7 @@ impl TryFrom<NativeType> for libffi::middle::Type {
 libffi::middle::Type::pointer()
 }
 NativeType::Struct(fields) => {
-libffi::middle::Type::structure(match fields.len() > 0 {
+libffi::middle::Type::structure(match !fields.is_empty() {
 true => fields
 .iter()
 .map(|field| field.clone().try_into())
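The FFI hunk above replaces `fields.len() > 0` with `!fields.is_empty()`, the form clippy's `len_zero` lint suggests. A hedged one-screen sketch of the equivalence:

    // Sketch only: both predicates agree for every slice.
    fn has_fields_old(fields: &[u32]) -> bool {
        fields.len() > 0 // the form clippy::len_zero flags
    }

    fn has_fields_new(fields: &[u32]) -> bool {
        !fields.is_empty()
    }

    fn main() {
        for fields in [&[][..], &[1, 2, 3][..]] {
            assert_eq!(has_fields_old(fields), has_fields_new(fields));
        }
        println!("ok");
    }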
@@ -983,12 +983,12 @@ where
 NetError::DnsNotFound(e)
 }
 ResolveErrorKind::Proto(ProtoError { kind, .. })
-if matches!(**kind, ProtoErrorKind::NoConnections { .. }) =>
+if matches!(**kind, ProtoErrorKind::NoConnections) =>
 {
 NetError::DnsNotConnected(e)
 }
 ResolveErrorKind::Proto(ProtoError { kind, .. })
-if matches!(**kind, ProtoErrorKind::Timeout { .. }) =>
+if matches!(**kind, ProtoErrorKind::Timeout) =>
 {
 NetError::DnsTimedOut(e)
 }
@@ -85,6 +85,7 @@ macro_rules! network_stream {
 ( $([$i:ident, $il:ident, $stream:path, $listener:path, $addr:path, $stream_resource:ty]),* ) => {
 /// A raw stream of one of the types handled by this extension.
 #[pin_project::pin_project(project = NetworkStreamProject)]
+#[allow(clippy::large_enum_variant)]
 pub enum NetworkStream {
 $( $i (#[pin] $stream), )*
 }
@@ -622,12 +622,12 @@ impl Resource for NodeHttpResponseResource {

 match std::mem::take(&mut *reader) {
 NodeHttpFetchResponseReader::Start(resp) => {
-let stream: BytesStream =
-Box::pin(resp.into_body().into_data_stream().map(|r| {
-r.map_err(|err| {
-std::io::Error::new(std::io::ErrorKind::Other, err)
-})
-}));
+let stream: BytesStream = Box::pin(
+resp
+.into_body()
+.into_data_stream()
+.map(|r| r.map_err(std::io::Error::other)),
+);
 *reader =
 NodeHttpFetchResponseReader::BodyReader(stream.peekable());
 }
@@ -951,10 +951,7 @@ fn op_spawn_sync(
 })?;
 if let Some(input) = input {
 let mut stdin = child.stdin.take().ok_or_else(|| {
-ProcessError::Io(std::io::Error::new(
-std::io::ErrorKind::Other,
-"stdin is not available",
-))
+ProcessError::Io(std::io::Error::other("stdin is not available"))
 })?;
 stdin.write_all(&input)?;
 stdin.flush()?;
@@ -16,6 +16,7 @@ use tokio::io::AsyncWrite;
 use tokio::io::ReadBuf;

 // TODO(bartlomieju): remove this
+#[allow(clippy::large_enum_variant)]
 pub(crate) enum WsStreamKind {
 Upgraded(TokioIo<Upgraded>),
 Network(NetworkStream),
@@ -120,6 +120,7 @@ pub struct ResolveSnapshotError {
 source: SnapshotFromLockfileError,
 }

+#[allow(clippy::result_large_err)]
 fn resolve_snapshot<TSys: LockfileSys>(
 snapshot: NpmResolverManagedSnapshotOption<TSys>,
 patch_packages: &WorkspaceNpmPatchPackages,
@@ -84,6 +84,7 @@ pub struct SyncFetchScript {

 #[op2]
 #[serde]
+#[allow(clippy::result_large_err)]
 pub fn op_worker_sync_fetch(
 state: &mut OpState,
 #[serde] scripts: Vec<String>,
@@ -126,10 +126,7 @@ fn clear_stdin(
 loop {
 let r = libc::tcflush(STDIN_FD, libc::TCIFLUSH);
 if r != 0 {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"clear_stdin failed (tcflush)",
-));
+return Err(std::io::Error::other("clear_stdin failed (tcflush)"));
 }

 // Initialize timeout for select to be 100ms
@@ -149,10 +146,7 @@ fn clear_stdin(

 // Check if select returned an error
 if r < 0 {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-"clear_stdin failed (select)",
-));
+return Err(std::io::Error::other("clear_stdin failed (select)"));
 }

 // Check if select returned due to timeout (stdin is quiescent)
@@ -211,13 +205,10 @@ fn clear_stdin(
 unsafe fn flush_input_buffer(stdin: HANDLE) -> Result<(), std::io::Error> {
 let success = FlushConsoleInputBuffer(stdin);
 if success != TRUE {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-format!(
-"Could not flush the console input buffer: {}",
-std::io::Error::last_os_error()
-),
-));
+return Err(std::io::Error::other(format!(
+"Could not flush the console input buffer: {}",
+std::io::Error::last_os_error()
+)));
 }
 Ok(())
 }
@@ -239,13 +230,10 @@ fn clear_stdin(
 let success =
 WriteConsoleInputW(stdin, &input_record, 1, &mut record_written);
 if success != TRUE {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-format!(
-"Could not emulate enter key press: {}",
-std::io::Error::last_os_error()
-),
-));
+return Err(std::io::Error::other(format!(
+"Could not emulate enter key press: {}",
+std::io::Error::last_os_error()
+)));
 }
 Ok(())
 }
@@ -258,13 +246,10 @@ fn clear_stdin(
 let success =
 PeekConsoleInputW(stdin, buffer.as_mut_ptr(), 1, &mut events_read);
 if success != TRUE {
-return Err(std::io::Error::new(
-std::io::ErrorKind::Other,
-format!(
-"Could not peek the console input buffer: {}",
-std::io::Error::last_os_error()
-),
-));
+return Err(std::io::Error::other(format!(
+"Could not peek the console input buffer: {}",
+std::io::Error::last_os_error()
+)));
 }
 Ok(events_read == 0)
 }
@@ -107,6 +107,7 @@ pub enum WebWorkerType {

 /// Events that are sent to host from child
 /// worker.
+#[allow(clippy::large_enum_variant)]
 pub enum WorkerControlEvent {
 TerminalError(CoreError),
 Close,
@@ -167,6 +168,7 @@ pub struct WebWorkerInternalHandle {

 impl WebWorkerInternalHandle {
 /// Post WorkerEvent to parent as a worker
+#[allow(clippy::result_large_err)]
 pub fn post_event(
 &self,
 event: WorkerControlEvent,
@@ -828,6 +830,7 @@ impl WebWorker {
 }

 /// See [JsRuntime::execute_script](deno_core::JsRuntime::execute_script)
+#[allow(clippy::result_large_err)]
 pub fn execute_script(
 &mut self,
 name: &'static str,
@@ -92,6 +92,7 @@ pub(crate) static SIGUSR2_RX: LazyLock<tokio::sync::watch::Receiver<()>> =
 rx
 });

+#[allow(clippy::result_large_err)]
 pub fn import_meta_resolve_callback(
 loader: &dyn ModuleLoader,
 specifier: String,
@@ -798,6 +799,7 @@ impl MainWorker {
 }

 /// See [JsRuntime::execute_script](deno_core::JsRuntime::execute_script)
+#[allow(clippy::result_large_err)]
 pub fn execute_script(
 &mut self,
 script_name: &'static str,
@@ -931,6 +933,7 @@ impl MainWorker {
 /// Dispatches "load" event to the JavaScript runtime.
 ///
 /// Does not poll event loop, and thus not await any of the "load" event handlers.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_load_event(&mut self) -> Result<(), JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -948,6 +951,7 @@ impl MainWorker {
 /// Dispatches "unload" event to the JavaScript runtime.
 ///
 /// Does not poll event loop, and thus not await any of the "unload" event handlers.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -963,6 +967,7 @@ impl MainWorker {
 }

 /// Dispatches process.emit("exit") event for node compat.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -980,6 +985,7 @@ impl MainWorker {
 /// Dispatches "beforeunload" event to the JavaScript runtime. Returns a boolean
 /// indicating if the event was prevented and thus event loop should continue
 /// running.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_beforeunload_event(&mut self) -> Result<bool, JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -997,6 +1003,7 @@ impl MainWorker {
 }

 /// Dispatches process.emit("beforeExit") event for node compat.
+#[allow(clippy::result_large_err)]
 pub fn dispatch_process_beforeexit_event(&mut self) -> Result<bool, JsError> {
 let scope = &mut self.js_runtime.handle_scope();
 let tc_scope = &mut v8::TryCatch::new(scope);
@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.85.1"
+channel = "1.86.0"
 components = ["rustfmt", "clippy"]
@@ -129,6 +129,6 @@ pub fn get_tls_config(

 Ok(Arc::new(config))
 }
-None => Err(io::Error::new(io::ErrorKind::Other, "Cannot find key")),
+None => Err(io::Error::other("Cannot find key")),
 }
 }
@@ -1423,7 +1423,7 @@ pub fn custom_headers(
 if p.contains("/encoding/") {
 let charset = p
 .split_terminator('/')
-.last()
+.next_back()
 .unwrap()
 .trim_end_matches(".ts");
