Upgrade Rust toolchain to 1.84.0 (#15408)
Some checks are pending
CI / Determine changes (push) Waiting to run
CI / cargo fmt (push) Waiting to run
CI / cargo clippy (push) Blocked by required conditions
CI / cargo test (linux) (push) Blocked by required conditions
CI / cargo test (linux, release) (push) Blocked by required conditions
CI / cargo test (windows) (push) Blocked by required conditions
CI / cargo test (wasm) (push) Blocked by required conditions
CI / cargo build (release) (push) Waiting to run
CI / cargo build (msrv) (push) Blocked by required conditions
CI / cargo fuzz build (push) Blocked by required conditions
CI / fuzz parser (push) Blocked by required conditions
CI / test scripts (push) Blocked by required conditions
CI / ecosystem (push) Blocked by required conditions
CI / cargo shear (push) Blocked by required conditions
CI / python package (push) Waiting to run
CI / pre-commit (push) Waiting to run
CI / mkdocs (push) Waiting to run
CI / formatter instabilities and black similarity (push) Blocked by required conditions
CI / test ruff-lsp (push) Blocked by required conditions
CI / benchmarks (push) Blocked by required conditions

This commit is contained in:
Micha Reiser 2025-01-11 09:51:58 +01:00 committed by GitHub
parent 2d82445794
commit c39ca8fe6d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
18 changed files with 36 additions and 45 deletions

View file

@@ -211,6 +211,9 @@ redundant_clone = "warn"
debug_assert_with_mut_call = "warn" debug_assert_with_mut_call = "warn"
unused_peekable = "warn" unused_peekable = "warn"
# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"
[profile.release] [profile.release]
# Note that we set these explicitly, and these values # Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times # were chosen based on a trade-off between compile times

View file

@@ -74,7 +74,7 @@ impl Index {
DocumentKey::NotebookCell(url) DocumentKey::NotebookCell(url)
} else if Path::new(url.path()) } else if Path::new(url.path())
.extension() .extension()
.map_or(false, |ext| ext.eq_ignore_ascii_case("ipynb")) .is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb"))
{ {
DocumentKey::Notebook(url) DocumentKey::Notebook(url)
} else { } else {

View file

@@ -285,7 +285,7 @@ impl Workspace {
open_files.contains(&file) open_files.contains(&file)
} else if let Some(system_path) = file.path(db).as_system_path() { } else if let Some(system_path) = file.path(db).as_system_path() {
self.package(db, system_path) self.package(db, system_path)
.map_or(false, |package| package.contains_file(db, file)) .is_some_and(|package| package.contains_file(db, file))
} else { } else {
file.path(db).is_system_virtual_path() file.path(db).is_system_virtual_path()
} }

View file

@@ -959,7 +959,7 @@ A `--config` flag must either be a path to a `.toml` configuration file
// We want to display the most helpful error to the user as possible. // We want to display the most helpful error to the user as possible.
if Path::new(value) if Path::new(value)
.extension() .extension()
.map_or(false, |ext| ext.eq_ignore_ascii_case("toml")) .is_some_and(|ext| ext.eq_ignore_ascii_case("toml"))
{ {
if !value.contains('=') { if !value.contains('=') {
tip.push_str(&format!( tip.push_str(&format!(

View file

@@ -87,13 +87,13 @@ pub trait System: Debug {
/// Returns `true` if `path` exists and is a directory. /// Returns `true` if `path` exists and is a directory.
fn is_directory(&self, path: &SystemPath) -> bool { fn is_directory(&self, path: &SystemPath) -> bool {
self.path_metadata(path) self.path_metadata(path)
.map_or(false, |metadata| metadata.file_type.is_directory()) .is_ok_and(|metadata| metadata.file_type.is_directory())
} }
/// Returns `true` if `path` exists and is a file. /// Returns `true` if `path` exists and is a file.
fn is_file(&self, path: &SystemPath) -> bool { fn is_file(&self, path: &SystemPath) -> bool {
self.path_metadata(path) self.path_metadata(path)
.map_or(false, |metadata| metadata.file_type.is_file()) .is_ok_and(|metadata| metadata.file_type.is_file())
} }
/// Returns the current working directory /// Returns the current working directory

View file

@@ -497,12 +497,7 @@ impl<'a> Printer<'a> {
dest: self.state.buffer.text_len(), dest: self.state.buffer.text_len(),
}; };
if self if self.state.source_markers.last() != Some(&marker) {
.state
.source_markers
.last()
.map_or(true, |last| last != &marker)
{
self.state.source_markers.push(marker); self.state.source_markers.push(marker);
} }
} }

View file

@@ -540,7 +540,7 @@ fn is_docstring_section(
// The return value of the function. // The return value of the function.
// """ // """
// ``` // ```
if previous_line.map_or(false, |line| line.trim().is_empty()) { if previous_line.is_some_and(|line| line.trim().is_empty()) {
return true; return true;
} }

View file

@@ -38,7 +38,7 @@ pub(crate) fn fix_file(
diagnostic diagnostic
.fix .fix
.as_ref() .as_ref()
.map_or(false, |fix| fix.applies(required_applicability)) .is_some_and(|fix| fix.applies(required_applicability))
}) })
.peekable(); .peekable();

View file

@@ -78,11 +78,7 @@ impl<'a> Directive<'a> {
comment_start = text[..comment_start].trim_end().len(); comment_start = text[..comment_start].trim_end().len();
// The next character has to be the `#` character. // The next character has to be the `#` character.
if text[..comment_start] if !text[..comment_start].ends_with('#') {
.chars()
.last()
.map_or(true, |c| c != '#')
{
continue; continue;
} }
comment_start -= '#'.len_utf8(); comment_start -= '#'.len_utf8();

View file

@@ -493,7 +493,7 @@ pub(crate) fn f_strings(checker: &mut Checker, call: &ast::ExprCall, summary: &F
checker checker
.semantic() .semantic()
.resolve_qualified_name(call.func.as_ref()) .resolve_qualified_name(call.func.as_ref())
.map_or(false, |qualified_name| { .is_some_and(|qualified_name| {
matches!( matches!(
qualified_name.segments(), qualified_name.segments(),
["django", "utils", "translation", "gettext" | "gettext_lazy"] ["django", "utils", "translation", "gettext" | "gettext_lazy"]

View file

@@ -145,7 +145,7 @@ pub(crate) fn super_call_with_parameters(checker: &mut Checker, call: &ast::Expr
.resolve_qualified_name(func) .resolve_qualified_name(func)
.is_some_and(|name| name.segments() == ["dataclasses", "dataclass"]) .is_some_and(|name| name.segments() == ["dataclasses", "dataclass"])
{ {
arguments.find_keyword("slots").map_or(false, |keyword| { arguments.find_keyword("slots").is_some_and(|keyword| {
matches!( matches!(
keyword.value, keyword.value,
Expr::BooleanLiteral(ast::ExprBooleanLiteral { value: true, .. }) Expr::BooleanLiteral(ast::ExprBooleanLiteral { value: true, .. })

View file

@@ -117,7 +117,7 @@ fn in_subscript_index(expr: &ExprSubscript, semantic: &SemanticModel) -> bool {
} }
// E.g., `Generic[DType, Unpack[int]]`. // E.g., `Generic[DType, Unpack[int]]`.
if parent.slice.as_tuple_expr().map_or(false, |slice| { if parent.slice.as_tuple_expr().is_some_and(|slice| {
slice slice
.elts .elts
.iter() .iter()
@@ -144,5 +144,5 @@ fn in_vararg(expr: &ExprSubscript, semantic: &SemanticModel) -> bool {
.as_ref() .as_ref()
.and_then(|vararg| vararg.annotation.as_ref()) .and_then(|vararg| vararg.annotation.as_ref())
.and_then(|annotation| annotation.as_subscript_expr()) .and_then(|annotation| annotation.as_subscript_expr())
.map_or(false, |annotation| annotation == expr) == Some(expr)
} }

View file

@@ -144,7 +144,7 @@ pub(crate) fn print_empty_string(checker: &mut Checker, call: &ast::ExprCall) {
let empty_separator = call let empty_separator = call
.arguments .arguments
.find_keyword("sep") .find_keyword("sep")
.map_or(false, |keyword| is_empty_string(&keyword.value)); .is_some_and(|keyword| is_empty_string(&keyword.value));
if !empty_separator { if !empty_separator {
return; return;
} }

View file

@@ -1732,7 +1732,7 @@ impl StringLiteralValue {
pub fn is_unicode(&self) -> bool { pub fn is_unicode(&self) -> bool {
self.iter() self.iter()
.next() .next()
.map_or(false, |part| part.flags.prefix().is_unicode()) .is_some_and(|part| part.flags.prefix().is_unicode())
} }
/// Returns a slice of all the [`StringLiteral`] parts contained in this value. /// Returns a slice of all the [`StringLiteral`] parts contained in this value.

View file

@@ -85,7 +85,7 @@ pub(crate) struct FormatLeadingAlternateBranchComments<'a> {
impl Format<PyFormatContext<'_>> for FormatLeadingAlternateBranchComments<'_> { impl Format<PyFormatContext<'_>> for FormatLeadingAlternateBranchComments<'_> {
fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> { fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
if self.last_node.map_or(false, |preceding| { if self.last_node.is_some_and(|preceding| {
should_insert_blank_line_after_class_in_stub_file(preceding, None, f.context()) should_insert_blank_line_after_class_in_stub_file(preceding, None, f.context())
}) { }) {
write!(f, [empty_line(), leading_comments(self.comments)])?; write!(f, [empty_line(), leading_comments(self.comments)])?;

View file

@@ -1002,24 +1002,21 @@ impl<'a> SemanticModel<'a> {
let value_name = UnqualifiedName::from_expr(value)?; let value_name = UnqualifiedName::from_expr(value)?;
let (_, tail) = value_name.segments().split_first()?; let (_, tail) = value_name.segments().split_first()?;
let resolved: QualifiedName = if qualified_name let resolved: QualifiedName =
.segments() if qualified_name.segments().first().copied() == Some(".") {
.first() from_relative_import(
.map_or(false, |segment| *segment == ".") self.module.qualified_name()?,
{ qualified_name.segments(),
from_relative_import( tail,
self.module.qualified_name()?, )?
qualified_name.segments(), } else {
tail, qualified_name
)? .segments()
} else { .iter()
qualified_name .chain(tail)
.segments() .copied()
.iter() .collect()
.chain(tail) };
.copied()
.collect()
};
Some(resolved) Some(resolved)
} }
BindingKind::Builtin => { BindingKind::Builtin => {

View file

@@ -125,7 +125,7 @@ impl Index {
DocumentKey::NotebookCell(url) DocumentKey::NotebookCell(url)
} else if Path::new(url.path()) } else if Path::new(url.path())
.extension() .extension()
.map_or(false, |ext| ext.eq_ignore_ascii_case("ipynb")) .is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb"))
{ {
DocumentKey::Notebook(url) DocumentKey::Notebook(url)
} else { } else {

View file

@@ -1,2 +1,2 @@
[toolchain] [toolchain]
channel = "1.83" channel = "1.84"