mirror of https://github.com/astral-sh/ruff.git (synced 2025-08-03 10:23:11 +00:00)

Upgrade Rust toolchain to 1.83 (#14677)

parent a6402fb51e
commit b63c2e126b
63 changed files with 127 additions and 127 deletions
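Most of the hunks below are mechanical: the 1.83 toolchain's clippy flags lifetimes that are named in an `impl` header but used nowhere else (`needless_lifetimes`), and the fix is to elide them with `'_`. A minimal sketch of that recurring pattern, using a hypothetical `Wrapper` type rather than anything from this commit:

// Hypothetical illustration of the elision applied throughout this diff.
struct Wrapper<'a>(&'a str);

// Before (flagged by the newer clippy): impl<'a> std::fmt::Display for Wrapper<'a> { ... }
// After: nothing else refers to the lifetime by name, so it becomes anonymous.
impl std::fmt::Display for Wrapper<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.0)
    }
}

fn main() {
    println!("{}", Wrapper("hello"));
}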
@@ -416,7 +416,7 @@ impl<'db> Iterator for SearchPathIterator<'db> {
}
}

-impl<'db> FusedIterator for SearchPathIterator<'db> {}
+impl FusedIterator for SearchPathIterator<'_> {}

/// Represents a single `.pth` file in a `site-packages` directory.
/// One or more lines in a `.pth` file may be a (relative or absolute)

@@ -400,7 +400,7 @@ pub(crate) struct ConstraintsIterator<'map, 'db> {
constraint_ids: ConstraintIdIterator<'map>,
}

-impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
+impl<'db> Iterator for ConstraintsIterator<'_, 'db> {
type Item = Constraint<'db>;

fn next(&mut self) -> Option<Self::Item> {

@@ -424,7 +424,7 @@ impl DeclarationsIterator<'_, '_> {
}
}

-impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
+impl<'db> Iterator for DeclarationsIterator<'_, 'db> {
type Item = Definition<'db>;

fn next(&mut self) -> Option<Self::Item> {

@@ -401,7 +401,7 @@ pub(super) struct DeclarationIdIterator<'a> {
inner: DeclarationsIterator<'a>,
}

-impl<'a> Iterator for DeclarationIdIterator<'a> {
+impl Iterator for DeclarationIdIterator<'_> {
type Item = ScopedDefinitionId;

fn next(&mut self) -> Option<Self::Item> {

@@ -2023,7 +2023,7 @@ impl<'db> TypeVarInstance<'db> {
}

#[allow(unused)]
-pub(crate) fn constraints(self, db: &'db dyn Db) -> Option<&[Type<'db>]> {
+pub(crate) fn constraints(self, db: &'db dyn Db) -> Option<&'db [Type<'db>]> {
if let Some(TypeVarBoundOrConstraints::Constraints(tuple)) = self.bound_or_constraints(db) {
Some(tuple.elements(db))
} else {

@@ -2567,7 +2567,7 @@ impl<'db> Class<'db> {
///
/// Were this not a salsa query, then the calling query
/// would depend on the class's AST and rerun for every change in that file.
-fn explicit_bases(self, db: &'db dyn Db) -> &[Type<'db>] {
+fn explicit_bases(self, db: &'db dyn Db) -> &'db [Type<'db>] {
self.explicit_bases_query(db)
}

@@ -2995,7 +2995,7 @@ pub struct SliceLiteralType<'db> {
step: Option<i32>,
}

-impl<'db> SliceLiteralType<'db> {
+impl SliceLiteralType<'_> {
fn as_tuple(self, db: &dyn Db) -> (Option<i32>, Option<i32>, Option<i32>) {
(self.start(db), self.stop(db), self.step(db))
}

@@ -289,7 +289,7 @@ struct DisplayMaybeNegatedType<'db> {
negated: bool,
}

-impl<'db> Display for DisplayMaybeNegatedType<'db> {
+impl Display for DisplayMaybeNegatedType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
if self.negated {
f.write_str("~")?;

@@ -319,7 +319,7 @@ pub(crate) struct DisplayTypeArray<'b, 'db> {
db: &'db dyn Db,
}

-impl<'db> Display for DisplayTypeArray<'_, 'db> {
+impl Display for DisplayTypeArray<'_, '_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.join(", ")
.entries(self.types.iter().map(|ty| ty.display(self.db)))

@@ -54,6 +54,7 @@ pub(crate) fn narrowing_constraint<'db>(
}
}

+#[allow(clippy::ref_option)]
#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db,

@@ -62,6 +63,7 @@ fn all_narrowing_constraints_for_pattern<'db>(
NarrowingConstraintsBuilder::new(db, ConstraintNode::Pattern(pattern), true).finish()
}

+#[allow(clippy::ref_option)]
#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_expression<'db>(
db: &'db dyn Db,

@@ -70,6 +72,7 @@ fn all_narrowing_constraints_for_expression<'db>(
NarrowingConstraintsBuilder::new(db, ConstraintNode::Expression(expression), true).finish()
}

+#[allow(clippy::ref_option)]
#[salsa::tracked(return_ref)]
fn all_negative_narrowing_constraints_for_expression<'db>(
db: &'db dyn Db,
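The `#[allow(clippy::ref_option)]` attributes added above exist because `#[salsa::tracked(return_ref)]` makes these queries return `&Option<...>`, which the newer clippy's `ref_option` lint flags. A minimal sketch of what the lint objects to and the shape it prefers (hypothetical functions, not taken from this codebase):

// Flagged shape: a `&Option<T>` parameter usually wants to be `Option<&T>`.
#[allow(clippy::ref_option)]
fn first_line(text: &Option<String>) -> usize {
    text.as_ref().map_or(0, |t| t.lines().count())
}

// Preferred shape.
fn first_line_preferred(text: Option<&String>) -> usize {
    text.map_or(0, |t| t.lines().count())
}

fn main() {
    let text = Some("a\nb".to_string());
    assert_eq!(first_line(&text), 2);
    assert_eq!(first_line_preferred(text.as_ref()), 2);
}

Since the salsa-generated signatures cannot easily change, the commit opts out with `allow` attributes instead of reshaping the API.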
@@ -26,7 +26,7 @@ pub(crate) struct Requester<'s> {
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
}

-impl<'s> Client<'s> {
+impl Client<'_> {
pub(super) fn new(sender: ClientSender) -> Self {
Self {
notifier: Notifier(sender.clone()),

@@ -1,3 +1,5 @@
+#![allow(clippy::ref_option)]
+
use crate::db::Db;
use crate::db::RootDatabase;
use crate::workspace::files::{Index, Indexed, IndexedIter, PackageFiles};

@@ -22,7 +22,7 @@ pub struct Join<'a, 'b> {
seen_first: bool,
}

-impl<'a, 'b> Join<'a, 'b> {
+impl Join<'_, '_> {
pub fn entry(&mut self, item: &dyn Display) -> &mut Self {
if self.seen_first {
self.result = self

@@ -270,7 +270,7 @@ impl VendoredZipArchive {
#[derive(Debug, Clone, PartialEq, Eq)]
struct NormalizedVendoredPath<'a>(Cow<'a, str>);

-impl<'a> NormalizedVendoredPath<'a> {
+impl NormalizedVendoredPath<'_> {
fn with_trailing_slash(self) -> Self {
debug_assert!(!self.0.ends_with('/'));
let mut data = self.0.into_owned();

@@ -62,6 +62,10 @@ impl Edit {
self.content.as_deref()
}

+pub fn into_content(self) -> Option<Box<str>> {
+self.content
+}
+
fn kind(&self) -> EditOperationKind {
if self.content.is_none() {
EditOperationKind::Deletion

@@ -263,7 +263,7 @@ impl<'inner, Context, Inspector> Inspect<'inner, Context, Inspector> {
}
}

-impl<'inner, Context, Inspector> Buffer for Inspect<'inner, Context, Inspector>
+impl<Context, Inspector> Buffer for Inspect<'_, Context, Inspector>
where
Inspector: FnMut(&FormatElement),
{

@@ -442,7 +442,7 @@ impl<'a> Iterator for BestFittingVariantsIter<'a> {
}
}

-impl<'a> DoubleEndedIterator for BestFittingVariantsIter<'a> {
+impl DoubleEndedIterator for BestFittingVariantsIter<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
let start_position = self.elements.iter().rposition(|element| {
matches!(element, FormatElement::Tag(Tag::StartBestFittingEntry))

@@ -151,7 +151,7 @@ impl Document {
propagate_expands(self, &mut enclosing, &mut interned);
}

-pub fn display<'a>(&'a self, source_code: SourceCode<'a>) -> DisplayDocument {
+pub fn display<'a>(&'a self, source_code: SourceCode<'a>) -> DisplayDocument<'a> {
DisplayDocument {
elements: self.elements.as_slice(),
source_code,

@@ -905,7 +905,7 @@ struct PrinterState<'a> {
fits_queue: Vec<std::slice::Iter<'a, FormatElement>>,
}

-impl<'a> PrinterState<'a> {
+impl PrinterState<'_> {
fn with_capacity(capacity: usize) -> Self {
Self {
buffer: String::with_capacity(capacity),

@@ -1049,8 +1049,6 @@ struct FitsMeasurer<'a, 'print> {
bomb: DebugDropBomb,
}

-impl<'a, 'print> FitsMeasurer<'a, 'print> {}
-
impl<'a, 'print> FitsMeasurer<'a, 'print> {
fn new_flat(
print_queue: &'print PrintQueue<'a>,

@@ -145,7 +145,7 @@ impl<'a, 'print> FitsQueue<'a, 'print> {
}
}

-impl<'a, 'print> Queue<'a> for FitsQueue<'a, 'print> {
+impl<'a> Queue<'a> for FitsQueue<'a, '_> {
fn pop(&mut self) -> Option<&'a FormatElement> {
self.queue.pop().or_else(
#[cold]

@@ -279,7 +279,7 @@ impl<'a> Iterator for SectionContextsIter<'a> {
}
}

-impl<'a> DoubleEndedIterator for SectionContextsIter<'a> {
+impl DoubleEndedIterator for SectionContextsIter<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
let back = self.inner.next_back()?;
Some(SectionContext {

@@ -292,7 +292,7 @@ pub(crate) fn add_parameter(parameter: &str, parameters: &Parameters, source: &s
{
// Case 1: at least one regular parameter, so append after the last one.
Edit::insertion(format!(", {parameter}"), last.end())
-} else if parameters.args.first().is_some() {
+} else if !parameters.args.is_empty() {
// Case 2: no regular parameters, but at least one keyword parameter, so add before the
// first.
let pos = parameters.start();

@@ -316,7 +316,7 @@ pub(crate) fn add_parameter(parameter: &str, parameters: &Parameters, source: &s
} else {
Edit::insertion(format!(", {parameter}"), slash.start())
}
-} else if parameters.kwonlyargs.first().is_some() {
+} else if !parameters.kwonlyargs.is_empty() {
// Case 3: no regular parameter, but a keyword-only parameter exist, so add parameter before that.
// We need to backtrack to before the `*` separator.
// We know there is no non-keyword-only params, so we can safely assume that the `*` separator is the first
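The two `add_parameter` branches above also trade `.first().is_some()` for `!is_empty()`. Both say "the slice has at least one element"; the sketch below (hypothetical values) only illustrates the equivalence and the more direct reading:

fn main() {
    let args: Vec<&str> = vec![];

    // Equivalent checks; the diff standardizes on `is_empty`, which states the
    // intent directly instead of going through `first()`.
    assert_eq!(args.first().is_some(), !args.is_empty());

    if !args.is_empty() {
        println!("has at least one argument");
    }
}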
@@ -26,7 +26,7 @@ pub(super) struct Diff<'a> {
}

impl<'a> Diff<'a> {
-pub(crate) fn from_message(message: &'a Message) -> Option<Diff> {
+pub(crate) fn from_message(message: &'a Message) -> Option<Diff<'a>> {
message.fix().map(|fix| Diff {
source_code: message.source_file(),
fix,
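The `Option<Diff>` to `Option<Diff<'a>>` change above, like the similar return-type edits later in the diff, appears to address rustc's `elided_named_lifetimes` warning: when an elided lifetime in a return type actually resolves to a named one, the newer compiler asks for it to be written out. A minimal sketch with a hypothetical type:

struct Diffish<'a>(&'a str);

// Before: `fn first_word<'a>(text: &'a str) -> Option<Diffish>` — the elided
// lifetime on `Diffish` silently means `'a`, so it is now spelled out.
fn first_word<'a>(text: &'a str) -> Option<Diffish<'a>> {
    text.split_whitespace().next().map(Diffish)
}

fn main() {
    assert_eq!(first_word("hello world").map(|d| d.0), Some("hello"));
}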
@@ -227,7 +227,7 @@ impl Display for Code<'_> {
}
}

-impl<'a> Ranged for Code<'a> {
+impl Ranged for Code<'_> {
/// The range of the rule code.
fn range(&self) -> TextRange {
self.range

@@ -240,7 +240,7 @@ pub(crate) struct Codes<'a> {
codes: Vec<Code<'a>>,
}

-impl<'a> Codes<'a> {
+impl Codes<'_> {
/// Returns an iterator over the [`Code`]s in the `noqa` directive.
pub(crate) fn iter(&self) -> std::slice::Iter<Code> {
self.codes.iter()

@@ -287,7 +287,7 @@ pub(crate) enum FileExemption<'a> {
Codes(Vec<&'a NoqaCode>),
}

-impl<'a> FileExemption<'a> {
+impl FileExemption<'_> {
/// Returns `true` if the file is exempt from the given rule.
pub(crate) fn includes(&self, needle: Rule) -> bool {
let needle = needle.noqa_code();

@@ -817,7 +817,7 @@ struct NoqaEdit<'a> {
line_ending: LineEnding,
}

-impl<'a> NoqaEdit<'a> {
+impl NoqaEdit<'_> {
fn into_edit(self) -> Edit {
let mut edit_content = String::new();
self.write(&mut edit_content);

@@ -849,7 +849,7 @@ impl<'a> NoqaEdit<'a> {
}
}

-impl<'a> Ranged for NoqaEdit<'a> {
+impl Ranged for NoqaEdit<'_> {
fn range(&self) -> TextRange {
self.edit_range
}

@@ -15,10 +15,10 @@ pub(crate) fn is_fastapi_route(
function_def: &ast::StmtFunctionDef,
semantic: &SemanticModel,
) -> bool {
-return function_def
+function_def
.decorator_list
.iter()
-.any(|decorator| is_fastapi_route_decorator(decorator, semantic).is_some());
+.any(|decorator| is_fastapi_route_decorator(decorator, semantic).is_some())
}

/// Returns `true` if the decorator is indicative of a FastAPI route.
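The `is_fastapi_route` change above drops a trailing `return ...;` in favor of a tail expression, the shape clippy's `needless_return` lint suggests. A minimal sketch with a hypothetical function:

fn is_even(n: u32) -> bool {
    // Before: `return n % 2 == 0;`
    // After: the last expression of the block is the return value.
    n % 2 == 0
}

fn main() {
    assert!(is_even(4));
}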
@@ -146,7 +146,7 @@ enum Method<'a> {
Instance(InstanceMethod<'a>),
}

-impl<'a> Method<'a> {
+impl Method<'_> {
fn uses_custom_var(&self) -> bool {
match self {
Self::Class(class_method) => class_method.uses_custom_var(),

@@ -162,7 +162,7 @@ struct ClassMethod<'a> {
type_params: Option<&'a TypeParams>,
}

-impl<'a> ClassMethod<'a> {
+impl ClassMethod<'_> {
/// Returns `true` if the class method is annotated with
/// a custom `TypeVar` that is likely private.
fn uses_custom_var(&self) -> bool {

@@ -203,7 +203,7 @@ struct InstanceMethod<'a> {
type_params: Option<&'a TypeParams>,
}

-impl<'a> InstanceMethod<'a> {
+impl InstanceMethod<'_> {
/// Returns `true` if the instance method is annotated with
/// a custom `TypeVar` that is likely private.
fn uses_custom_var(&self) -> bool {

@@ -59,7 +59,7 @@ impl<'semantic, 'data> ReturnVisitor<'semantic, 'data> {
}
}

-impl<'semantic, 'a> Visitor<'a> for ReturnVisitor<'semantic, 'a> {
+impl<'a> Visitor<'a> for ReturnVisitor<'_, 'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt {
Stmt::ClassDef(ast::StmtClassDef { decorator_list, .. }) => {

@@ -59,7 +59,7 @@ impl<'a> From<&'a str> for NatOrdStr<'a> {
}
}

-impl<'a> From<String> for NatOrdStr<'a> {
+impl From<String> for NatOrdStr<'_> {
fn from(s: String) -> Self {
NatOrdStr(Cow::Owned(s))
}

@@ -420,7 +420,7 @@ impl<'a> LinePreprocessor<'a> {
}
}

-impl<'a> Iterator for LinePreprocessor<'a> {
+impl Iterator for LinePreprocessor<'_> {
type Item = LogicalLineInfo;

fn next(&mut self) -> Option<LogicalLineInfo> {

@@ -878,7 +878,7 @@ pub(crate) fn check_docstring(
{
let extra_property_decorators = checker.settings.pydocstyle.property_decorators();
if !definition.is_property(extra_property_decorators, semantic) {
-if body_entries.returns.first().is_some() {
+if !body_entries.returns.is_empty() {
match function_def.returns.as_deref() {
Some(returns) => {
// Ignore it if it's annotated as returning `None`

@@ -915,7 +915,7 @@ pub(crate) fn check_docstring(
// DOC402
if checker.enabled(Rule::DocstringMissingYields) {
if !yields_documented(docstring, &docstring_sections, convention) {
-if body_entries.yields.first().is_some() {
+if !body_entries.yields.is_empty() {
match function_def.returns.as_deref() {
Some(returns)
if !generator_annotation_arguments(returns, semantic).is_some_and(

@@ -76,11 +76,9 @@ pub(crate) fn get_section_contexts<'a>(
) -> SectionContexts<'a> {
match convention {
Some(Convention::Google) => {
-return SectionContexts::from_docstring(docstring, SectionStyle::Google);
-}
-Some(Convention::Numpy) => {
-return SectionContexts::from_docstring(docstring, SectionStyle::Numpy);
+SectionContexts::from_docstring(docstring, SectionStyle::Google)
}
+Some(Convention::Numpy) => SectionContexts::from_docstring(docstring, SectionStyle::Numpy),
Some(Convention::Pep257) | None => {
// There are some overlapping section names, between the Google and NumPy conventions
// (e.g., "Returns", "Raises"). Break ties by checking for the presence of some of the

@@ -462,7 +462,7 @@ struct ImportBinding<'a> {
parent_range: Option<TextRange>,
}

-impl<'a> ImportBinding<'a> {
+impl ImportBinding<'_> {
/// The symbol that is stored in the outer scope as a result of this import.
///
/// For example:

@@ -113,7 +113,7 @@ impl SequenceIndexVisitor<'_> {
}
}

-impl<'a> Visitor<'_> for SequenceIndexVisitor<'a> {
+impl Visitor<'_> for SequenceIndexVisitor<'_> {
fn visit_stmt(&mut self, stmt: &Stmt) {
if self.modified {
return;

@@ -162,7 +162,7 @@ fn is_sequence(expr: &Expr, semantic: &SemanticModel) -> bool {
};

// Match against specific built-in constructors that return sequences
-return semantic.resolve_builtin_symbol(func).is_some_and(|func| {
+semantic.resolve_builtin_symbol(func).is_some_and(|func| {
matches!(
func,
"chr"

@@ -187,5 +187,5 @@ fn is_sequence(expr: &Expr, semantic: &SemanticModel) -> bool {
| "memoryview"
| "oct"
)
-});
+})
}

@@ -144,7 +144,7 @@ struct InnerForWithAssignTargetsVisitor<'a, 'b> {
assignment_targets: Vec<ExprWithInnerBindingKind<'a>>,
}

-impl<'a, 'b> StatementVisitor<'b> for InnerForWithAssignTargetsVisitor<'a, 'b> {
+impl<'b> StatementVisitor<'b> for InnerForWithAssignTargetsVisitor<'_, 'b> {
fn visit_stmt(&mut self, stmt: &'b Stmt) {
// Collect target expressions.
match stmt {

@@ -149,7 +149,7 @@ fn is_same_expr(left: &Expr, right: &Expr) -> bool {

/// Collect all named variables in an expression consisting solely of tuples and
/// names.
-fn collect_names<'a>(expr: &'a Expr) -> Box<dyn Iterator<Item = &ast::ExprName> + 'a> {
+fn collect_names<'a>(expr: &'a Expr) -> Box<dyn Iterator<Item = &'a ast::ExprName> + 'a> {
Box::new(
expr.as_name_expr().into_iter().chain(
expr.as_tuple_expr()

@@ -106,7 +106,7 @@ pub(super) struct FileOpen<'a> {
pub(super) reference: &'a ResolvedReference,
}

-impl<'a> FileOpen<'a> {
+impl FileOpen<'_> {
/// Determine whether an expression is a reference to the file handle, by comparing
/// their ranges. If two expressions have the same range, they must be the same expression.
pub(super) fn is_ref(&self, expr: &Expr) -> bool {

@@ -203,7 +203,7 @@ impl<'src, 'loc> UselessSuppressionComments<'src, 'loc> {
}
}

-impl<'src, 'loc> CaptureSuppressionComment<'src> for UselessSuppressionComments<'src, 'loc> {
+impl<'src> CaptureSuppressionComment<'src> for UselessSuppressionComments<'src, '_> {
fn capture(&mut self, comment: SuppressionCommentData<'src>) {
match self.check_suppression_comment(&comment) {
Ok(()) => {}

@@ -315,7 +315,7 @@ enum DisplayKind<'a> {
Dict { items: &'a [ast::DictItem] },
}

-impl<'a> DisplayKind<'a> {
+impl DisplayKind<'_> {
const fn is_set_literal(self) -> bool {
matches!(self, Self::Sequence(SequenceKind::Set))
}

@@ -22,7 +22,7 @@ impl<'a, 'b> LoggerCandidateVisitor<'a, 'b> {
}
}

-impl<'a, 'b> Visitor<'b> for LoggerCandidateVisitor<'a, 'b> {
+impl<'b> Visitor<'b> for LoggerCandidateVisitor<'_, 'b> {
fn visit_expr(&mut self, expr: &'b Expr) {
if let Expr::Call(call) = expr {
match call.func.as_ref() {

@@ -407,7 +407,7 @@ pub enum StringLike<'a> {
FString(&'a ast::ExprFString),
}

-impl<'a> StringLike<'a> {
+impl StringLike<'_> {
pub const fn is_fstring(self) -> bool {
matches!(self, Self::FString(_))
}

@@ -669,7 +669,7 @@ impl<'a> Deref for SegmentsVec<'a> {
}
}

-impl<'a, 'b> PartialEq<SegmentsVec<'b>> for SegmentsVec<'a> {
+impl<'b> PartialEq<SegmentsVec<'b>> for SegmentsVec<'_> {
fn eq(&self, other: &SegmentsVec<'b>) -> bool {
self.as_slice() == other.as_slice()
}

@@ -919,14 +919,14 @@ impl<'a> Iterator for DictKeyIterator<'a> {
}
}

-impl<'a> DoubleEndedIterator for DictKeyIterator<'a> {
+impl DoubleEndedIterator for DictKeyIterator<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
self.items.next_back().map(DictItem::key)
}
}

-impl<'a> FusedIterator for DictKeyIterator<'a> {}
-impl<'a> ExactSizeIterator for DictKeyIterator<'a> {}
+impl FusedIterator for DictKeyIterator<'_> {}
+impl ExactSizeIterator for DictKeyIterator<'_> {}

#[derive(Debug, Clone)]
pub struct DictValueIterator<'a> {

@@ -961,14 +961,14 @@ impl<'a> Iterator for DictValueIterator<'a> {
}
}

-impl<'a> DoubleEndedIterator for DictValueIterator<'a> {
+impl DoubleEndedIterator for DictValueIterator<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
self.items.next_back().map(DictItem::value)
}
}

-impl<'a> FusedIterator for DictValueIterator<'a> {}
-impl<'a> ExactSizeIterator for DictValueIterator<'a> {}
+impl FusedIterator for DictValueIterator<'_> {}
+impl ExactSizeIterator for DictValueIterator<'_> {}

/// See also [Set](https://docs.python.org/3/library/ast.html#ast.Set)
#[derive(Clone, Debug, PartialEq)]

@@ -3666,7 +3666,7 @@ impl<'a> Iterator for ParametersIterator<'a> {
}
}

-impl<'a> DoubleEndedIterator for ParametersIterator<'a> {
+impl DoubleEndedIterator for ParametersIterator<'_> {
fn next_back(&mut self) -> Option<Self::Item> {
let ParametersIterator {
posonlyargs,

@@ -3692,11 +3692,11 @@ impl<'a> DoubleEndedIterator for ParametersIterator<'a> {
}
}

-impl<'a> FusedIterator for ParametersIterator<'a> {}
+impl FusedIterator for ParametersIterator<'_> {}

/// We rely on the same invariants outlined in the comment above `Parameters::len()`
/// in order to implement `ExactSizeIterator` here
-impl<'a> ExactSizeIterator for ParametersIterator<'a> {}
+impl ExactSizeIterator for ParametersIterator<'_> {}

impl<'a> IntoIterator for &'a Parameters {
type IntoIter = ParametersIterator<'a>;

@@ -340,7 +340,7 @@ impl<'a, T> IntoIterator for LeadingDanglingTrailing<'a, T> {
}
}

-impl<'a, T> Debug for LeadingDanglingTrailing<'a, T>
+impl<T> Debug for LeadingDanglingTrailing<'_, T>
where
T: Debug,
{

@@ -221,7 +221,7 @@ where
}
}

-impl<'ast, 'buf, B> Deref for WithNodeLevel<'ast, 'buf, B>
+impl<'ast, B> Deref for WithNodeLevel<'ast, '_, B>
where
B: Buffer<Context = PyFormatContext<'ast>>,
{

@@ -232,7 +232,7 @@ where
}
}

-impl<'ast, 'buf, B> DerefMut for WithNodeLevel<'ast, 'buf, B>
+impl<'ast, B> DerefMut for WithNodeLevel<'ast, '_, B>
where
B: Buffer<Context = PyFormatContext<'ast>>,
{

@@ -881,7 +881,7 @@ impl Format<PyFormatContext<'_>> for Operand<'_> {
fn fmt(&self, f: &mut Formatter<PyFormatContext<'_>>) -> FormatResult<()> {
let expression = self.expression();

-return if is_expression_parenthesized(
+if is_expression_parenthesized(
expression.into(),
f.context().comments().ranges(),
f.context().source(),

@@ -1017,7 +1017,7 @@ impl Format<PyFormatContext<'_>> for Operand<'_> {
Ok(())
} else {
expression.format().with_options(Parentheses::Never).fmt(f)
-};
+}
}
}

@@ -133,9 +133,7 @@ impl FormatNodeRule<ExprTuple> for FormatExprTuple {
// ```
// In all other cases comments get assigned to a list element
match elts.as_slice() {
-[] => {
-return empty_parenthesized("(", dangling, ")").fmt(f);
-}
+[] => empty_parenthesized("(", dangling, ")").fmt(f),
[single] => match self.parentheses {
TupleParentheses::Preserve if !is_parenthesized => {
single.format().fmt(f)?;

@@ -14,7 +14,7 @@ pub(super) enum AnyExpressionYield<'a> {
YieldFrom(&'a ExprYieldFrom),
}

-impl<'a> AnyExpressionYield<'a> {
+impl AnyExpressionYield<'_> {
const fn is_yield_from(&self) -> bool {
matches!(self, AnyExpressionYield::YieldFrom(_))
}

@@ -18,7 +18,10 @@ impl<T, C> AsFormat<C> for &T
where
T: AsFormat<C>,
{
-type Format<'a> = T::Format<'a> where Self: 'a;
+type Format<'a>
+= T::Format<'a>
+where
+Self: 'a;

fn format(&self) -> Self::Format<'_> {
AsFormat::format(&**self)
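The `AsFormat` hunk above (and the `ExceptHandler` one just below) reformats generic-associated-type definitions so that the `where Self: 'a` bound sits in a trailing `where` block instead of on the `type ... = ...;` line, which appears to be the layout the newer rustfmt settles on. A self-contained sketch with a hypothetical trait:

trait Lender {
    type Item<'a>
    where
        Self: 'a;

    fn lend(&self) -> Self::Item<'_>;
}

struct Owner(String);

impl Lender for Owner {
    // The `=` value sits on its own line and the bound moves into a trailing
    // `where` block, matching the shape adopted throughout this diff.
    type Item<'a>
        = &'a str
    where
        Self: 'a;

    fn lend(&self) -> Self::Item<'_> {
        &self.0
    }
}

fn main() {
    let owner = Owner("borrowed".to_string());
    println!("{}", owner.lend());
}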
@@ -36,7 +36,7 @@ pub(crate) enum ClauseHeader<'a> {
OrElse(ElseClause<'a>),
}

-impl<'a> ClauseHeader<'a> {
+impl ClauseHeader<'_> {
/// The range from the clause keyword up to and including the final colon.
pub(crate) fn range(self, source: &str) -> FormatResult<TextRange> {
let keyword_range = self.first_keyword_range(source)?;

@@ -47,12 +47,10 @@ impl FormatRule<ExceptHandler, PyFormatContext<'_>> for FormatExceptHandler {
}

impl<'ast> AsFormat<PyFormatContext<'ast>> for ExceptHandler {
-type Format<'a> = FormatRefWithRule<
-'a,
-ExceptHandler,
-FormatExceptHandler,
-PyFormatContext<'ast>,
-> where Self: 'a;
+type Format<'a>
+= FormatRefWithRule<'a, ExceptHandler, FormatExceptHandler, PyFormatContext<'ast>>
+where
+Self: 'a;

fn format(&self) -> Self::Format<'_> {
FormatRefWithRule::new(self, FormatExceptHandler::default())

@@ -267,7 +267,7 @@ struct DocstringLinePrinter<'ast, 'buf, 'fmt, 'src> {
code_example: CodeExample<'src>,
}

-impl<'ast, 'buf, 'fmt, 'src> DocstringLinePrinter<'ast, 'buf, 'fmt, 'src> {
+impl<'src> DocstringLinePrinter<'_, '_, '_, 'src> {
/// Print all of the lines in the given iterator to this
/// printer's formatter.
///

@@ -665,7 +665,7 @@ struct OutputDocstringLine<'src> {
is_last: bool,
}

-impl<'src> OutputDocstringLine<'src> {
+impl OutputDocstringLine<'_> {
/// Return this reformatted line, but with the given function applied to
/// the text of the line.
fn map(self, mut map: impl FnMut(&str) -> String) -> OutputDocstringLine<'static> {

@@ -1026,7 +1026,7 @@ impl<'src> CodeExampleRst<'src> {
///
/// [literal block]: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks
/// [code block directive]: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block
-fn new(original: InputDocstringLine<'src>) -> Option<CodeExampleRst> {
+fn new(original: InputDocstringLine<'src>) -> Option<CodeExampleRst<'src>> {
let (opening_indent, rest) = indent_with_suffix(original.line);
if rest.starts_with(".. ") {
if let Some(litblock) = CodeExampleRst::new_code_block(original) {

@@ -1061,7 +1061,7 @@ impl<'src> CodeExampleRst<'src> {
/// Attempts to create a new reStructuredText code example from a
/// `code-block` or `sourcecode` directive. If one couldn't be found, then
/// `None` is returned.
-fn new_code_block(original: InputDocstringLine<'src>) -> Option<CodeExampleRst> {
+fn new_code_block(original: InputDocstringLine<'src>) -> Option<CodeExampleRst<'src>> {
// This regex attempts to parse the start of a reStructuredText code
// block [directive]. From the reStructuredText spec:
//

@@ -778,7 +778,7 @@ impl<'str> CharIndicesWithOffset<'str> {
}
}

-impl<'str> Iterator for CharIndicesWithOffset<'str> {
+impl Iterator for CharIndicesWithOffset<'_> {
type Item = (usize, char);

fn next(&mut self) -> Option<Self::Item> {

@@ -787,7 +787,7 @@ impl<'a> LogicalLinesIter<'a> {
}
}

-impl<'a> Iterator for LogicalLinesIter<'a> {
+impl Iterator for LogicalLinesIter<'_> {
type Item = FormatResult<LogicalLine>;

fn next(&mut self) -> Option<Self::Item> {

@@ -841,7 +841,7 @@ impl<'a> Iterator for LogicalLinesIter<'a> {
}
}

-impl<'a> FusedIterator for LogicalLinesIter<'a> {}
+impl FusedIterator for LogicalLinesIter<'_> {}

/// A logical line or a comment (or form feed only) line
struct LogicalLine {

@@ -202,7 +202,7 @@ impl UnicodeEscape<'_> {
}
}

-impl<'a> Escape for UnicodeEscape<'a> {
+impl Escape for UnicodeEscape<'_> {
fn source_len(&self) -> usize {
self.source.len()
}

@@ -337,7 +337,7 @@ impl AsciiEscape<'_> {
}
}

-impl<'a> Escape for AsciiEscape<'a> {
+impl Escape for AsciiEscape<'_> {
fn source_len(&self) -> usize {
self.source.len()
}

@@ -49,7 +49,7 @@ const MAPPING_PATTERN_START_SET: TokenSet = TokenSet::new([
])
.union(LITERAL_PATTERN_START_SET);

-impl<'src> Parser<'src> {
+impl Parser<'_> {
/// Returns `true` if the current token is a valid start of a pattern.
pub(super) fn at_pattern_start(&self) -> bool {
self.at_ts(PATTERN_START_SET) || self.at_soft_keyword()

@@ -113,20 +113,17 @@ impl From<&Expr> for ResolvedPythonType {

// Unary operators.
Expr::UnaryOp(ast::ExprUnaryOp { operand, op, .. }) => match op {
-UnaryOp::Invert => {
-return match ResolvedPythonType::from(operand.as_ref()) {
+UnaryOp::Invert => match ResolvedPythonType::from(operand.as_ref()) {
ResolvedPythonType::Atom(PythonType::Number(
NumberLike::Bool | NumberLike::Integer,
)) => ResolvedPythonType::Atom(PythonType::Number(NumberLike::Integer)),
ResolvedPythonType::Atom(_) => ResolvedPythonType::TypeError,
_ => ResolvedPythonType::Unknown,
-}
-}
+},
// Ex) `not 1.0`
UnaryOp::Not => ResolvedPythonType::Atom(PythonType::Number(NumberLike::Bool)),
// Ex) `+1` or `-1`
-UnaryOp::UAdd | UnaryOp::USub => {
-return match ResolvedPythonType::from(operand.as_ref()) {
+UnaryOp::UAdd | UnaryOp::USub => match ResolvedPythonType::from(operand.as_ref()) {
ResolvedPythonType::Atom(PythonType::Number(number)) => {
ResolvedPythonType::Atom(PythonType::Number(
if number == NumberLike::Bool {

@@ -138,8 +135,7 @@ impl From<&Expr> for ResolvedPythonType {
}
ResolvedPythonType::Atom(_) => ResolvedPythonType::TypeError,
_ => ResolvedPythonType::Unknown,
-}
-}
+},
},

// Binary operators.

@@ -446,7 +446,7 @@ impl<'a> Deref for Bindings<'a> {
}
}

-impl<'a> DerefMut for Bindings<'a> {
+impl DerefMut for Bindings<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}

@@ -749,7 +749,7 @@ pub enum AnyImport<'a, 'ast> {
FromImport(&'a FromImport<'ast>),
}

-impl<'a, 'ast> Imported<'ast> for AnyImport<'a, 'ast> {
+impl<'ast> Imported<'ast> for AnyImport<'_, 'ast> {
fn qualified_name(&self) -> &QualifiedName<'ast> {
match self {
Self::Import(import) => import.qualified_name(),

@@ -209,7 +209,7 @@ impl<'de> serde::de::Deserialize<'de> for NameImports {

struct AnyNameImportsVisitor;

-impl<'de> serde::de::Visitor<'de> for AnyNameImportsVisitor {
+impl serde::de::Visitor<'_> for AnyNameImportsVisitor {
type Value = NameImports;

fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {

@@ -70,7 +70,7 @@ impl Ranged for DunderAllDefinition<'_> {
}
}

-impl<'a> SemanticModel<'a> {
+impl SemanticModel<'_> {
/// Extract the names bound to a given __all__ assignment.
pub fn extract_dunder_all_names<'expr>(
&self,

@@ -255,7 +255,7 @@ impl<'a> Deref for Scopes<'a> {
}
}

-impl<'a> DerefMut for Scopes<'a> {
+impl DerefMut for Scopes<'_> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}

@@ -156,7 +156,7 @@ pub(crate) fn check(
.map(|(diagnostic, noqa_edit)| {
to_lsp_diagnostic(
diagnostic,
-&noqa_edit,
+noqa_edit,
&source_kind,
locator.to_index(),
encoding,

@@ -234,7 +234,7 @@ pub(crate) fn fixes_for_diagnostics(
/// If the source kind is a text document, the cell index will always be `0`.
fn to_lsp_diagnostic(
diagnostic: Diagnostic,
-noqa_edit: &Option<Edit>,
+noqa_edit: Option<Edit>,
source_kind: &SourceKind,
index: &LineIndex,
encoding: PositionEncoding,

@@ -261,9 +261,9 @@ fn to_lsp_diagnostic(
new_text: edit.content().unwrap_or_default().to_string(),
})
.collect();
-let noqa_edit = noqa_edit.as_ref().map(|noqa_edit| lsp_types::TextEdit {
+let noqa_edit = noqa_edit.map(|noqa_edit| lsp_types::TextEdit {
range: diagnostic_edit_range(noqa_edit.range(), source_kind, index, encoding),
-new_text: noqa_edit.content().unwrap_or_default().to_string(),
+new_text: noqa_edit.into_content().unwrap_or_default().into_string(),
});
serde_json::to_value(AssociatedDiagnosticData {
kind: kind.clone(),

@@ -26,7 +26,7 @@ pub(crate) struct Requester<'s> {
response_handlers: FxHashMap<lsp_server::RequestId, ResponseBuilder<'s>>,
}

-impl<'s> Client<'s> {
+impl Client<'_> {
pub(super) fn new(sender: ClientSender) -> Self {
Self {
notifier: Notifier(sender.clone()),

@@ -334,7 +334,7 @@ impl RuffSettingsIndex {

struct EditorConfigurationTransformer<'a>(&'a ResolvedEditorSettings, &'a Path);

-impl<'a> ConfigurationTransformer for EditorConfigurationTransformer<'a> {
+impl ConfigurationTransformer for EditorConfigurationTransformer<'_> {
fn transform(&self, filesystem_configuration: Configuration) -> Configuration {
let ResolvedEditorSettings {
configuration,

@@ -333,7 +333,7 @@ struct SerializeVisitor<'a> {
entries: &'a mut BTreeMap<String, OptionField>,
}

-impl<'a> Visit for SerializeVisitor<'a> {
+impl Visit for SerializeVisitor<'_> {
fn record_set(&mut self, name: &str, set: OptionSet) {
// Collect the entries of the set.
let mut entries = BTreeMap::new();

@@ -186,7 +186,7 @@ impl<'a> Resolver<'a> {
pub fn package_roots(
&'a self,
files: &[&'a Path],
-) -> FxHashMap<&'a Path, Option<PackageRoot<'_>>> {
+) -> FxHashMap<&'a Path, Option<PackageRoot<'a>>> {
// Pre-populate the module cache, since the list of files could (but isn't
// required to) contain some `__init__.py` files.
let mut package_cache: FxHashMap<&Path, bool> = FxHashMap::default();

@@ -1,2 +1,2 @@
[toolchain]
-channel = "1.82"
+channel = "1.83"