Improve unused-variable autofixes for with statements (#2785)

Author: Charlie Marsh, 2023-02-11 19:38:14 -05:00 (committed by GitHub)
parent 81651a8479
commit 752c0150e1
3 changed files with 173 additions and 9 deletions

Changed file 1/3: pyflakes test fixture (Python, F841 cases)

@@ -66,3 +66,18 @@ def f(a, b):
     y = \
         a if a is not None else b
 
+
+
+def f():
+    with Nested(m) as (cm):
+        pass
+
+
+def f():
+    with (Nested(m) as (cm),):
+        pass
+
+
+def f():
+    with Nested(m) as (x, y):
+        pass

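For the two cm cases, the improved autofix deletes just the trailing as-binding and keeps the context manager plus the delimiter that follows it; per the snapshot at the end of this commit, the tuple target (x, y) produces no new diagnostic. A before/after sketch (Nested and m are fixture names, not real objects):

# Before the fix:
def f():
    with Nested(m) as (cm):
        pass

def f():
    with (Nested(m) as (cm),):
        pass

# After the fix (" as (cm)" deleted; the comma and colon survive):
def f():
    with Nested(m):
        pass

def f():
    with (Nested(m),):
        pass
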
Changed file 2/3: unused-variable (F841) rule implementation (Rust)

@@ -1,10 +1,11 @@
 use itertools::Itertools;
 use log::error;
-use ruff_macros::{define_violation, derive_message_formats};
-use rustpython_parser::ast::{ExprKind, Location, Stmt, StmtKind};
+use rustpython_parser::ast::{ExprKind, Located, Stmt, StmtKind};
 use rustpython_parser::lexer;
 use rustpython_parser::lexer::Tok;
 
+use ruff_macros::{define_violation, derive_message_formats};
+
 use crate::ast::helpers::contains_effect;
 use crate::ast::types::{BindingKind, Range, RefEquality, ScopeKind};
 use crate::autofix::helpers::delete_stmt;
@@ -32,17 +33,122 @@ impl AlwaysAutofixableViolation for UnusedVariable {
     }
 }
 
-fn match_token_after<F>(stmt: &Stmt, locator: &Locator, f: F) -> Location
+/// Return the start and end [`Location`] of the token after the next match of the predicate,
+/// skipping over any bracketed expressions.
+fn match_token_after<F>(stmt: &Stmt, locator: &Locator, f: F) -> Range
 where
     F: Fn(Tok) -> bool,
 {
     let contents = locator.slice_source_code_range(&Range::from_located(stmt));
-    for ((_, tok, _), (start, ..)) in lexer::make_tokenizer_located(contents, stmt.location)
+
+    // Track the bracket depth.
+    let mut par_count = 0;
+    let mut sqb_count = 0;
+    let mut brace_count = 0;
+
+    for ((_, tok, _), (start, _, end)) in lexer::make_tokenizer_located(contents, stmt.location)
         .flatten()
         .tuple_windows()
     {
+        match tok {
+            Tok::Lpar => {
+                par_count += 1;
+            }
+            Tok::Lsqb => {
+                sqb_count += 1;
+            }
+            Tok::Lbrace => {
+                brace_count += 1;
+            }
+            Tok::Rpar => {
+                par_count -= 1;
+                // If this is a closing bracket, continue.
+                if par_count == 0 {
+                    continue;
+                }
+            }
+            Tok::Rsqb => {
+                sqb_count -= 1;
+                // If this is a closing bracket, continue.
+                if sqb_count == 0 {
+                    continue;
+                }
+            }
+            Tok::Rbrace => {
+                brace_count -= 1;
+                // If this is a closing bracket, continue.
+                if brace_count == 0 {
+                    continue;
+                }
+            }
+            _ => {}
+        }
+
+        // If we're in nested brackets, continue.
+        if par_count > 0 || sqb_count > 0 || brace_count > 0 {
+            continue;
+        }
+
         if f(tok) {
-            return start;
+            return Range::new(start, end);
+        }
+    }
+    unreachable!("No token after matched");
+}
+
+/// Return the start and end [`Location`] of the token matching the predicate, skipping over
+/// any bracketed expressions.
+fn match_token<F, T>(located: &Located<T>, locator: &Locator, f: F) -> Range
+where
+    F: Fn(Tok) -> bool,
+{
+    let contents = locator.slice_source_code_at(located.location);
+
+    // Track the bracket depth.
+    let mut par_count = 0;
+    let mut sqb_count = 0;
+    let mut brace_count = 0;
+
+    for (start, tok, end) in lexer::make_tokenizer_located(contents, located.location).flatten() {
+        match tok {
+            Tok::Lpar => {
+                par_count += 1;
+            }
+            Tok::Lsqb => {
+                sqb_count += 1;
+            }
+            Tok::Lbrace => {
+                brace_count += 1;
+            }
+            Tok::Rpar => {
+                par_count -= 1;
+                // If this is a closing bracket, continue.
+                if par_count == 0 {
+                    continue;
+                }
+            }
+            Tok::Rsqb => {
+                sqb_count -= 1;
+                // If this is a closing bracket, continue.
+                if sqb_count == 0 {
+                    continue;
+                }
+            }
+            Tok::Rbrace => {
+                brace_count -= 1;
+                // If this is a closing bracket, continue.
+                if brace_count == 0 {
+                    continue;
+                }
+            }
+            _ => {}
+        }
+
+        // If we're in nested brackets, continue.
+        if par_count > 0 || sqb_count > 0 || brace_count > 0 {
+            continue;
+        }
+
+        if f(tok) {
+            return Range::new(start, end);
         }
     }
     unreachable!("No token after matched");
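
Both helpers follow the same scanning pattern: walk the token stream, keep a per-bracket-type depth counter, and only test the predicate against tokens at the top level, so a comma, colon, or parenthesis inside a nested expression is never matched. A minimal sketch of the idea in Python, using the stdlib tokenize module (the function name and inputs are illustrative, not ruff's API):

import io
import tokenize

def match_token(source, predicate):
    """Return the first token at bracket depth zero that satisfies
    `predicate`, skipping anything inside (), [], or {}."""
    depth = 0
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.string in ("(", "[", "{"):
            depth += 1
            continue
        if tok.string in (")", "]", "}"):
            depth -= 1
            if depth == 0:
                # This bracket just closed a nested expression; it is
                # not a match candidate itself.
                continue
        if depth > 0:
            continue  # Still inside brackets.
        if predicate(tok):
            return tok
    raise ValueError("no matching token")

# The comma inside Nested(m, n) is skipped; the top-level comma after
# `cm` is the match.
tok = match_token("Nested(m, n) as cm, other\n", lambda t: t.string == ",")
print(tok.start)  # (1, 18)
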
@@ -70,7 +176,7 @@ fn remove_unused_variable(
             DeletionKind::Partial,
             Fix::deletion(
                 stmt.location,
-                match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal),
+                match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal).location,
             ),
         ))
     } else {
@@ -117,7 +223,7 @@ fn remove_unused_variable(
             DeletionKind::Partial,
             Fix::deletion(
                 stmt.location,
-                match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal),
+                match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal).location,
             ),
         ))
     } else {
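
Both call sites above belong to the assignment branch of remove_unused_variable: when the right-hand side has side effects (the contains_effect check imported at the top of the file), the fix deletes from the start of the statement to the start of the token after the `=`, leaving only the value expression. A hypothetical before/after (do_work is an illustrative name, not from the fixture):

# Before: `x` is unused (F841), but the call may have side effects.
def f():
    x = do_work()

# After: the fix deletes "x = " and keeps the call.
def f():
    do_work()
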
@@ -162,7 +268,12 @@ fn remove_unused_variable(
             DeletionKind::Partial,
             Fix::deletion(
                 item.context_expr.end_location.unwrap(),
-                optional_vars.end_location.unwrap(),
+                // The end of the `Withitem` is the colon, comma, or closing
+                // parenthesis following the `optional_vars`.
+                match_token(&item.context_expr, checker.locator, |tok| {
+                    tok == Tok::Colon || tok == Tok::Comma || tok == Tok::Rpar
+                })
+                .location,
             ),
         ));
     }
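
The predicate stops at the first top-level colon, comma, or closing parenthesis after the context expression, and the fix uses the start of the matched token, so the delimiter itself is preserved. Roughly how each delimiter arises (the closing-parenthesis case is an assumption about what the Tok::Rpar arm covers; it does not appear in the fixture):

with Nested(m) as (cm):      # stops at ":"  ->  with Nested(m):
    pass

with (Nested(m) as (cm),):   # stops at ","  ->  with (Nested(m),):
    pass

with (Nested(m) as cm):      # stops at ")"  ->  with (Nested(m)):   (assumed case)
    pass
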

Changed file 3/3: pyflakes snapshot (expected diagnostics, YAML)

@@ -1,5 +1,5 @@
 ---
-source: src/rules/pyflakes/mod.rs
+source: crates/ruff/src/rules/pyflakes/mod.rs
 expression: diagnostics
 ---
 - kind:
@@ -312,4 +312,42 @@ expression: diagnostics
     row: 69
     column: 0
   parent: ~
+- kind:
+    UnusedVariable:
+      name: cm
+  location:
+    row: 72
+    column: 23
+  end_location:
+    row: 72
+    column: 25
+  fix:
+    content:
+      - ""
+    location:
+      row: 72
+      column: 18
+    end_location:
+      row: 72
+      column: 26
+  parent: ~
+- kind:
+    UnusedVariable:
+      name: cm
+  location:
+    row: 77
+    column: 24
+  end_location:
+    row: 77
+    column: 26
+  fix:
+    content:
+      - ""
+    location:
+      row: 77
+      column: 19
+    end_location:
+      row: 77
+      column: 27
+  parent: ~
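
As a sanity check, the new fix ranges decode cleanly against fixture line 72 (ruff rows are 1-based, columns 0-based, and end columns exclusive):

line = "    with Nested(m) as (cm):"  # fixture row 72
print(line[23:25])  # "cm"        -- the diagnostic range
print(line[18:26])  # " as (cm)"  -- the range the fix deletes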