Avoid no-op fix for nested with expressions (#4906)

Charlie Marsh 2023-06-06 16:15:21 -04:00 committed by GitHub
parent 2b5fb70482
commit 2a6d7cd71c
3 changed files with 172 additions and 141 deletions

F841_3.py (pyflakes test fixture)

@@ -83,6 +83,11 @@ def f():
        pass
+def f():
+    with (Nested(m)) as (cm):
+        pass
def f():
    toplevel = tt = lexer.get_token()
    if not tt:
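The new fixture case is the scenario named in the commit title: both the context expression and the `as` target are parenthesized, and the previously generated fix was a no-op. As a runnable illustration of the intended before/after behavior (`Nested` and `m` are stand-ins defined here only so the snippet executes; they are not part of the fixture):

from contextlib import contextmanager

@contextmanager
def Nested(value):  # stand-in context manager, for illustration only
    yield value

m = "m"  # stand-in value

def before():
    # F841: `cm` is bound but never used; per the commit title, the old fix
    # produced a no-op edit for this shape of `with` statement.
    with (Nested(m)) as (cm):
        pass

def after():
    # The intended fix deletes only " as (cm)", keeping the context manager.
    with (Nested(m)):
        pass

before()
after()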

Rust implementation of the `UnusedVariable` (F841) rule

@@ -1,5 +1,5 @@
use itertools::Itertools;
-use ruff_text_size::TextRange;
+use ruff_text_size::{TextRange, TextSize};
use rustpython_parser::ast::{self, Ranged, Stmt};
use rustpython_parser::{lexer, Mode, Tok};
@@ -61,21 +61,37 @@ impl Violation for UnusedVariable {
    }
}
-/// Return the [`TextRange`] of the token after the next match of
-/// the predicate, skipping over any bracketed expressions.
-fn match_token_after<F, T>(located: &T, locator: &Locator, f: F) -> TextRange
+/// Return the [`TextRange`] of the token before the next match of the predicate
+fn match_token_before<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
where
    F: Fn(Tok) -> bool,
-    T: Ranged,
{
-    let contents = locator.after(located.start());
+    let contents = locator.after(location);
+    for ((_, range), (tok, _)) in lexer::lex_starts_at(contents, Mode::Module, location)
+        .flatten()
+        .tuple_windows()
+    {
+        if f(tok) {
+            return Some(range);
+        }
+    }
+    None
+}
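For readers who want to see the shape of this scan without ruff's `Locator` and lexer, here is a loose Python analogue built on the standard `tokenize` module. The function name, predicate, and example source are illustrative only, not part of the change:

import io
import tokenize

def match_token_before(source, pred):
    """Return the span of the token immediately before the first token for
    which `pred` is true, or None if there is no such token."""
    prev = None
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if prev is not None and pred(tok):
            return (prev.start, prev.end)
        prev = tok
    return None

# The token before the `as` keyword in the new fixture line is the ")" that
# closes "(Nested(m))", which is exactly where the deletion should begin.
span = match_token_before(
    "with (Nested(m)) as (cm):\n    pass\n",
    lambda tok: tok.type == tokenize.NAME and tok.string == "as",
)
print(span)  # ((1, 15), (1, 16))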
+/// Return the [`TextRange`] of the token after the next match of the predicate, skipping over
+/// any bracketed expressions.
+fn match_token_after<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
+where
+    F: Fn(Tok) -> bool,
+{
+    let contents = locator.after(location);
    // Track the bracket depth.
    let mut par_count = 0u32;
    let mut sqb_count = 0u32;
    let mut brace_count = 0u32;
-    for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, Mode::Module, located.start())
+    for ((tok, _), (_, range)) in lexer::lex_starts_at(contents, Mode::Module, location)
        .flatten()
        .tuple_windows()
    {
@@ -91,97 +107,83 @@ where
            }
            Tok::Rpar => {
                par_count = par_count.saturating_sub(1);
                // If this is a closing bracket, continue.
                if par_count == 0 {
                    continue;
                }
            }
            Tok::Rsqb => {
                sqb_count = sqb_count.saturating_sub(1);
                // If this is a closing bracket, continue.
                if sqb_count == 0 {
                    continue;
                }
            }
            Tok::Rbrace => {
                brace_count = brace_count.saturating_sub(1);
                // If this is a closing bracket, continue.
                if brace_count == 0 {
                    continue;
                }
            }
            _ => {}
        }
        // If we're in nested brackets, continue.
        if par_count > 0 || sqb_count > 0 || brace_count > 0 {
            continue;
        }
        if f(tok) {
-            return range;
+            return Some(range);
        }
    }
-    unreachable!("No token after matched");
+    None
}
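The bracket bookkeeping can likewise be sketched in Python with `tokenize` (an analogue for illustration, not the ruff implementation): find the token that follows the first top-level match of a predicate, ignore anything nested in (), [] or {}, and saturate the depth at zero just as the Rust code does with `saturating_sub`. In the rule itself this helper is called with `Tok::Equal` to locate where an assignment's right-hand side begins.

import io
import tokenize

OPENERS = {"(", "[", "{"}
CLOSERS = {")", "]", "}"}

def match_token_after(source, pred):
    """Return the span of the token that follows the first top-level token
    matching `pred`, skipping over bracketed expressions."""
    depth = 0
    matched = False
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if matched:
            return (tok.start, tok.end)
        if tok.type == tokenize.OP:
            if tok.string in OPENERS:
                depth += 1
                continue
            if tok.string in CLOSERS:
                depth = max(depth - 1, 0)  # saturate at zero, like saturating_sub
                continue
        if depth > 0:
            continue
        if pred(tok):
            matched = True
    return None

# The first *top-level* comma in "x = f(a, b), g(c)" is the tuple separator,
# not the comma inside the call; the token after it is the name "g".
span = match_token_after(
    "x = f(a, b), g(c)\n",
    lambda tok: tok.type == tokenize.OP and tok.string == ",",
)
print(span)  # ((1, 13), (1, 14))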
-/// Return the [`TextRange`] of the token matching the predicate,
-/// skipping over any bracketed expressions.
-fn match_token<F, T>(located: &T, locator: &Locator, f: F) -> TextRange
+/// Return the [`TextRange`] of the token matching the predicate or the first mismatched
+/// bracket, skipping over any bracketed expressions.
+fn match_token_or_closing_brace<F>(location: TextSize, locator: &Locator, f: F) -> Option<TextRange>
where
    F: Fn(Tok) -> bool,
-    T: Ranged,
{
-    let contents = locator.after(located.start());
+    let contents = locator.after(location);
    // Track the bracket depth.
-    let mut par_count = 0;
-    let mut sqb_count = 0;
-    let mut brace_count = 0;
+    let mut par_count = 0u32;
+    let mut sqb_count = 0u32;
+    let mut brace_count = 0u32;
-    for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, located.start()).flatten() {
+    for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, location).flatten() {
        match tok {
            Tok::Lpar => {
-                par_count += 1;
+                par_count = par_count.saturating_add(1);
            }
            Tok::Lsqb => {
-                sqb_count += 1;
+                sqb_count = sqb_count.saturating_add(1);
            }
            Tok::Lbrace => {
-                brace_count += 1;
+                brace_count = brace_count.saturating_add(1);
            }
            Tok::Rpar => {
-                par_count -= 1;
                // If this is a closing bracket, continue.
                if par_count == 0 {
-                    continue;
+                    return Some(range);
                }
+                par_count = par_count.saturating_sub(1);
            }
            Tok::Rsqb => {
-                sqb_count -= 1;
                // If this is a closing bracket, continue.
                if sqb_count == 0 {
-                    continue;
+                    return Some(range);
                }
+                sqb_count = sqb_count.saturating_sub(1);
            }
            Tok::Rbrace => {
-                brace_count -= 1;
                // If this is a closing bracket, continue.
                if brace_count == 0 {
-                    continue;
+                    return Some(range);
                }
+                brace_count = brace_count.saturating_sub(1);
            }
            _ => {}
        }
        // If we're in nested brackets, continue.
        if par_count > 0 || sqb_count > 0 || brace_count > 0 {
            continue;
        }
        if f(tok) {
-            return range;
+            return Some(range);
        }
    }
-    unreachable!("No token after matched");
+    None
}
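A matching Python sketch (again `tokenize`-based and illustrative only) shows why the rename matters: the scan can now also stop at a closing bracket that has no opener in the scanned region, for example the ")" that terminates the last item of a parenthesized `with` header. The `start` offset and helper names below are assumptions made for the sketch, not ruff API:

import io
import tokenize

OPENERS = {"(", "[", "{"}
CLOSERS = {")", "]", "}"}

def match_token_or_closing_brace(source, start, pred):
    """Scan tokens at or after the (row, col) offset `start`; return the span
    of the first top-level token matching `pred`, or of the first closing
    bracket with no matching opener in the scanned region."""
    depth = 0
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.start < start:
            continue  # ignore everything before the requested offset
        if tok.type == tokenize.OP:
            if tok.string in OPENERS:
                depth += 1
                continue
            if tok.string in CLOSERS:
                if depth == 0:
                    # Unmatched closer: we have walked out of the brackets
                    # that enclose the scan's starting point.
                    return (tok.start, tok.end)
                depth -= 1
                continue
        if depth > 0:
            continue
        if pred(tok):
            return (tok.start, tok.end)
    return None

def is_colon_or_comma(tok):
    return tok.type == tokenize.OP and tok.string in {":", ","}

# Scanning after `as` in "with Nested(m) as (cm):" skips the parenthesized
# target and lands on the ":" that ends the header.
span = match_token_or_closing_brace(
    "with Nested(m) as (cm):\n    pass\n", (1, 18), is_colon_or_comma
)
print(span)  # ((1, 22), (1, 23))

# Scanning after the second `as` in a parenthesized with-statement instead hits
# the header's closing ")", which is why unmatched brackets are returned too.
span = match_token_or_closing_brace(
    "with (f() as x, g() as y):\n    pass\n", (1, 23), is_colon_or_comma
)
print(span)  # ((1, 24), (1, 25))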
/// Generate a [`Edit`] to remove an unused variable assignment, given the
@@ -201,10 +203,10 @@ fn remove_unused_variable(
                {
                    // If the expression is complex (`x = foo()`), remove the assignment,
                    // but preserve the right-hand side.
-                    let edit = Edit::deletion(
-                        target.start(),
-                        match_token_after(target, checker.locator, |tok| tok == Tok::Equal).start(),
-                    );
+                    let start = target.start();
+                    let end =
+                        match_token_after(start, checker.locator, |tok| tok == Tok::Equal)?.start();
+                    let edit = Edit::deletion(start, end);
                    Some(Fix::suggested(edit))
                } else {
                    // If (e.g.) assigning to a constant (`x = 1`), delete the entire statement.
@@ -232,10 +234,10 @@ fn remove_unused_variable(
            return if contains_effect(value, |id| checker.semantic_model().is_builtin(id)) {
                // If the expression is complex (`x = foo()`), remove the assignment,
                // but preserve the right-hand side.
-                let edit = Edit::deletion(
-                    stmt.start(),
-                    match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal).start(),
-                );
+                let start = stmt.start();
+                let end =
+                    match_token_after(start, checker.locator, |tok| tok == Tok::Equal)?.start();
+                let edit = Edit::deletion(start, end);
                Some(Fix::suggested(edit))
            } else {
                // If (e.g.) assigning to a constant (`x = 1`), delete the entire statement.
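Both assignment branches now compute the deletion the same way: from the start of the unused binding to just past the first top-level `=`, so only the left-hand name is removed and the right-hand side survives. Because the helpers now return `Option` and are called with `?`, a failed token search skips the fix instead of reaching the old `unreachable!`. In terms of the Python code being fixed, the effect is roughly this (an illustrative variant, not the fixture itself):

def compute():
    # Stand-in for a call whose side effects must be preserved.
    return 42

def before():
    value = result = compute()  # F841: `value` is assigned but never used
    return result

def after():
    # The fix deletes from the start of `value` through the first top-level
    # "=", leaving the rest of the statement intact.
    result = compute()
    return result

assert before() == after() == 42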
@@ -258,15 +260,20 @@ fn remove_unused_variable(
        for item in items {
            if let Some(optional_vars) = &item.optional_vars {
                if optional_vars.range() == range {
-                    let edit = Edit::deletion(
-                        item.context_expr.end(),
-                        // The end of the `Withitem` is the colon, comma, or closing
-                        // parenthesis following the `optional_vars`.
-                        match_token(&item.context_expr, checker.locator, |tok| {
-                            tok == Tok::Colon || tok == Tok::Comma || tok == Tok::Rpar
-                        })
-                        .start(),
-                    );
+                    // Find the first token before the `as` keyword.
+                    let start =
+                        match_token_before(item.context_expr.start(), checker.locator, |tok| {
+                            tok == Tok::As
+                        })?
+                        .end();
+                    // Find the first colon, comma, or closing bracket after the `as` keyword.
+                    let end = match_token_or_closing_brace(start, checker.locator, |tok| {
+                        tok == Tok::Colon || tok == Tok::Comma
+                    })?
+                    .start();
+                    let edit = Edit::deletion(start, end);
                    return Some(Fix::suggested(edit));
                }
            }
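Putting the two helpers together for the `with` case: the deletion now starts at the end of the token before `as` (the true end of the context expression, however it is parenthesized) and ends at the first top-level colon, comma, or unmatched closing bracket after it. A small Python sketch of that range computation on the new fixture line (single-line sources only, illustrative rather than ruff's actual code path):

import io
import tokenize

def with_as_deletion_span(source):
    """Sketch: span to delete for an unused `as` target in a one-line `with`
    header, from the end of the token before `as` to the first top-level
    ":"/"," or unmatched closing bracket after it."""
    toks = list(tokenize.generate_tokens(io.StringIO(source).readline))
    as_index = next(
        i for i, t in enumerate(toks) if t.type == tokenize.NAME and t.string == "as"
    )
    start = toks[as_index - 1].end  # end of the token before `as`
    depth = 0
    for t in toks[as_index + 1:]:
        if t.type != tokenize.OP:
            continue
        if t.string in "([{":
            depth += 1
        elif t.string in ")]}":
            if depth == 0:
                return start, t.start  # unmatched closer terminates the item
            depth -= 1
        elif depth == 0 and t.string in ":,":
            return start, t.start
    return None

line = "with (Nested(m)) as (cm):\n"
(_, start_col), (_, end_col) = with_as_deletion_span(line)
print(line[:start_col] + line[end_col:], end="")  # with (Nested(m)):

On the fixture line this removes exactly " as (cm)", which matches the new expected fix in the snapshot below.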

Expected-output snapshot for the F841_3.py fixture

@@ -377,126 +377,145 @@ F841_3.py:77:25: F841 [*] Local variable `cm` is assigned to but never used
79 79 |
80 80 |
F841_3.py:87:5: F841 [*] Local variable `toplevel` is assigned to but never used
F841_3.py:87:26: F841 [*] Local variable `cm` is assigned to but never used
|
87 | def f():
88 | toplevel = tt = lexer.get_token()
| ^^^^^^^^ F841
89 | if not tt:
90 | break
88 | with (Nested(m)) as (cm):
| ^^ F841
89 | pass
|
= help: Remove assignment to unused variable `toplevel`
= help: Remove assignment to unused variable `cm`
Suggested fix
84 84 |
85 85 |
86 86 | def f():
87 |- toplevel = tt = lexer.get_token()
87 |+ tt = lexer.get_token()
88 88 | if not tt:
89 89 | break
87 |- with (Nested(m)) as (cm):
87 |+ with (Nested(m)):
88 88 | pass
89 89 |
90 90 |
F841_3.py:93:5: F841 [*] Local variable `toplevel` is assigned to but never used
F841_3.py:92:5: F841 [*] Local variable `toplevel` is assigned to but never used
|
93 | def f():
94 | toplevel = tt = lexer.get_token()
92 | def f():
93 | toplevel = tt = lexer.get_token()
| ^^^^^^^^ F841
94 | if not tt:
95 | break
|
= help: Remove assignment to unused variable `toplevel`
Suggested fix
89 89 |
90 90 |
91 91 | def f():
92 |- toplevel = tt = lexer.get_token()
92 |+ tt = lexer.get_token()
93 93 | if not tt:
94 94 | break
95 95 |
F841_3.py:98:5: F841 [*] Local variable `toplevel` is assigned to but never used
|
98 | def f():
99 | toplevel = tt = lexer.get_token()
| ^^^^^^^^ F841
|
= help: Remove assignment to unused variable `toplevel`
Suggested fix
90 90 |
91 91 |
92 92 | def f():
93 |- toplevel = tt = lexer.get_token()
93 |+ tt = lexer.get_token()
94 94 |
95 95 |
96 96 | def f():
96 96 |
97 97 | def f():
98 |- toplevel = tt = lexer.get_token()
98 |+ tt = lexer.get_token()
99 99 |
100 100 |
101 101 | def f():
F841_3.py:93:16: F841 [*] Local variable `tt` is assigned to but never used
F841_3.py:98:16: F841 [*] Local variable `tt` is assigned to but never used
|
93 | def f():
94 | toplevel = tt = lexer.get_token()
98 | def f():
99 | toplevel = tt = lexer.get_token()
| ^^ F841
|
= help: Remove assignment to unused variable `tt`
Suggested fix
90 90 |
91 91 |
92 92 | def f():
93 |- toplevel = tt = lexer.get_token()
93 |+ toplevel = lexer.get_token()
94 94 |
95 95 |
96 96 | def f():
F841_3.py:97:5: F841 [*] Local variable `toplevel` is assigned to but never used
|
97 | def f():
98 | toplevel = (a, b) = lexer.get_token()
| ^^^^^^^^ F841
|
= help: Remove assignment to unused variable `toplevel`
Suggested fix
94 94 |
95 95 |
96 96 | def f():
97 |- toplevel = (a, b) = lexer.get_token()
97 |+ (a, b) = lexer.get_token()
98 98 |
96 96 |
97 97 | def f():
98 |- toplevel = tt = lexer.get_token()
98 |+ toplevel = lexer.get_token()
99 99 |
100 100 | def f():
100 100 |
101 101 | def f():
F841_3.py:101:14: F841 [*] Local variable `toplevel` is assigned to but never used
F841_3.py:102:5: F841 [*] Local variable `toplevel` is assigned to but never used
|
101 | def f():
102 | (a, b) = toplevel = lexer.get_token()
| ^^^^^^^^ F841
|
= help: Remove assignment to unused variable `toplevel`
Suggested fix
98 98 |
99 99 |
100 100 | def f():
101 |- (a, b) = toplevel = lexer.get_token()
101 |+ (a, b) = lexer.get_token()
102 102 |
103 103 |
104 104 | def f():
F841_3.py:105:5: F841 [*] Local variable `toplevel` is assigned to but never used
|
105 | def f():
106 | toplevel = tt = 1
102 | def f():
103 | toplevel = (a, b) = lexer.get_token()
| ^^^^^^^^ F841
|
= help: Remove assignment to unused variable `toplevel`
Suggested fix
102 102 |
99 99 |
100 100 |
101 101 | def f():
102 |- toplevel = (a, b) = lexer.get_token()
102 |+ (a, b) = lexer.get_token()
103 103 |
104 104 | def f():
105 |- toplevel = tt = 1
105 |+ tt = 1
104 104 |
105 105 | def f():
F841_3.py:105:16: F841 [*] Local variable `tt` is assigned to but never used
F841_3.py:106:14: F841 [*] Local variable `toplevel` is assigned to but never used
|
105 | def f():
106 | toplevel = tt = 1
106 | def f():
107 | (a, b) = toplevel = lexer.get_token()
| ^^^^^^^^ F841
|
= help: Remove assignment to unused variable `toplevel`
Suggested fix
103 103 |
104 104 |
105 105 | def f():
106 |- (a, b) = toplevel = lexer.get_token()
106 |+ (a, b) = lexer.get_token()
107 107 |
108 108 |
109 109 | def f():
F841_3.py:110:5: F841 [*] Local variable `toplevel` is assigned to but never used
|
110 | def f():
111 | toplevel = tt = 1
| ^^^^^^^^ F841
|
= help: Remove assignment to unused variable `toplevel`
Suggested fix
107 107 |
108 108 |
109 109 | def f():
110 |- toplevel = tt = 1
110 |+ tt = 1
F841_3.py:110:16: F841 [*] Local variable `tt` is assigned to but never used
|
110 | def f():
111 | toplevel = tt = 1
| ^^ F841
|
= help: Remove assignment to unused variable `tt`
Suggested fix
102 102 |
103 103 |
104 104 | def f():
105 |- toplevel = tt = 1
105 |+ toplevel = 1
107 107 |
108 108 |
109 109 | def f():
110 |- toplevel = tt = 1
110 |+ toplevel = 1