[flake8-pie] Reuse parsed tokens (PIE800) (#15438)

## Summary

Follow-up to #15394. See [this review
comment](https://github.com/astral-sh/ruff/pull/15394#discussion_r1910526741).
The PIE800 fix now reuses the checker's already-parsed token stream
(`checker.tokens()`) instead of re-lexing the source with `SimpleTokenizer`.
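
For context, a minimal sketch (not part of this commit) of the pattern the fix moves to: slicing the already-parsed token stream with `Tokens::after` instead of re-lexing the source from an offset with `SimpleTokenizer::starts_at`. The example input, the starting offset of `0`, and the use of `ruff_python_parser::parse_module` as a standalone entry point are assumptions for illustration; inside the linter the stream comes from `checker.tokens()`.

```rust
use ruff_python_parser::{parse_module, TokenKind};
use ruff_text_size::{Ranged, TextSize};

fn main() {
    // Hypothetical input that PIE800 flags: a dict literal spread into a dict.
    let source = "d = {**{'a': 1}, 'b': 2}";

    // Parse once; this is the token stream that `checker.tokens()` exposes.
    let parsed = parse_module(source).expect("source should be valid Python");
    let tokens = parsed.tokens();

    // Instead of running `SimpleTokenizer::starts_at(offset, source)` over the
    // raw text again, slice the existing stream at an offset and scan it.
    // (The real fix starts from the end of the previous dict item; offset 0 is
    // just a simplification for this sketch.)
    let doublestar = tokens
        .after(TextSize::new(0))
        .iter()
        .find(|tok| matches!(tok.kind(), TokenKind::DoubleStar))
        .expect("a `**` token should be present");

    // The token carries its source range, so deletion edits can be built
    // directly from it.
    println!("`**` found at {:?}", doublestar.range());
}
```

The point of the change is that no second tokenization pass over the source text is needed to build the fix.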

## Test Plan

`cargo nextest run` and `cargo insta test`.
Authored by InSync on 2025-01-13 09:03:11 +07:00, committed by GitHub
parent 347ab5b47a
commit 70c3be88b9


@@ -1,11 +1,10 @@
 use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
 use ruff_macros::{derive_message_formats, ViolationMetadata};
 use ruff_python_ast::{self as ast, Expr};
-use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
+use ruff_python_parser::{TokenKind, Tokens};
 use ruff_text_size::{Ranged, TextSize};
 
 use crate::checkers::ast::Checker;
-use crate::Locator;
 
 /// ## What it does
 /// Checks for unnecessary dictionary unpacking operators (`**`).
@@ -54,7 +53,7 @@ pub(crate) fn unnecessary_spread(checker: &mut Checker, dict: &ast::ExprDict) {
             // inside a dict.
             if let Expr::Dict(inner) = value {
                 let mut diagnostic = Diagnostic::new(UnnecessarySpread, value.range());
-                if let Some(fix) = unnecessary_spread_fix(inner, prev_end, checker.locator()) {
+                if let Some(fix) = unnecessary_spread_fix(inner, prev_end, checker.tokens()) {
                     diagnostic.set_fix(fix);
                 }
                 checker.diagnostics.push(diagnostic);
@@ -68,24 +67,27 @@ fn unnecessary_spread_fix(
 fn unnecessary_spread_fix(
     dict: &ast::ExprDict,
     prev_end: TextSize,
-    locator: &Locator,
+    tokens: &Tokens,
 ) -> Option<Fix> {
     // Find the `**` token preceding the spread.
-    let doublestar = SimpleTokenizer::starts_at(prev_end, locator.contents())
-        .find(|tok| matches!(tok.kind(), SimpleTokenKind::DoubleStar))?;
+    let doublestar = tokens
+        .after(prev_end)
+        .iter()
+        .find(|tok| matches!(tok.kind(), TokenKind::DoubleStar))?;
 
     if let Some(last) = dict.iter_values().last() {
         // Ex) `**{a: 1, b: 2}`
         let mut edits = vec![];
         let mut open_parens: u32 = 0;
 
-        for tok in SimpleTokenizer::starts_at(doublestar.end(), locator.contents()).skip_trivia() {
+        for tok in tokens.after(doublestar.end()) {
             match tok.kind() {
-                SimpleTokenKind::LParen => {
+                kind if kind.is_trivia() => {}
+                TokenKind::Lpar => {
                     edits.push(Edit::range_deletion(tok.range()));
                     open_parens += 1;
                 }
-                SimpleTokenKind::LBrace => {
+                TokenKind::Lbrace => {
                     edits.push(Edit::range_deletion(tok.range()));
                     break;
                 }
@@ -97,22 +99,23 @@ fn unnecessary_spread_fix(
         }
 
         let mut found_r_curly = false;
-        for tok in SimpleTokenizer::starts_at(last.end(), locator.contents()).skip_trivia() {
+        for tok in tokens.after(last.end()) {
             if found_r_curly && open_parens == 0 {
                 break;
             }
 
             match tok.kind() {
-                SimpleTokenKind::Comma => {
+                kind if kind.is_trivia() => {}
+                TokenKind::Comma => {
                     edits.push(Edit::range_deletion(tok.range()));
                 }
-                SimpleTokenKind::RParen => {
+                TokenKind::Rpar => {
                     if found_r_curly {
                         edits.push(Edit::range_deletion(tok.range()));
                         open_parens -= 1;
                     }
                 }
-                SimpleTokenKind::RBrace => {
+                TokenKind::Rbrace => {
                     edits.push(Edit::range_deletion(tok.range()));
                     found_r_curly = true;
                 }
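
A related detail visible in the new match arms: unlike the `SimpleTokenizer` iterator, which filtered trivia with `.skip_trivia()`, the parsed token stream still contains comment and non-logical-newline tokens, so the loops now skip them with `kind if kind.is_trivia() => {}`. A minimal sketch of that behavior, with a hypothetical input (again using `parse_module` standalone rather than `checker.tokens()`):

```rust
use ruff_python_parser::{parse_module, TokenKind};
use ruff_text_size::{Ranged, TextSize};

fn main() {
    // Hypothetical input: a comment and an implicit line join sit between the
    // `**` operator and the `{` of the spread dict.
    let source = "d = {**  # spread\n    {'a': 1}, 'b': 2}";
    let parsed = parse_module(source).expect("source should be valid Python");
    let tokens = parsed.tokens();

    let doublestar = tokens
        .after(TextSize::new(0))
        .iter()
        .find(|tok| matches!(tok.kind(), TokenKind::DoubleStar))
        .expect("a `**` token should be present");

    // Walking the parsed stream after `**` yields the comment and the
    // non-logical newline before the `{`; skip them explicitly, as the fix
    // now does, where the old `SimpleTokenizer` loop used `.skip_trivia()`.
    for tok in tokens.after(doublestar.end()) {
        if tok.kind().is_trivia() {
            continue;
        }
        assert!(matches!(tok.kind(), TokenKind::Lbrace));
        break;
    }
}
```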