Mirror of https://github.com/astral-sh/ruff.git (synced 2025-10-03 15:15:33 +00:00)

Use simple lexer for argument removal (#6710)

parent 086e11087f
commit bb5fbb1b5c

7 changed files with 62 additions and 107 deletions
@@ -1,15 +1,15 @@
 //! Interface for generating autofix edits from higher-level actions (e.g., "remove an argument").

-use anyhow::{bail, Result};
+use anyhow::{Context, Result};

 use ruff_diagnostics::Edit;
-use ruff_python_ast::{
-    self as ast, Arguments, ExceptHandler, Expr, Keyword, PySourceType, Ranged, Stmt,
-};
+use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, Keyword, Ranged, Stmt};
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
-use ruff_python_parser::{lexer, AsMode};
-use ruff_python_trivia::{has_leading_content, is_python_whitespace, PythonWhitespace};
+use ruff_python_trivia::{
+    has_leading_content, is_python_whitespace, PythonWhitespace, SimpleTokenKind, SimpleTokenizer,
+};
 use ruff_source_file::{Locator, NewlineWithTrailingNewline};
 use ruff_text_size::{TextLen, TextRange, TextSize};
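The import changes track the rewrite below: the full ruff_python_parser lexer (and the AsMode/PySourceType plumbing it required) gives way to SimpleTokenizer/SimpleTokenKind from ruff_python_trivia, and anyhow::bail gives way to anyhow::Context. A minimal sketch of why Context is enough here, assuming only anyhow's public API (the helper and its message are illustrative, not part of the commit):

use anyhow::{Context, Result};

// `Iterator::find` and `str::find` return an Option; `.context(...)` turns a
// `None` (e.g. no trailing comma found) into an `anyhow::Error`, so the caller
// can use `?` instead of an explicit `bail!` branch.
fn first_comma_offset(source: &str) -> Result<usize> {
    source.find(',').context("Unable to find trailing comma")
}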
@@ -89,78 +89,49 @@ pub(crate) fn remove_argument<T: Ranged>(
     argument: &T,
     arguments: &Arguments,
     parentheses: Parentheses,
-    locator: &Locator,
-    source_type: PySourceType,
+    source: &str,
 ) -> Result<Edit> {
     // TODO(sbrugman): Preserve trailing comments.
-    if arguments.keywords.len() + arguments.args.len() > 1 {
-        let mut fix_start = None;
-        let mut fix_end = None;
+    // Partition into arguments before and after the argument to remove.
+    let (before, after): (Vec<_>, Vec<_>) = arguments
+        .args
+        .iter()
+        .map(Expr::range)
+        .chain(arguments.keywords.iter().map(Keyword::range))
+        .filter(|range| argument.range() != *range)
+        .partition(|range| range.start() < argument.start());

-        if arguments
-            .args
-            .iter()
-            .map(Expr::start)
-            .chain(arguments.keywords.iter().map(Keyword::start))
-            .any(|location| location > argument.start())
-        {
-            // Case 1: argument or keyword is _not_ the last node, so delete from the start of the
-            // argument to the end of the subsequent comma.
-            let mut seen_comma = false;
-            for (tok, range) in lexer::lex_starts_at(
-                locator.slice(arguments.range()),
-                source_type.as_mode(),
-                arguments.start(),
-            )
-            .flatten()
-            {
-                if seen_comma {
-                    if tok.is_non_logical_newline() {
-                        // Also delete any non-logical newlines after the comma.
-                        continue;
-                    }
-                    fix_end = Some(if tok.is_newline() {
-                        range.end()
-                    } else {
-                        range.start()
-                    });
-                    break;
-                }
-                if range.start() == argument.start() {
-                    fix_start = Some(range.start());
-                }
-                if fix_start.is_some() && tok.is_comma() {
-                    seen_comma = true;
-                }
-            }
-        } else {
-            // Case 2: argument or keyword is the last node, so delete from the start of the
-            // previous comma to the end of the argument.
-            for (tok, range) in lexer::lex_starts_at(
-                locator.slice(arguments.range()),
-                source_type.as_mode(),
-                arguments.start(),
-            )
-            .flatten()
-            {
-                if range.start() == argument.start() {
-                    fix_end = Some(argument.end());
-                    break;
-                }
-                if tok.is_comma() {
-                    fix_start = Some(range.start());
-                }
-            }
-        }
+    if !after.is_empty() {
+        // Case 1: argument or keyword is _not_ the last node, so delete from the start of the
+        // argument to the end of the subsequent comma.
+        let mut tokenizer = SimpleTokenizer::starts_at(argument.end(), source);

-        match (fix_start, fix_end) {
-            (Some(start), Some(end)) => Ok(Edit::deletion(start, end)),
-            _ => {
-                bail!("No fix could be constructed")
-            }
-        }
+        // Find the trailing comma.
+        tokenizer
+            .find(|token| token.kind == SimpleTokenKind::Comma)
+            .context("Unable to find trailing comma")?;
+
+        // Find the next non-whitespace token.
+        let next = tokenizer
+            .find(|token| {
+                token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
+            })
+            .context("Unable to find next token")?;
+
+        Ok(Edit::deletion(argument.start(), next.start()))
+    } else if let Some(previous) = before.iter().map(Ranged::end).max() {
+        // Case 2: argument or keyword is the last node, so delete from the start of the
+        // previous comma to the end of the argument.
+        let mut tokenizer = SimpleTokenizer::starts_at(previous, source);
+
+        // Find the trailing comma.
+        let comma = tokenizer
+            .find(|token| token.kind == SimpleTokenKind::Comma)
+            .context("Unable to find trailing comma")?;
+
+        Ok(Edit::deletion(comma.start(), argument.end()))
     } else {
-        // Only one argument; remove it (but preserve parentheses, if needed).
+        // Case 3: argument or keyword is the only node, so delete the arguments (but preserve
+        // parentheses, if needed).
         Ok(match parentheses {
             Parentheses::Remove => Edit::deletion(arguments.start(), arguments.end()),
             Parentheses::Preserve => {
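The rewritten Case 1 boils down to a single forward scan over the raw source. A standalone sketch of that scan, assuming the SimpleTokenizer/SimpleTokenKind API used in the hunk above (the function name deletion_end_after is hypothetical, introduced only for illustration):

use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_text_size::TextSize;

/// Starting just past the argument being removed, skip to the trailing comma,
/// then return the start of the next non-trivia token; deleting from the
/// argument's start to that offset removes the argument and its comma.
fn deletion_end_after(argument_end: TextSize, source: &str) -> Option<TextSize> {
    let mut tokenizer = SimpleTokenizer::starts_at(argument_end, source);
    // Consume tokens up to and including the trailing comma.
    tokenizer.find(|token| token.kind == SimpleTokenKind::Comma)?;
    // The deletion extends to the start of the next non-whitespace, non-newline token.
    tokenizer
        .find(|token| {
            token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
        })
        .map(|token| token.start())
}

Unlike the previous lexer-based loop, no parse mode or re-slicing of the call range is needed; the tokenizer works directly on the source text, which is why the call sites below now pass checker.locator().contents() instead of a Locator plus PySourceType.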
@@ -85,8 +85,7 @@ pub(crate) fn unnecessary_range_start(checker: &mut Checker, call: &ast::ExprCal
             &start,
             &call.arguments,
             Parentheses::Preserve,
-            checker.locator(),
-            checker.source_type,
+            checker.locator().contents(),
         )
         .map(Fix::automatic)
     });
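Every remaining call site changes the same way: the trailing locator/source_type pair becomes the raw source text. A hedged sketch of the resulting call shape, with diagnostic, start, and call standing in for the enclosing rule's bindings (which the hunk above does not show in full):

// Illustrative only: the surrounding rule code is elided in the hunk above.
diagnostic.try_set_fix(|| {
    remove_argument(
        &start,
        &call.arguments,
        Parentheses::Preserve,
        checker.locator().contents(),
    )
    .map(Fix::automatic)
});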
@@ -732,8 +732,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &D
                 keyword,
                 arguments,
                 edits::Parentheses::Preserve,
-                checker.locator(),
-                checker.source_type,
+                checker.locator().contents(),
             )
             .map(Fix::suggested)
         });
@@ -1,7 +1,7 @@
 use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
 use ruff_macros::{derive_message_formats, violation};
 use ruff_python_ast::helpers::is_const_true;
-use ruff_python_ast::{self as ast, Keyword, PySourceType, Ranged};
+use ruff_python_ast::{self as ast, Keyword, Ranged};
 use ruff_source_file::Locator;

 use crate::autofix::edits::{remove_argument, Parentheses};
@@ -78,12 +78,9 @@ pub(crate) fn inplace_argument(checker: &mut Checker, call: &ast::ExprCall) {
                 && checker.semantic().current_statement().is_expr_stmt()
                 && checker.semantic().current_expression_parent().is_none()
             {
-                if let Some(fix) = convert_inplace_argument_to_assignment(
-                    call,
-                    keyword,
-                    checker.source_type,
-                    checker.locator(),
-                ) {
+                if let Some(fix) =
+                    convert_inplace_argument_to_assignment(call, keyword, checker.locator())
+                {
                     diagnostic.set_fix(fix);
                 }
             }
@@ -103,7 +100,6 @@ pub(crate) fn inplace_argument(checker: &mut Checker, call: &ast::ExprCall) {
 fn convert_inplace_argument_to_assignment(
     call: &ast::ExprCall,
     keyword: &Keyword,
-    source_type: PySourceType,
     locator: &Locator,
 ) -> Option<Fix> {
     // Add the assignment.
@@ -118,8 +114,7 @@ fn convert_inplace_argument_to_assignment(
             keyword,
             &call.arguments,
             Parentheses::Preserve,
-            locator,
-            source_type,
+            locator.contents(),
         )
         .ok()?;

@@ -2,8 +2,7 @@ use anyhow::Result;

 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::{self as ast, Keyword, PySourceType, Ranged};
-use ruff_source_file::Locator;
+use ruff_python_ast::{self as ast, Keyword, Ranged};

 use crate::autofix::edits::{remove_argument, Parentheses};
 use crate::checkers::ast::Checker;
@@ -55,8 +54,7 @@ fn generate_fix(
     stdout: &Keyword,
     stderr: &Keyword,
     call: &ast::ExprCall,
-    locator: &Locator,
-    source_type: PySourceType,
+    source: &str,
 ) -> Result<Fix> {
     let (first, second) = if stdout.start() < stderr.start() {
         (stdout, stderr)
@@ -69,8 +67,7 @@ fn generate_fix(
             second,
             &call.arguments,
             Parentheses::Preserve,
-            locator,
-            source_type,
+            source,
         )?],
     ))
 }
@@ -105,9 +102,8 @@ pub(crate) fn replace_stdout_stderr(checker: &mut Checker, call: &ast::ExprCall)

     let mut diagnostic = Diagnostic::new(ReplaceStdoutStderr, call.range());
     if checker.patch(diagnostic.kind.rule()) {
-        diagnostic.try_set_fix(|| {
-            generate_fix(stdout, stderr, call, checker.locator(), checker.source_type)
-        });
+        diagnostic
+            .try_set_fix(|| generate_fix(stdout, stderr, call, checker.locator().contents()));
     }
     checker.diagnostics.push(diagnostic);
 }
@@ -194,8 +194,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                 kwarg,
                 &call.arguments,
                 Parentheses::Preserve,
-                checker.locator(),
-                checker.source_type,
+                checker.locator().contents(),
             )
             .map(Fix::automatic)
         });
@@ -215,8 +214,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                 arg,
                 &call.arguments,
                 Parentheses::Preserve,
-                checker.locator(),
-                checker.source_type,
+                checker.locator().contents(),
             )
             .map(Fix::automatic)
         });
@@ -243,8 +241,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                 kwarg,
                 &call.arguments,
                 Parentheses::Preserve,
-                checker.locator(),
-                checker.source_type,
+                checker.locator().contents(),
             )
             .map(Fix::automatic)
         });
@@ -264,8 +261,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &mut Checker, call: &ast::ExprCal
                 arg,
                 &call.arguments,
                 Parentheses::Preserve,
-                checker.locator(),
-                checker.source_type,
+                checker.locator().contents(),
             )
             .map(Fix::automatic)
         });
@@ -73,8 +73,7 @@ pub(crate) fn useless_object_inheritance(checker: &mut Checker, class_def: &ast:
             base,
             arguments,
             Parentheses::Remove,
-            checker.locator(),
-            checker.source_type,
+            checker.locator().contents(),
         )
         .map(Fix::automatic)
     });