Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-26 20:10:09 +00:00
Use a Rope to power fixer (#584)
parent ec3fed5a61 · commit 2be632f3cc

10 changed files with 95 additions and 99 deletions
@@ -1,10 +1,14 @@
+use std::borrow::Cow;
 use std::collections::BTreeSet;
 
 use itertools::Itertools;
+use ropey::RopeBuilder;
 use rustpython_parser::ast::Location;
 
+use crate::ast::types::Range;
 use crate::autofix::{Fix, Patch};
 use crate::checks::Check;
+use crate::source_code_locator::SourceCodeLocator;
 
 // TODO(charlie): The model here is awkward because `Apply` is only relevant at
 // higher levels in the execution flow.
@@ -36,7 +40,7 @@ impl From<bool> for Mode {
 }
 
 /// Auto-fix errors in a file, and write the fixed source code to disk.
-pub fn fix_file(checks: &mut [Check], contents: &str) -> Option<String> {
+pub fn fix_file<'a>(checks: &mut [Check], contents: &'a str) -> Option<Cow<'a, str>> {
     if checks.iter().all(|check| check.fix.is_none()) {
         return None;
     }
@@ -48,12 +52,12 @@ pub fn fix_file(checks: &mut [Check], contents: &str) -> Option<String> {
 }
 
 /// Apply a series of fixes.
-fn apply_fixes<'a>(fixes: impl Iterator<Item = &'a mut Fix>, contents: &str) -> String {
-    let lines: Vec<&str> = contents.lines().collect();
+fn apply_fixes<'a>(fixes: impl Iterator<Item = &'a mut Fix>, contents: &str) -> Cow<'_, str> {
+    let locator = SourceCodeLocator::new(contents);
 
-    let mut output: String = Default::default();
-    let mut last_pos: Location = Default::default();
-    let mut applied: BTreeSet<&Patch> = Default::default();
+    let mut output = RopeBuilder::new();
+    let mut last_pos: Location = Location::new(1, 0);
+    let mut applied: BTreeSet<&Patch> = BTreeSet::new();
 
     for fix in fixes.sorted_by_key(|fix| fix.patch.location) {
         // If we already applied an identical fix as part of another correction, skip
@@ -69,44 +73,27 @@ fn apply_fixes<'a>(fixes: impl Iterator<Item = &'a mut Fix>, contents: &str) ->
             continue;
         }
 
-        if fix.patch.location.row() > last_pos.row() {
-            if last_pos.row() > 0 || last_pos.column() > 0 {
-                output.push_str(&lines[last_pos.row() - 1][last_pos.column()..]);
-                output.push('\n');
-            }
-            for line in &lines[last_pos.row()..fix.patch.location.row() - 1] {
-                output.push_str(line);
-                output.push('\n');
-            }
-            output.push_str(&lines[fix.patch.location.row() - 1][..fix.patch.location.column()]);
-            output.push_str(&fix.patch.content);
-        } else {
-            output.push_str(
-                &lines[last_pos.row() - 1][last_pos.column()..fix.patch.location.column()],
-            );
-            output.push_str(&fix.patch.content);
-        }
-        last_pos = fix.patch.end_location;
+        // Add all contents from `last_pos` to `fix.patch.location`.
+        let slice = locator.slice_source_code_range(&Range {
+            location: last_pos,
+            end_location: fix.patch.location,
+        });
+        output.append(&slice);
 
+        // Add the patch itself.
+        output.append(&fix.patch.content);
+
+        // Track that the fix was applied.
+        last_pos = fix.patch.end_location;
         applied.insert(&fix.patch);
         fix.applied = true;
     }
 
-    if last_pos.row() > 0
-        && (last_pos.row() - 1) < lines.len()
-        && (last_pos.row() > 0 || last_pos.column() > 0)
-    {
-        output.push_str(&lines[last_pos.row() - 1][last_pos.column()..]);
-        output.push('\n');
-    }
-    if last_pos.row() < lines.len() {
-        for line in &lines[last_pos.row()..] {
-            output.push_str(line);
-            output.push('\n');
-        }
-    }
-
-    output
+    // Add the remaining content.
+    let slice = locator.slice_source_code_at(&last_pos);
+    output.append(&slice);
+
+    Cow::from(output.finish())
 }
 
 #[cfg(test)]
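The rewritten `apply_fixes` above assembles its output with ropey's `RopeBuilder` instead of pushing line slices onto a `String`. A minimal, standalone sketch of that builder API (the chunks below are invented purely for illustration):

use ropey::RopeBuilder;

fn main() {
    // Append chunks in order: unchanged source, then the patch, then the rest.
    let mut output = RopeBuilder::new();
    output.append("x = list(");
    output.append("[1, 2, 3]");
    output.append(")\n");

    // `finish` yields a Rope; rendering it back to a String (or wrapping it in
    // a Cow, as the fixer does) gives the patched source.
    let rope = output.finish();
    assert_eq!(rope.to_string(), "x = list([1, 2, 3])\n");
}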
@@ -1,18 +1,8 @@
 use anyhow::Result;
 use libcst_native::Module;
-use rustpython_ast::Located;
 
-use crate::ast::types::Range;
-use crate::source_code_locator::SourceCodeLocator;
-
-pub fn match_tree<'a, T>(
-    locator: &'a SourceCodeLocator,
-    located: &'a Located<T>,
-) -> Result<Module<'a>> {
-    match libcst_native::parse_module(
-        locator.slice_source_code_range(&Range::from_located(located)),
-        None,
-    ) {
+pub fn match_module(module_text: &str) -> Result<Module> {
+    match libcst_native::parse_module(module_text, None) {
         Ok(module) => Ok(module),
         Err(_) => return Err(anyhow::anyhow!("Failed to extract CST from source.")),
     }
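The new `match_module` takes an already-sliced snippet of source text rather than a locator plus an AST node. A self-contained sketch of the same shape (the snippet passed in `main` is an arbitrary example, not from the repository):

use anyhow::Result;
use libcst_native::Module;

// Parse a snippet of Python source into a LibCST module, mirroring the body of
// `match_module` shown in the diff above.
fn match_module(module_text: &str) -> Result<Module> {
    match libcst_native::parse_module(module_text, None) {
        Ok(module) => Ok(module),
        Err(_) => Err(anyhow::anyhow!("Failed to extract CST from source.")),
    }
}

fn main() -> Result<()> {
    let module = match_module("list(x for x in y)\n")?;
    assert_eq!(module.body.len(), 1);
    Ok(())
}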
@@ -25,12 +25,15 @@ pub fn leading_space(line: &str) -> String {
 }
 
 /// Extract the leading indentation from a docstring.
-pub fn indentation<'a>(checker: &'a Checker, docstring: &Expr) -> &'a str {
+pub fn indentation<'a>(checker: &'a Checker, docstring: &Expr) -> String {
     let range = Range::from_located(docstring);
-    checker.locator.slice_source_code_range(&Range {
-        location: Location::new(range.location.row(), 0),
-        end_location: Location::new(range.location.row(), range.location.column()),
-    })
+    checker
+        .locator
+        .slice_source_code_range(&Range {
+            location: Location::new(range.location.row(), 0),
+            end_location: Location::new(range.location.row(), range.location.column()),
+        })
+        .to_string()
 }
 
 /// Replace any non-whitespace characters from an indentation string.
@@ -6,8 +6,9 @@ use libcst_native::{
     SmallStatement, Statement, Tuple,
 };
 
+use crate::ast::types::Range;
 use crate::autofix::Fix;
-use crate::cst::matchers::match_tree;
+use crate::cst::matchers::match_module;
 use crate::source_code_locator::SourceCodeLocator;
 
 fn match_expr<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut Expr<'b>> {
@@ -44,7 +45,8 @@ pub fn fix_unnecessary_generator_list(
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
     // Expr(Call(GeneratorExp)))) -> Expr(ListComp)))
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -86,7 +88,8 @@ pub fn fix_unnecessary_generator_set(
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
     // Expr(Call(GeneratorExp)))) -> Expr(SetComp)))
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -128,7 +131,8 @@ pub fn fix_unnecessary_generator_dict(
     locator: &SourceCodeLocator,
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -194,7 +198,8 @@ pub fn fix_unnecessary_list_comprehension_set(
 ) -> Result<Fix> {
     // Expr(Call(ListComp)))) ->
     // Expr(SetComp)))
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -234,7 +239,8 @@ pub fn fix_unnecessary_literal_set(
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
     // Expr(Call(List|Tuple)))) -> Expr(Set)))
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let mut call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -281,7 +287,8 @@ pub fn fix_unnecessary_literal_dict(
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
     // Expr(Call(List|Tuple)))) -> Expr(Dict)))
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -351,8 +358,9 @@ pub fn fix_unnecessary_collection_call(
     locator: &SourceCodeLocator,
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
-    // Expr(Call("list" | "tuple" | "dict")))) -> Expr(List|Tuple|Dict)))
-    let mut tree = match_tree(locator, expr)?;
+    // Expr(Call("list" | "tuple" | "dict")))) -> Expr(List|Tuple|Dict)
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let name = if let Expression::Name(name) = &call.func.as_ref() {
@@ -463,7 +471,8 @@ pub fn fix_unnecessary_literal_within_tuple_call(
     locator: &SourceCodeLocator,
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -518,7 +527,8 @@ pub fn fix_unnecessary_literal_within_list_call(
     locator: &SourceCodeLocator,
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -576,7 +586,8 @@ pub fn fix_unnecessary_list_call(
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
     // Expr(Call(List|Tuple)))) -> Expr(List|Tuple)))
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
     let call = match_call(body)?;
     let arg = match_arg(call)?;
@@ -598,7 +609,8 @@ pub fn fix_unnecessary_comprehension(
     locator: &SourceCodeLocator,
     expr: &rustpython_ast::Expr,
 ) -> Result<Fix> {
-    let mut tree = match_tree(locator, expr)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(expr));
+    let mut tree = match_module(&module_text)?;
     let mut body = match_expr(&mut tree)?;
 
     match &body.value {
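Every fix above now follows the same two-step pattern: slice the expression's (row, column) range out of the source, then parse just that snippet. A rough standalone sketch of the idea using ropey and libcst_native directly (the source string and offsets below are invented for the example; the input is ASCII, so byte length equals char count):

use std::borrow::Cow;

use anyhow::Result;
use ropey::Rope;

fn main() -> Result<()> {
    let source = "x = 1\ny = list(i for i in range(3))\n";
    let rope = Rope::from_str(source);

    // The expression starts at row 2 (1-based), column 4 (0-based).
    let start = rope.line_to_char(2 - 1) + 4;
    let end = start + "list(i for i in range(3))".len();
    let module_text: Cow<str> = Cow::from(rope.slice(start..end));

    // Parse only the sliced snippet, as the rewritten fixes do via `match_module`.
    let module = libcst_native::parse_module(&module_text, None)
        .map_err(|_| anyhow::anyhow!("Failed to extract CST from source."))?;
    assert_eq!(module.body.len(), 1);
    Ok(())
}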
@@ -131,11 +131,10 @@ pub fn lint_stdin(
 
     // Apply autofix, write results to stdout.
     if matches!(autofix, fixer::Mode::Apply) {
-        let output = match fix_file(&mut checks, stdin) {
-            None => stdin.to_string(),
-            Some(content) => content,
-        };
-        io::stdout().write_all(output.as_bytes())?;
+        match fix_file(&mut checks, stdin) {
+            None => io::stdout().write_all(stdin.as_bytes()),
+            Some(contents) => io::stdout().write_all(contents.as_bytes()),
+        }?;
     }
 
     // Convert to messages.
@@ -176,7 +175,7 @@ pub fn lint_path(
     // Apply autofix.
     if matches!(autofix, fixer::Mode::Apply) {
         if let Some(fixed_contents) = fix_file(&mut checks, &contents) {
-            write(path, fixed_contents)?;
+            write(path, fixed_contents.as_ref())?;
         }
     };
 
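With `fix_file` now returning `Option<Cow<str>>`, the stdin path writes either the original or the fixed contents without an intermediate copy. A hypothetical stand-in for the calling pattern (`maybe_fix` below is invented purely for illustration and is not ruff's API):

use std::borrow::Cow;
use std::io::{self, Write};

// Stand-in for `fix_file`: return None when nothing needs fixing, otherwise an
// owned, fixed copy of the input.
fn maybe_fix(contents: &str) -> Option<Cow<'_, str>> {
    if contents.contains("( ") {
        Some(Cow::Owned(contents.replace("( ", "(")))
    } else {
        None
    }
}

fn main() -> io::Result<()> {
    let stdin = "print( 1)\n";
    match maybe_fix(stdin) {
        None => io::stdout().write_all(stdin.as_bytes()),
        Some(contents) => io::stdout().write_all(contents.as_bytes()),
    }?;
    Ok(())
}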
@@ -271,7 +271,7 @@ pub fn invalid_escape_sequence(
     });
 
     // Determine whether the string is single- or triple-quoted.
-    let quote = extract_quote(text);
+    let quote = extract_quote(&text);
     let quote_pos = text.find(quote).unwrap();
     let prefix = text[..quote_pos].to_lowercase();
     let body = &text[(quote_pos + quote.len())..(text.len() - quote.len())];
@@ -210,7 +210,7 @@ pub fn blank_before_after_function(checker: &mut Checker, definition: &Definitio
         .count();
 
     // Avoid D202 violations for blank lines followed by inner functions or classes.
-    if blank_lines_after == 1 && INNER_FUNCTION_OR_CLASS_REGEX.is_match(after) {
+    if blank_lines_after == 1 && INNER_FUNCTION_OR_CLASS_REGEX.is_match(&after) {
         return;
     }
 
@@ -387,7 +387,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
         return;
     }
 
-    let docstring_indent = helpers::indentation(checker, docstring).to_string();
+    let docstring_indent = helpers::indentation(checker, docstring);
     let mut has_seen_tab = docstring_indent.contains('\t');
     let mut is_over_indented = true;
     let mut over_indented_lines = vec![];
@@ -537,7 +537,7 @@ pub fn newline_after_last_paragraph(checker: &mut Checker, definition: &Definiti
             // Insert a newline just before the end-quote(s).
             let content = format!(
                 "\n{}",
-                helpers::clean(helpers::indentation(checker, docstring))
+                helpers::clean(&helpers::indentation(checker, docstring))
             );
             check.amend(Fix::insertion(
                 content,
@@ -916,7 +916,7 @@ fn blanks_and_section_underline(
             // Add a dashed line (of the appropriate length) under the section header.
             let content = format!(
                 "{}{}\n",
-                helpers::clean(helpers::indentation(checker, docstring)),
+                helpers::clean(&helpers::indentation(checker, docstring)),
                 "-".repeat(context.section_name.len())
            );
            check.amend(Fix::insertion(
@@ -950,7 +950,7 @@ fn blanks_and_section_underline(
             // Add a dashed line (of the appropriate length) under the section header.
             let content = format!(
                 "{}{}\n",
-                helpers::clean(helpers::indentation(checker, docstring)),
+                helpers::clean(&helpers::indentation(checker, docstring)),
                 "-".repeat(context.section_name.len())
            );
            check.amend(Fix::insertion(
@@ -1026,7 +1026,7 @@ fn blanks_and_section_underline(
             // Replace the existing underline with a line of the appropriate length.
             let content = format!(
                 "{}{}\n",
-                helpers::clean(helpers::indentation(checker, docstring)),
+                helpers::clean(&helpers::indentation(checker, docstring)),
                 "-".repeat(context.section_name.len())
            );
            check.amend(Fix::replacement(
@@ -1054,7 +1054,7 @@ fn blanks_and_section_underline(
 
     if checker.settings.enabled.contains(&CheckCode::D215) {
         let leading_space = helpers::leading_space(non_empty_line);
-        let indentation = helpers::indentation(checker, docstring).to_string();
+        let indentation = helpers::indentation(checker, docstring);
         if leading_space.len() > indentation.len() {
             let mut check = Check::new(
                 CheckKind::SectionUnderlineNotOverIndented(context.section_name.to_string()),
@@ -1195,7 +1195,7 @@ fn common_section(
 
     if checker.settings.enabled.contains(&CheckCode::D214) {
         let leading_space = helpers::leading_space(context.line);
-        let indentation = helpers::indentation(checker, docstring).to_string();
+        let indentation = helpers::indentation(checker, docstring);
         if leading_space.len() > indentation.len() {
             let mut check = Check::new(
                 CheckKind::SectionNotOverIndented(context.section_name.to_string()),
@@ -2,9 +2,10 @@ use anyhow::Result;
 use libcst_native::{Codegen, ImportNames, NameOrAttribute, SmallStatement, Statement};
 use rustpython_ast::Stmt;
 
+use crate::ast::types::Range;
 use crate::autofix::{helpers, Fix};
 use crate::cst::helpers::compose_module_path;
-use crate::cst::matchers::match_tree;
+use crate::cst::matchers::match_module;
 use crate::source_code_locator::SourceCodeLocator;
 
 /// Generate a Fix to remove any unused imports from an `import` statement.
@@ -15,7 +16,8 @@ pub fn remove_unused_imports(
     parent: Option<&Stmt>,
     deleted: &[&Stmt],
 ) -> Result<Fix> {
-    let mut tree = match_tree(locator, stmt)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(stmt));
+    let mut tree = match_module(&module_text)?;
 
     let body = if let Some(Statement::Simple(body)) = tree.body.first_mut() {
         body
@@ -72,7 +74,8 @@ pub fn remove_unused_import_froms(
     parent: Option<&Stmt>,
     deleted: &[&Stmt],
 ) -> Result<Fix> {
-    let mut tree = match_tree(locator, stmt)?;
+    let module_text = locator.slice_source_code_range(&Range::from_located(stmt));
+    let mut tree = match_module(&module_text)?;
 
     let body = if let Some(Statement::Simple(body)) = tree.body.first_mut() {
         body
@@ -16,14 +16,14 @@ pub fn remove_class_def_base(
     bases: &[Expr],
     keywords: &[Keyword],
 ) -> Option<Fix> {
-    let content = locator.slice_source_code_at(stmt_at);
+    let contents = locator.slice_source_code_at(stmt_at);
 
     // Case 1: `object` is the only base.
     if bases.len() == 1 && keywords.is_empty() {
         let mut fix_start = None;
         let mut fix_end = None;
         let mut count: usize = 0;
-        for (start, tok, end) in lexer::make_tokenizer(content).flatten() {
+        for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
             if matches!(tok, Tok::Lpar) {
                 if count == 0 {
                     fix_start = Some(helpers::to_absolute(&start, stmt_at));
@@ -56,7 +56,7 @@ pub fn remove_class_def_base(
         let mut fix_start: Option<Location> = None;
         let mut fix_end: Option<Location> = None;
         let mut seen_comma = false;
-        for (start, tok, end) in lexer::make_tokenizer(content).flatten() {
+        for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
             let start = helpers::to_absolute(&start, stmt_at);
             if seen_comma {
                 if matches!(tok, Tok::Newline) {
@@ -83,7 +83,7 @@ pub fn remove_class_def_base(
         // isn't a comma.
         let mut fix_start: Option<Location> = None;
         let mut fix_end: Option<Location> = None;
-        for (start, tok, end) in lexer::make_tokenizer(content).flatten() {
+        for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
             let start = helpers::to_absolute(&start, stmt_at);
             let end = helpers::to_absolute(&end, stmt_at);
             if start == expr_at {
@@ -106,7 +106,7 @@ pub fn remove_super_arguments(locator: &SourceCodeLocator, expr: &Expr) -> Optio
     let range = Range::from_located(expr);
     let contents = locator.slice_source_code_range(&range);
 
-    let mut tree = match libcst_native::parse_module(contents, None) {
+    let mut tree = match libcst_native::parse_module(&contents, None) {
         Ok(m) => m,
         Err(_) => return None,
     };
@@ -1,5 +1,7 @@
 //! Struct used to efficiently slice source code at (row, column) Locations.
 
+use std::borrow::Cow;
+
 use once_cell::unsync::OnceCell;
 use ropey::Rope;
 use rustpython_ast::Location;
@@ -23,24 +25,24 @@ impl<'a> SourceCodeLocator<'a> {
         self.rope.get_or_init(|| Rope::from_str(self.contents))
     }
 
-    pub fn slice_source_code_at(&self, location: &Location) -> &'a str {
+    pub fn slice_source_code_at(&self, location: &Location) -> Cow<'_, str> {
         let rope = self.get_or_init_rope();
         let offset = rope.line_to_char(location.row() - 1) + location.column();
-        &self.contents[offset..]
+        Cow::from(rope.slice(offset..))
     }
 
-    pub fn slice_source_code_range(&self, range: &Range) -> &'a str {
+    pub fn slice_source_code_range(&self, range: &Range) -> Cow<'_, str> {
         let rope = self.get_or_init_rope();
         let start = rope.line_to_char(range.location.row() - 1) + range.location.column();
         let end = rope.line_to_char(range.end_location.row() - 1) + range.end_location.column();
-        &self.contents[start..end]
+        Cow::from(rope.slice(start..end))
     }
 
     pub fn partition_source_code_at(
         &self,
         outer: &Range,
         inner: &Range,
-    ) -> (&'a str, &'a str, &'a str) {
+    ) -> (Cow<'_, str>, Cow<'_, str>, Cow<'_, str>) {
         let rope = self.get_or_init_rope();
         let outer_start = rope.line_to_char(outer.location.row() - 1) + outer.location.column();
         let outer_end =
@@ -49,9 +51,9 @@ impl<'a> SourceCodeLocator<'a> {
         let inner_end =
             rope.line_to_char(inner.end_location.row() - 1) + inner.end_location.column();
         (
-            &self.contents[outer_start..inner_start],
-            &self.contents[inner_start..inner_end],
-            &self.contents[inner_end..outer_end],
+            Cow::from(rope.slice(outer_start..inner_start)),
+            Cow::from(rope.slice(inner_start..inner_end)),
+            Cow::from(rope.slice(inner_end..outer_end)),
         )
     }
 }
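The locator's rope maps 1-based rows and 0-based columns to char offsets with `line_to_char`, and the resulting `RopeSlice` converts into a `Cow<str>` (borrowed when the slice is contiguous, owned otherwise). A minimal sketch with an invented source string:

use std::borrow::Cow;

use ropey::Rope;

// Mirror of the offset arithmetic in `slice_source_code_range`.
fn char_offset(rope: &Rope, row: usize, column: usize) -> usize {
    rope.line_to_char(row - 1) + column
}

fn main() {
    let contents = "x = 1\ny = 2\nz = 3\n";
    let rope = Rope::from_str(contents);

    // Slice from (row 2, column 0) to (row 3, column 5).
    let start = char_offset(&rope, 2, 0);
    let end = char_offset(&rope, 3, 5);
    let slice: Cow<str> = Cow::from(rope.slice(start..end));
    assert_eq!(slice, "y = 2\nz = 3");
}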