Rename Autofix to Fix (#7657)

**Summary** A mostly mechanical symbol rename and search-and-replace, with small changes to the Markdown docs so they read better.
Author: konsti · 2023-09-28 12:53:05 +02:00 · committed by GitHub
Parent: 8028de8956 · Commit: 1e173f7909
231 changed files with 943 additions and 960 deletions

fix/codemods.rs (new file)

@@ -0,0 +1,166 @@
//! Interface for editing code snippets. These functions take statements or expressions as input,
//! and return the modified code snippet as output.
use anyhow::{bail, Result};
use libcst_native::{
Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
};
use ruff_python_ast::Stmt;
use ruff_python_codegen::Stylist;
use ruff_source_file::Locator;
use crate::cst::helpers::compose_module_path;
use crate::cst::matchers::match_statement;
/// Glue code to make libcst codegen work with ruff's Stylist
pub(crate) trait CodegenStylist<'a>: Codegen<'a> {
fn codegen_stylist(&self, stylist: &'a Stylist) -> String;
}
impl<'a, T: Codegen<'a>> CodegenStylist<'a> for T {
fn codegen_stylist(&self, stylist: &'a Stylist) -> String {
let mut state = CodegenState {
default_newline: stylist.line_ending().as_str(),
default_indent: stylist.indentation(),
..Default::default()
};
self.codegen(&mut state);
state.to_string()
}
}
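
A minimal sketch of how this glue is meant to be used: any libcst node that implements `Codegen` can be re-rendered with the file's own line ending and indentation instead of libcst's defaults. The `Statement` and `Stylist` are assumed to come from the calling rule:

```rust
use libcst_native::Statement;
use ruff_python_codegen::Stylist;

// `CodegenStylist` is the crate-internal trait defined above.
fn render<'a>(tree: &'a Statement<'a>, stylist: &'a Stylist) -> String {
    tree.codegen_stylist(stylist)
}
```
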
/// Given an import statement, remove any imports that are specified in the `member_names` iterator.
///
/// Returns `Ok(None)` if the statement is empty after removing the imports.
pub(crate) fn remove_imports<'a>(
member_names: impl Iterator<Item = &'a str>,
stmt: &Stmt,
locator: &Locator,
stylist: &Stylist,
) -> Result<Option<String>> {
let module_text = locator.slice(stmt);
let mut tree = match_statement(module_text)?;
let Statement::Simple(body) = &mut tree else {
bail!("Expected Statement::Simple");
};
let aliases = match body.body.first_mut() {
Some(SmallStatement::Import(import_body)) => &mut import_body.names,
Some(SmallStatement::ImportFrom(import_body)) => {
if let ImportNames::Aliases(names) = &mut import_body.names {
names
} else if let ImportNames::Star(..) = &import_body.names {
// Special-case: if the import is a `from ... import *`, then we delete the
// entire statement.
let mut found_star = false;
for member in member_names {
if member == "*" {
found_star = true;
} else {
bail!("Expected \"*\" for unused import (got: \"{}\")", member);
}
}
if !found_star {
bail!("Expected \'*\' for unused import");
}
return Ok(None);
} else {
bail!("Expected: ImportNames::Aliases | ImportNames::Star");
}
}
_ => bail!("Expected: SmallStatement::ImportFrom | SmallStatement::Import"),
};
// Preserve the trailing comma (or not) from the last entry.
let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
for member in member_names {
let alias_index = aliases
.iter()
.position(|alias| member == compose_module_path(&alias.name));
if let Some(index) = alias_index {
aliases.remove(index);
}
}
// But avoid destroying any trailing comments.
if let Some(alias) = aliases.last_mut() {
let has_comment = if let Some(comma) = &alias.comma {
match &comma.whitespace_after {
ParenthesizableWhitespace::SimpleWhitespace(_) => false,
ParenthesizableWhitespace::ParenthesizedWhitespace(whitespace) => {
whitespace.first_line.comment.is_some()
}
}
} else {
false
};
if !has_comment {
alias.comma = trailing_comma;
}
}
if aliases.is_empty() {
return Ok(None);
}
Ok(Some(tree.codegen_stylist(stylist)))
}
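
For example, removing a single member of a `from`-import re-renders the statement without it, while removing every member signals that the caller should delete the statement entirely. A sketch, assuming the caller already has the parsed `Stmt` plus the file's `Locator` and `Stylist`; the expected strings are illustrative:

```rust
use anyhow::Result;
use ruff_python_ast::Stmt;
use ruff_python_codegen::Stylist;
use ruff_source_file::Locator;

// `remove_imports` is the crate-internal function defined above.
fn drop_path_member(stmt: &Stmt, locator: &Locator, stylist: &Stylist) -> Result<Option<String>> {
    // For `from os import path, sep`, this should return
    // `Ok(Some("from os import sep"))`; removing both `path` and `sep`
    // would return `Ok(None)` instead.
    remove_imports(std::iter::once("path"), stmt, locator, stylist)
}
```
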
/// Given an import statement, remove any imports that are not specified in the `member_names` slice.
///
/// Returns the modified import statement.
pub(crate) fn retain_imports(
member_names: &[&str],
stmt: &Stmt,
locator: &Locator,
stylist: &Stylist,
) -> Result<String> {
let module_text = locator.slice(stmt);
let mut tree = match_statement(module_text)?;
let Statement::Simple(body) = &mut tree else {
bail!("Expected Statement::Simple");
};
let aliases = match body.body.first_mut() {
Some(SmallStatement::Import(import_body)) => &mut import_body.names,
Some(SmallStatement::ImportFrom(import_body)) => {
if let ImportNames::Aliases(names) = &mut import_body.names {
names
} else {
bail!("Expected: ImportNames::Aliases");
}
}
_ => bail!("Expected: SmallStatement::ImportFrom | SmallStatement::Import"),
};
// Preserve the trailing comma (or not) from the last entry.
let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
aliases.retain(|alias| {
member_names
.iter()
.any(|member| *member == compose_module_path(&alias.name))
});
// But avoid destroying any trailing comments.
if let Some(alias) = aliases.last_mut() {
let has_comment = if let Some(comma) = &alias.comma {
match &comma.whitespace_after {
ParenthesizableWhitespace::SimpleWhitespace(_) => false,
ParenthesizableWhitespace::ParenthesizedWhitespace(whitespace) => {
whitespace.first_line.comment.is_some()
}
}
} else {
false
};
if !has_comment {
alias.comma = trailing_comma;
}
}
Ok(tree.codegen_stylist(stylist))
}
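
And the inverse, keeping only the listed members (same assumptions as the sketch above):

```rust
// `retain_imports` is the crate-internal function defined above.
// For `import os, sys, re`, this should yield `import sys`.
fn keep_only_sys(stmt: &Stmt, locator: &Locator, stylist: &Stylist) -> Result<String> {
    retain_imports(&["sys"], stmt, locator, stylist)
}
```
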

fix/edits.rs (new file)

@@ -0,0 +1,367 @@
//! Interface for generating fix edits from higher-level actions (e.g., "remove an argument").
use anyhow::{Context, Result};
use ruff_diagnostics::Edit;
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Stmt};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_trivia::{
has_leading_content, is_python_whitespace, PythonWhitespace, SimpleTokenKind, SimpleTokenizer,
};
use ruff_source_file::{Locator, NewlineWithTrailingNewline};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::fix::codemods;
/// Return the `Fix` to use when deleting a `Stmt`.
///
/// In some cases, this is as simple as deleting the `Range` of the `Stmt`
/// itself. However, there are a few exceptions:
/// - If the `Stmt` is _not_ the terminal statement in a multi-statement line,
/// we need to delete up to the start of the next statement (and avoid
/// deleting any content that precedes the statement).
/// - If the `Stmt` is the terminal statement in a multi-statement line, we need
/// to avoid deleting any content that precedes the statement.
/// - If the `Stmt` has no trailing and leading content, then it's convenient to
/// remove the entire start and end lines.
/// - If the `Stmt` is the last statement in its parent body, replace it with a
/// `pass` instead.
pub(crate) fn delete_stmt(
stmt: &Stmt,
parent: Option<&Stmt>,
locator: &Locator,
indexer: &Indexer,
) -> Edit {
if parent
.map(|parent| is_lone_child(stmt, parent))
.unwrap_or_default()
{
// If removing this node would lead to an invalid syntax tree, replace
// it with a `pass`.
Edit::range_replacement("pass".to_string(), stmt.range())
} else {
if let Some(semicolon) = trailing_semicolon(stmt.end(), locator) {
let next = next_stmt_break(semicolon, locator);
Edit::deletion(stmt.start(), next)
} else if has_leading_content(stmt.start(), locator) {
Edit::range_deletion(stmt.range())
} else if let Some(start) = indexer.preceded_by_continuations(stmt.start(), locator) {
Edit::deletion(start, stmt.end())
} else {
let range = locator.full_lines_range(stmt.range());
Edit::range_deletion(range)
}
}
}
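
A sketch of the multi-statement-line case from the doc comment (the `Indexer` is assumed to be built by the caller as part of normal linting; the expected range is a reading of the logic above, not an assertion from this diff):

```rust
use ruff_diagnostics::Edit;
use ruff_python_ast::Stmt;
use ruff_python_index::Indexer;
use ruff_source_file::Locator;

// `delete_stmt` is the crate-internal function defined above.
fn delete_without_parent(stmt: &Stmt, locator: &Locator, indexer: &Indexer) -> Edit {
    // For the source `x = 1; y = 1`, deleting the first statement should
    // produce a deletion covering `x = 1; ` (through the space after the
    // semicolon), leaving `y = 1` behind.
    delete_stmt(stmt, None, locator, indexer)
}
```
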
/// Generate a `Fix` to remove the specified imports from an `import` statement.
pub(crate) fn remove_unused_imports<'a>(
member_names: impl Iterator<Item = &'a str>,
stmt: &Stmt,
parent: Option<&Stmt>,
locator: &Locator,
stylist: &Stylist,
indexer: &Indexer,
) -> Result<Edit> {
match codemods::remove_imports(member_names, stmt, locator, stylist)? {
None => Ok(delete_stmt(stmt, parent, locator, indexer)),
Some(content) => Ok(Edit::range_replacement(content, stmt.range())),
}
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum Parentheses {
/// Remove parentheses, if the removed argument is the only argument left.
Remove,
/// Preserve parentheses, even if the removed argument is the only argument
Preserve,
}
/// Generic function to remove arguments or keyword arguments from function
/// calls and class definitions. (For classes, `args` should be read as
/// `bases`.)
///
/// Supports removing the parentheses when the removed item is the only
/// (keyword) argument left; for that behavior, pass `Parentheses::Remove`.
pub(crate) fn remove_argument<T: Ranged>(
argument: &T,
arguments: &Arguments,
parentheses: Parentheses,
source: &str,
) -> Result<Edit> {
// Partition into arguments before and after the argument to remove.
let (before, after): (Vec<_>, Vec<_>) = arguments
.arguments_source_order()
.map(|arg| arg.range())
.filter(|range| argument.range() != *range)
.partition(|range| range.start() < argument.start());
if !after.is_empty() {
// Case 1: argument or keyword is _not_ the last node, so delete from the start of the
// argument to the end of the subsequent comma.
let mut tokenizer = SimpleTokenizer::starts_at(argument.end(), source);
// Find the trailing comma.
tokenizer
.find(|token| token.kind == SimpleTokenKind::Comma)
.context("Unable to find trailing comma")?;
// Find the next non-whitespace token.
let next = tokenizer
.find(|token| {
token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
})
.context("Unable to find next token")?;
Ok(Edit::deletion(argument.start(), next.start()))
} else if let Some(previous) = before.iter().map(Ranged::end).max() {
// Case 2: argument or keyword is the last node, so delete from the start of the
// previous comma to the end of the argument.
let mut tokenizer = SimpleTokenizer::starts_at(previous, source);
// Find the trailing comma.
let comma = tokenizer
.find(|token| token.kind == SimpleTokenKind::Comma)
.context("Unable to find trailing comma")?;
Ok(Edit::deletion(comma.start(), argument.end()))
} else {
// Case 3: argument or keyword is the only node, so delete the arguments (but preserve
// parentheses, if needed).
Ok(match parentheses {
Parentheses::Remove => Edit::range_deletion(arguments.range()),
Parentheses::Preserve => Edit::range_replacement("()".to_string(), arguments.range()),
})
}
}
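
To make the three cases concrete, consider the call `f(a, b)`. A sketch; the argument node and `Arguments` are assumed to come from the parsed call expression:

```rust
use anyhow::Result;
use ruff_diagnostics::Edit;
use ruff_python_ast::{self as ast, Arguments};

// `remove_argument` and `Parentheses` are the crate-internal items defined above.
fn drop_call_argument(arg: &ast::ExprName, arguments: &Arguments, source: &str) -> Result<Edit> {
    // For `f(a, b)`: removing `a` deletes `a, ` (case 1); removing `b`
    // deletes `, b` (case 2). For `f(a)`: removing `a` deletes `(a)`, or
    // replaces it with `()` under `Parentheses::Preserve` (case 3).
    remove_argument(arg, arguments, Parentheses::Remove, source)
}
```
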
/// Determine if a slice contains exactly one element, equal to the given value.
fn is_only<T: PartialEq>(vec: &[T], value: &T) -> bool {
vec.len() == 1 && vec[0] == *value
}
/// Determine if a child is the only statement in its body.
fn is_lone_child(child: &Stmt, parent: &Stmt) -> bool {
match parent {
Stmt::FunctionDef(ast::StmtFunctionDef { body, .. })
| Stmt::ClassDef(ast::StmtClassDef { body, .. })
| Stmt::With(ast::StmtWith { body, .. }) => {
if is_only(body, child) {
return true;
}
}
Stmt::For(ast::StmtFor { body, orelse, .. })
| Stmt::While(ast::StmtWhile { body, orelse, .. }) => {
if is_only(body, child) || is_only(orelse, child) {
return true;
}
}
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
}) => {
if is_only(body, child)
|| elif_else_clauses
.iter()
.any(|ast::ElifElseClause { body, .. }| is_only(body, child))
{
return true;
}
}
Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
}) => {
if is_only(body, child)
|| is_only(orelse, child)
|| is_only(finalbody, child)
|| handlers.iter().any(|handler| match handler {
ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
body, ..
}) => is_only(body, child),
})
{
return true;
}
}
Stmt::Match(ast::StmtMatch { cases, .. }) => {
if cases.iter().any(|case| is_only(&case.body, child)) {
return true;
}
}
_ => {}
}
false
}
/// Return the location of a trailing semicolon following a `Stmt`, if it's part
/// of a multi-statement line.
fn trailing_semicolon(offset: TextSize, locator: &Locator) -> Option<TextSize> {
let contents = locator.after(offset);
for line in NewlineWithTrailingNewline::from(contents) {
let trimmed = line.trim_whitespace_start();
if trimmed.starts_with(';') {
let colon_offset = line.text_len() - trimmed.text_len();
return Some(offset + line.start() + colon_offset);
}
if !trimmed.starts_with('\\') {
break;
}
}
None
}
/// Find the next valid break for a `Stmt` after a semicolon.
fn next_stmt_break(semicolon: TextSize, locator: &Locator) -> TextSize {
let start_location = semicolon + TextSize::from(1);
for line in
NewlineWithTrailingNewline::with_offset(locator.after(start_location), start_location)
{
let trimmed = line.trim_whitespace();
// Skip past any continuations.
if trimmed.starts_with('\\') {
continue;
}
return if trimmed.is_empty() {
// If the line is empty, then despite the previous statement ending in a
// semicolon, we know that it's not a multi-statement line.
line.start()
} else {
// Otherwise, find the start of the next statement. (Or, anything that isn't
// whitespace.)
let relative_offset = line.find(|c: char| !is_python_whitespace(c)).unwrap();
line.start() + TextSize::try_from(relative_offset).unwrap()
};
}
locator.line_end(start_location)
}
/// Add leading whitespace to a snippet, if it's immediately preceded by an identifier or keyword.
pub(crate) fn pad_start(mut content: String, start: TextSize, locator: &Locator) -> String {
// Ex) When converting `except(ValueError,)` from a tuple to a single argument, we need to
// insert a space before the fix, to achieve `except ValueError`.
if locator
.up_to(start)
.chars()
.last()
.is_some_and(|char| char.is_ascii_alphabetic())
{
content.insert(0, ' ');
}
content
}
/// Add trailing whitespace to a snippet, if it's immediately followed by an identifier or keyword.
pub(crate) fn pad_end(mut content: String, end: TextSize, locator: &Locator) -> String {
if locator
.after(end)
.chars()
.next()
.is_some_and(|char| char.is_ascii_alphabetic())
{
content.push(' ');
}
content
}
/// Add leading or trailing whitespace to a snippet, if it's immediately preceded or followed by
/// an identifier or keyword.
pub(crate) fn pad(content: String, range: TextRange, locator: &Locator) -> String {
pad_start(
pad_end(content, range.end(), locator),
range.start(),
locator,
)
}
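
A concrete check of the padding behavior, using only offsets that can be read off the source string (`pad` is the function defined above; the test name is illustrative):

```rust
use ruff_source_file::Locator;
use ruff_text_size::{TextRange, TextSize};

#[test]
fn pad_adds_space_after_keyword() {
    // Replacing the `(ValueError,)` tuple (offsets 6..19) with `ValueError`
    // would butt up against `except`, so a leading space is inserted; the
    // following `:` needs no trailing padding.
    let locator = Locator::new("except(ValueError,): pass");
    let range = TextRange::new(TextSize::from(6), TextSize::from(19));
    assert_eq!(pad("ValueError".to_string(), range, &locator), " ValueError");
}
```
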
#[cfg(test)]
mod tests {
use anyhow::Result;
use ruff_python_parser::parse_suite;
use ruff_source_file::Locator;
use ruff_text_size::{Ranged, TextSize};
use crate::fix::edits::{next_stmt_break, trailing_semicolon};
#[test]
fn find_semicolon() -> Result<()> {
let contents = "x = 1";
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(trailing_semicolon(stmt.end(), &locator), None);
let contents = "x = 1; y = 1";
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(
trailing_semicolon(stmt.end(), &locator),
Some(TextSize::from(5))
);
let contents = "x = 1 ; y = 1";
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(
trailing_semicolon(stmt.end(), &locator),
Some(TextSize::from(6))
);
let contents = r"
x = 1 \
; y = 1
"
.trim();
let program = parse_suite(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = Locator::new(contents);
assert_eq!(
trailing_semicolon(stmt.end(), &locator),
Some(TextSize::from(10))
);
Ok(())
}
#[test]
fn find_next_stmt_break() {
let contents = "x = 1; y = 1";
let locator = Locator::new(contents);
assert_eq!(
next_stmt_break(TextSize::from(4), &locator),
TextSize::from(5)
);
let contents = "x = 1 ; y = 1";
let locator = Locator::new(contents);
assert_eq!(
next_stmt_break(TextSize::from(5), &locator),
TextSize::from(6)
);
let contents = r"
x = 1 \
; y = 1
"
.trim();
let locator = Locator::new(contents);
assert_eq!(
next_stmt_break(TextSize::from(10), &locator),
TextSize::from(12)
);
}
}

fix/mod.rs (new file)

@@ -0,0 +1,357 @@
use itertools::Itertools;
use std::collections::BTreeSet;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use rustc_hash::{FxHashMap, FxHashSet};
use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel, SourceMap};
use ruff_source_file::Locator;
use crate::linter::FixTable;
use crate::registry::{AsRule, Rule};
pub(crate) mod codemods;
pub(crate) mod edits;
pub(crate) mod snippet;
pub(crate) struct FixResult {
/// The resulting source code, after applying all fixes.
pub(crate) code: String,
/// The number of fixes applied for each [`Rule`].
pub(crate) fixes: FixTable,
/// Source map for the fixed source code.
pub(crate) source_map: SourceMap,
}
/// Fix errors in a file, returning the fixed source code (or `None` if no fixes were applied).
pub(crate) fn fix_file(diagnostics: &[Diagnostic], locator: &Locator) -> Option<FixResult> {
let mut with_fixes = diagnostics
.iter()
.filter(|diag| diag.fix.is_some())
.peekable();
if with_fixes.peek().is_none() {
None
} else {
Some(apply_fixes(with_fixes, locator))
}
}
/// Apply a series of fixes.
fn apply_fixes<'a>(
diagnostics: impl Iterator<Item = &'a Diagnostic>,
locator: &'a Locator<'a>,
) -> FixResult {
let mut output = String::with_capacity(locator.len());
let mut last_pos: Option<TextSize> = None;
let mut applied: BTreeSet<&Edit> = BTreeSet::default();
let mut isolated: FxHashSet<u32> = FxHashSet::default();
let mut fixed = FxHashMap::default();
let mut source_map = SourceMap::default();
for (rule, fix) in diagnostics
.filter_map(|diagnostic| {
diagnostic
.fix
.as_ref()
.map(|fix| (diagnostic.kind.rule(), fix))
})
.sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2))
{
let mut edits = fix
.edits()
.iter()
.filter(|edit| !applied.contains(edit))
.peekable();
// If the fix contains at least one new edit, enforce isolation and positional requirements.
if let Some(first) = edits.peek() {
// If this fix requires isolation, and we've already applied another fix in the
// same isolation group, skip it.
if let IsolationLevel::Group(id) = fix.isolation() {
if !isolated.insert(id) {
continue;
}
}
// If this fix overlaps with a fix we've already applied, skip it.
if last_pos.is_some_and(|last_pos| last_pos >= first.start()) {
continue;
}
}
let mut applied_edits = Vec::with_capacity(fix.edits().len());
for edit in edits {
// Add all contents from `last_pos` to `fix.location`.
let slice = locator.slice(TextRange::new(last_pos.unwrap_or_default(), edit.start()));
output.push_str(slice);
// Add the start source marker for the patch.
source_map.push_start_marker(edit, output.text_len());
// Add the patch itself.
output.push_str(edit.content().unwrap_or_default());
// Add the end source marker for the added patch.
source_map.push_end_marker(edit, output.text_len());
// Track that the edit was applied.
last_pos = Some(edit.end());
applied_edits.push(edit);
}
applied.extend(applied_edits.drain(..));
*fixed.entry(rule).or_default() += 1;
}
// Add the remaining content.
let slice = locator.after(last_pos.unwrap_or_default());
output.push_str(slice);
FixResult {
code: output,
fixes: fixed,
source_map,
}
}
/// Compare two fixes.
fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
fix1.min_start()
.cmp(&fix2.min_start())
.then_with(|| match (&rule1, &rule2) {
// Apply `EndsInPeriod` fixes before `NewLineAfterLastParagraph` fixes.
(Rule::EndsInPeriod, Rule::NewLineAfterLastParagraph) => std::cmp::Ordering::Less,
(Rule::NewLineAfterLastParagraph, Rule::EndsInPeriod) => std::cmp::Ordering::Greater,
// Apply `IfElseBlockInsteadOfDictGet` fixes before `IfElseBlockInsteadOfIfExp` fixes.
(Rule::IfElseBlockInsteadOfDictGet, Rule::IfElseBlockInsteadOfIfExp) => {
std::cmp::Ordering::Less
}
(Rule::IfElseBlockInsteadOfIfExp, Rule::IfElseBlockInsteadOfDictGet) => {
std::cmp::Ordering::Greater
}
_ => std::cmp::Ordering::Equal,
})
}
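
A sketch of the tie-break, building fixes the same way the tests below do (via the deprecated `Fix::unspecified`); the rule pair mirrors the comments above:

```rust
use ruff_diagnostics::{Edit, Fix};
use ruff_text_size::TextSize;

// `cmp_fix` and `Rule` come from this module's scope.
#[test]
#[allow(deprecated)]
fn period_fix_sorts_before_newline_fix() {
    // Both fixes start at the same offset, so ordering falls through to the
    // rule-based tie-break: `EndsInPeriod` is applied first.
    let a = Fix::unspecified(Edit::insertion(".".to_string(), TextSize::from(10)));
    let b = Fix::unspecified(Edit::insertion("\n".to_string(), TextSize::from(10)));
    assert_eq!(
        cmp_fix(Rule::EndsInPeriod, Rule::NewLineAfterLastParagraph, &a, &b),
        std::cmp::Ordering::Less
    );
}
```
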
#[cfg(test)]
mod tests {
use ruff_text_size::{Ranged, TextSize};
use ruff_diagnostics::{Diagnostic, Edit, Fix, SourceMarker};
use ruff_source_file::Locator;
use crate::fix::{apply_fixes, FixResult};
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
#[allow(deprecated)]
fn create_diagnostics(edit: impl IntoIterator<Item = Edit>) -> Vec<Diagnostic> {
edit.into_iter()
.map(|edit| Diagnostic {
// The choice of rule here is arbitrary.
kind: MissingNewlineAtEndOfFile.into(),
range: edit.range(),
fix: Some(Fix::unspecified(edit)),
parent: None,
})
.collect()
}
#[test]
fn empty_file() {
let locator = Locator::new(r#""#);
let diagnostics = create_diagnostics([]);
let FixResult {
code,
fixes,
source_map,
} = apply_fixes(diagnostics.iter(), &locator);
assert_eq!(code, "");
assert_eq!(fixes.values().sum::<usize>(), 0);
assert!(source_map.markers().is_empty());
}
#[test]
fn apply_one_insertion() {
let locator = Locator::new(
r#"
import os
print("hello world")
"#
.trim(),
);
let diagnostics = create_diagnostics([Edit::insertion(
"import sys\n".to_string(),
TextSize::new(10),
)]);
let FixResult {
code,
fixes,
source_map,
} = apply_fixes(diagnostics.iter(), &locator);
assert_eq!(
code,
r#"
import os
import sys
print("hello world")
"#
.trim()
);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[
SourceMarker::new(10.into(), 10.into(),),
SourceMarker::new(10.into(), 21.into(),),
]
);
}
#[test]
fn apply_one_replacement() {
let locator = Locator::new(
r#"
class A(object):
...
"#
.trim(),
);
let diagnostics = create_diagnostics([Edit::replacement(
"Bar".to_string(),
TextSize::new(8),
TextSize::new(14),
)]);
let FixResult {
code,
fixes,
source_map,
} = apply_fixes(diagnostics.iter(), &locator);
assert_eq!(
code,
r#"
class A(Bar):
...
"#
.trim(),
);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[
SourceMarker::new(8.into(), 8.into(),),
SourceMarker::new(14.into(), 11.into(),),
]
);
}
#[test]
fn apply_one_removal() {
let locator = Locator::new(
r#"
class A(object):
...
"#
.trim(),
);
let diagnostics = create_diagnostics([Edit::deletion(TextSize::new(7), TextSize::new(15))]);
let FixResult {
code,
fixes,
source_map,
} = apply_fixes(diagnostics.iter(), &locator);
assert_eq!(
code,
r#"
class A:
...
"#
.trim()
);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[
SourceMarker::new(7.into(), 7.into()),
SourceMarker::new(15.into(), 7.into()),
]
);
}
#[test]
fn apply_two_removals() {
let locator = Locator::new(
r#"
class A(object, object, object):
...
"#
.trim(),
);
let diagnostics = create_diagnostics([
Edit::deletion(TextSize::from(8), TextSize::from(16)),
Edit::deletion(TextSize::from(22), TextSize::from(30)),
]);
let FixResult {
code,
fixes,
source_map,
} = apply_fixes(diagnostics.iter(), &locator);
assert_eq!(
code,
r#"
class A(object):
...
"#
.trim()
);
assert_eq!(fixes.values().sum::<usize>(), 2);
assert_eq!(
source_map.markers(),
&[
SourceMarker::new(8.into(), 8.into()),
SourceMarker::new(16.into(), 8.into()),
SourceMarker::new(22.into(), 14.into(),),
SourceMarker::new(30.into(), 14.into(),),
]
);
}
#[test]
fn ignore_overlapping_fixes() {
let locator = Locator::new(
r#"
class A(object):
...
"#
.trim(),
);
let diagnostics = create_diagnostics([
Edit::deletion(TextSize::from(7), TextSize::from(15)),
Edit::replacement("ignored".to_string(), TextSize::from(9), TextSize::from(11)),
]);
let FixResult {
code,
fixes,
source_map,
} = apply_fixes(diagnostics.iter(), &locator);
assert_eq!(
code,
r#"
class A:
...
"#
.trim(),
);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[
SourceMarker::new(7.into(), 7.into(),),
SourceMarker::new(15.into(), 7.into(),),
]
);
}
}

fix/snippet.rs (new file)

@@ -0,0 +1,40 @@
use unicode_width::UnicodeWidthStr;
/// A snippet of source code for user-facing display, as in a diagnostic.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct SourceCodeSnippet(String);
impl SourceCodeSnippet {
pub(crate) fn new(source_code: String) -> Self {
Self(source_code)
}
pub(crate) fn from_str(source_code: &str) -> Self {
Self(source_code.to_string())
}
/// Return the full snippet for user-facing display, or `None` if the snippet should be
/// truncated.
pub(crate) fn full_display(&self) -> Option<&str> {
if Self::should_truncate(&self.0) {
None
} else {
Some(&self.0)
}
}
/// Return a truncated snippet for user-facing display.
pub(crate) fn truncated_display(&self) -> &str {
if Self::should_truncate(&self.0) {
"..."
} else {
&self.0
}
}
/// Returns `true` if the source code should be truncated when included in a user-facing
/// diagnostic.
fn should_truncate(source_code: &str) -> bool {
source_code.width() > 50 || source_code.contains(['\r', '\n'])
}
}
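
For example, the 50-column threshold and the newline check in `should_truncate` imply the following behavior (a small sketch; the test name is illustrative):

```rust
#[test]
fn snippet_display() {
    // Short, single-line snippets are shown in full.
    let short = SourceCodeSnippet::from_str("x + y");
    assert_eq!(short.full_display(), Some("x + y"));
    assert_eq!(short.truncated_display(), "x + y");

    // Multi-line (or wider-than-50-column) snippets are elided.
    let long = SourceCodeSnippet::from_str("def f():\n    return 1");
    assert_eq!(long.full_display(), None);
    assert_eq!(long.truncated_display(), "...");
}
```
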