Use a shared Rope between AST checker and fixer (#585)

Charlie Marsh 2022-11-04 12:05:46 -04:00 committed by GitHub
parent 2be632f3cc
commit f0239de559
4 changed files with 86 additions and 28 deletions
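
Note: the change threads a single SourceCodeLocator (the "shared Rope" of the commit title) through both the AST checker and the autofixer, instead of letting each rebuild its own from `contents`. Below is a minimal sketch of that call pattern; the types and signatures are stand-ins, not ruff's real definitions (the real entry points also take tokens, noqa lines, settings, and an autofix mode).

// Stand-in for ruff's SourceCodeLocator: wraps the source once so both the
// checker and the fixer resolve locations against the same text.
struct SourceCodeLocator<'a> {
    contents: &'a str,
}

impl<'a> SourceCodeLocator<'a> {
    fn new(contents: &'a str) -> Self {
        Self { contents }
    }
}

// Stand-ins for the checker and fixer entry points.
fn check_path(locator: &SourceCodeLocator) -> Vec<String> {
    vec![format!("checked {} bytes", locator.contents.len())]
}

fn fix_file(checks: &mut [String], locator: &SourceCodeLocator) -> Option<String> {
    if checks.is_empty() {
        None
    } else {
        Some(locator.contents.to_string())
    }
}

fn main() {
    let contents = "class A(object):\n    ...\n";

    // Before this commit: check_path and fix_file each received `contents` and
    // built their own locator. After: the locator is built once per file and
    // passed by reference to both.
    let locator = SourceCodeLocator::new(contents);
    let mut checks = check_path(&locator);
    if let Some(fixed) = fix_file(&mut checks, &locator) {
        println!("{fixed}");
    }
}

Building the locator once per file means its internal index is computed at most once, and every consumer sees the same source text.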

@@ -40,21 +40,25 @@ impl From<bool> for Mode {
 }
 
 /// Auto-fix errors in a file, and write the fixed source code to disk.
-pub fn fix_file<'a>(checks: &mut [Check], contents: &'a str) -> Option<Cow<'a, str>> {
+pub fn fix_file<'a>(
+    checks: &'a mut [Check],
+    locator: &'a SourceCodeLocator<'a>,
+) -> Option<Cow<'a, str>> {
     if checks.iter().all(|check| check.fix.is_none()) {
         return None;
     }
 
     Some(apply_fixes(
         checks.iter_mut().filter_map(|check| check.fix.as_mut()),
-        contents,
+        locator,
     ))
 }
 
 /// Apply a series of fixes.
-fn apply_fixes<'a>(fixes: impl Iterator<Item = &'a mut Fix>, contents: &str) -> Cow<'_, str> {
-    let locator = SourceCodeLocator::new(contents);
+fn apply_fixes<'a>(
+    fixes: impl Iterator<Item = &'a mut Fix>,
+    locator: &'a SourceCodeLocator<'a>,
+) -> Cow<'a, str> {
     let mut output = RopeBuilder::new();
     let mut last_pos: Location = Location::new(1, 0);
     let mut applied: BTreeSet<&Patch> = BTreeSet::new();
@@ -103,11 +107,13 @@ mod tests {
     use crate::autofix::fixer::apply_fixes;
     use crate::autofix::{Fix, Patch};
+    use crate::SourceCodeLocator;
 
     #[test]
     fn empty_file() -> Result<()> {
         let mut fixes = vec![];
-        let actual = apply_fixes(fixes.iter_mut(), "");
+        let locator = SourceCodeLocator::new("");
+        let actual = apply_fixes(fixes.iter_mut(), &locator);
         let expected = "";
 
         assert_eq!(actual, expected);
@@ -125,12 +131,12 @@ mod tests {
             },
             applied: false,
         }];
-        let actual = apply_fixes(
-            fixes.iter_mut(),
+        let locator = SourceCodeLocator::new(
             "class A(object):
     ...
 ",
         );
+        let actual = apply_fixes(fixes.iter_mut(), &locator);
         let expected = "class A(Bar):
     ...
@@ -151,12 +157,12 @@ mod tests {
             },
             applied: false,
         }];
-        let actual = apply_fixes(
-            fixes.iter_mut(),
+        let locator = SourceCodeLocator::new(
             "class A(object):
     ...
 ",
         );
+        let actual = apply_fixes(fixes.iter_mut(), &locator);
         let expected = "class A:
     ...
@@ -187,12 +193,12 @@ mod tests {
                 applied: false,
             },
         ];
-        let actual = apply_fixes(
-            fixes.iter_mut(),
+        let locator = SourceCodeLocator::new(
             "class A(object, object):
     ...
 ",
         );
+        let actual = apply_fixes(fixes.iter_mut(), &locator);
         let expected = "class A:
     ...
@@ -223,12 +229,12 @@ mod tests {
                 applied: false,
            },
         ];
-        let actual = apply_fixes(
-            fixes.iter_mut(),
+        let locator = SourceCodeLocator::new(
            "class A(object):
     ...
 ",
         );
+        let actual = apply_fixes(fixes.iter_mut(), &locator);
         let expected = "class A:
     ...
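
Note: the tests above now build a SourceCodeLocator from the test source and hand apply_fixes a reference to it. As a rough sketch of what the fix-application loop itself does, simplified to byte offsets and plain strings (the real apply_fixes resolves (row, column) Locations through the shared locator and assembles its output with a RopeBuilder; Patch here is a cut-down, hypothetical stand-in):

/// A replacement of the byte range `start..end` with `content`
/// (simplified stand-in for ruff's Patch).
struct Patch {
    start: usize,
    end: usize,
    content: String,
}

fn apply_fixes(contents: &str, mut patches: Vec<Patch>) -> String {
    // Apply patches front to back, copying untouched source in between.
    patches.sort_by_key(|p| p.start);
    let mut output = String::new();
    let mut last_pos = 0;
    for patch in &patches {
        // Skip a patch that overlaps something already applied.
        if patch.start < last_pos {
            continue;
        }
        output.push_str(&contents[last_pos..patch.start]);
        output.push_str(&patch.content);
        last_pos = patch.end;
    }
    output.push_str(&contents[last_pos..]);
    output
}

fn main() {
    let source = "class A(object):\n    ...\n";
    let patches = vec![Patch {
        start: "class A".len(),
        end: "class A(object)".len(),
        content: String::new(), // drop the redundant `(object)` base
    }];
    assert_eq!(apply_fixes(source, patches), "class A:\n    ...\n");
    println!("ok");
}

The last_pos cursor plays the same role as last_pos in the diff above: everything before the next patch is copied through untouched.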

@@ -148,13 +148,22 @@ mod tests {
     use crate::checks::{Check, CheckCode};
     use crate::flake8_quotes::settings::Quote;
     use crate::linter::tokenize;
-    use crate::{flake8_quotes, fs, linter, noqa, Settings};
+    use crate::{flake8_quotes, fs, linter, noqa, Settings, SourceCodeLocator};
 
     fn check_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Result<Vec<Check>> {
         let contents = fs::read_file(path)?;
         let tokens: Vec<LexResult> = tokenize(&contents);
+        let locator = SourceCodeLocator::new(&contents);
         let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
-        linter::check_path(path, &contents, tokens, &noqa_line_for, settings, autofix)
+        linter::check_path(
+            path,
+            &contents,
+            tokens,
+            &locator,
+            &noqa_line_for,
+            settings,
+            autofix,
+        )
     }
 
     #[test_case(Path::new("doubles.py"))]

@@ -11,6 +11,7 @@ use crate::autofix::fixer::Mode;
 use crate::checks::Check;
 use crate::linter::{check_path, tokenize};
 use crate::settings::configuration::Configuration;
+use crate::source_code_locator::SourceCodeLocator;
 
 mod ast;
 pub mod autofix;
@@ -65,6 +66,9 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
     // Tokenize once.
     let tokens: Vec<LexResult> = tokenize(contents);
 
+    // Initialize the SourceCodeLocator (which computes offsets lazily).
+    let locator = SourceCodeLocator::new(contents);
+
     // Determine the noqa line for every line in the source.
     let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
@@ -73,6 +77,7 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
         path,
         contents,
         tokens,
+        &locator,
         &noqa_line_for,
         &settings,
         &if autofix { Mode::Generate } else { Mode::None },
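
Note: the comment added at each call site, "Initialize the SourceCodeLocator (which computes offsets lazily)", is what makes constructing the locator up front cheap. The following is a hypothetical sketch of that kind of laziness, using a simple line-start index rather than ruff's actual Rope-based implementation:

use std::cell::OnceCell;

struct SourceCodeLocator<'a> {
    contents: &'a str,
    // Byte offset at which each line starts; built on first use.
    line_starts: OnceCell<Vec<usize>>,
}

impl<'a> SourceCodeLocator<'a> {
    fn new(contents: &'a str) -> Self {
        Self {
            contents,
            line_starts: OnceCell::new(),
        }
    }

    fn line_starts(&self) -> &[usize] {
        self.line_starts.get_or_init(|| {
            let mut starts = vec![0];
            starts.extend(
                self.contents
                    .char_indices()
                    .filter(|&(_, c)| c == '\n')
                    .map(|(i, _)| i + 1),
            );
            starts
        })
    }

    /// Convert a one-based (row, zero-based column) location to a byte offset.
    fn offset(&self, row: usize, column: usize) -> usize {
        self.line_starts()[row - 1] + column
    }
}

fn main() {
    let locator = SourceCodeLocator::new("class A(object):\n    ...\n");
    assert_eq!(locator.offset(1, 0), 0);
    assert_eq!(locator.offset(2, 4), 21);
    println!("ok");
}

In this sketch, a run that never resolves a location never pays for building the index, so creating the locator unconditionally is essentially free.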

@@ -54,6 +54,7 @@ pub(crate) fn check_path(
     path: &Path,
     contents: &str,
     tokens: Vec<LexResult>,
+    locator: &SourceCodeLocator,
     noqa_line_for: &[usize],
     settings: &Settings,
     autofix: &fixer::Mode,
@@ -61,16 +62,13 @@ pub(crate) fn check_path(
     // Aggregate all checks.
     let mut checks: Vec<Check> = vec![];
 
-    // Initialize the SourceCodeLocator (which computes offsets lazily).
-    let locator = SourceCodeLocator::new(contents);
-
     // Run the token-based checks.
     if settings
         .enabled
         .iter()
         .any(|check_code| matches!(check_code.lint_source(), LintSource::Tokens))
     {
-        check_tokens(&mut checks, &locator, &tokens, settings);
+        check_tokens(&mut checks, locator, &tokens, settings);
     }
@@ -81,7 +79,7 @@ pub(crate) fn check_path(
     {
         match parse_program_tokens(tokens, "<filename>") {
             Ok(python_ast) => {
-                checks.extend(check_ast(&python_ast, &locator, settings, autofix, path))
+                checks.extend(check_ast(&python_ast, locator, settings, autofix, path))
             }
             Err(parse_error) => {
                 if settings.enabled.contains(&CheckCode::E999) {
@@ -123,15 +121,26 @@ pub fn lint_stdin(
     // Tokenize once.
     let tokens: Vec<LexResult> = tokenize(stdin);
 
+    // Initialize the SourceCodeLocator (which computes offsets lazily).
+    let locator = SourceCodeLocator::new(stdin);
+
     // Determine the noqa line for every line in the source.
     let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
 
     // Generate checks.
-    let mut checks = check_path(path, stdin, tokens, &noqa_line_for, settings, autofix)?;
+    let mut checks = check_path(
+        path,
+        stdin,
+        tokens,
+        &locator,
+        &noqa_line_for,
+        settings,
+        autofix,
+    )?;
 
     // Apply autofix, write results to stdout.
     if matches!(autofix, fixer::Mode::Apply) {
-        match fix_file(&mut checks, stdin) {
+        match fix_file(&mut checks, &locator) {
             None => io::stdout().write_all(stdin.as_bytes()),
             Some(contents) => io::stdout().write_all(contents.as_bytes()),
         }?;
@@ -166,15 +175,26 @@ pub fn lint_path(
     // Tokenize once.
     let tokens: Vec<LexResult> = tokenize(&contents);
 
+    // Initialize the SourceCodeLocator (which computes offsets lazily).
+    let locator = SourceCodeLocator::new(&contents);
+
     // Determine the noqa line for every line in the source.
     let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
 
     // Generate checks.
-    let mut checks = check_path(path, &contents, tokens, &noqa_line_for, settings, autofix)?;
+    let mut checks = check_path(
+        path,
+        &contents,
+        tokens,
+        &locator,
+        &noqa_line_for,
+        settings,
+        autofix,
+    )?;
 
     // Apply autofix.
     if matches!(autofix, fixer::Mode::Apply) {
-        if let Some(fixed_contents) = fix_file(&mut checks, &contents) {
+        if let Some(fixed_contents) = fix_file(&mut checks, &locator) {
             write(path, fixed_contents.as_ref())?;
         }
     };
@@ -197,6 +217,9 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
     // Tokenize once.
     let tokens: Vec<LexResult> = tokenize(&contents);
 
+    // Initialize the SourceCodeLocator (which computes offsets lazily).
+    let locator = SourceCodeLocator::new(&contents);
+
     // Determine the noqa line for every line in the source.
     let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
@@ -205,6 +228,7 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
         path,
         &contents,
         tokens,
+        &locator,
        &noqa_line_for,
         settings,
         &fixer::Mode::None,
@@ -242,13 +266,27 @@ mod tests {
     use crate::autofix::fixer;
     use crate::checks::{Check, CheckCode};
     use crate::linter::tokenize;
-    use crate::{fs, linter, noqa, settings, Settings};
+    use crate::source_code_locator::SourceCodeLocator;
+    use crate::{fs, linter, noqa, settings};
 
-    fn check_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Result<Vec<Check>> {
+    fn check_path(
+        path: &Path,
+        settings: &settings::Settings,
+        autofix: &fixer::Mode,
+    ) -> Result<Vec<Check>> {
         let contents = fs::read_file(path)?;
         let tokens: Vec<LexResult> = tokenize(&contents);
+        let locator = SourceCodeLocator::new(&contents);
         let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
-        linter::check_path(path, &contents, tokens, &noqa_line_for, settings, autofix)
+        linter::check_path(
+            path,
+            &contents,
+            tokens,
+            &locator,
+            &noqa_line_for,
+            settings,
+            autofix,
+        )
     }
 
     #[test_case(CheckCode::A001, Path::new("A001.py"); "A001")]