mirror of https://github.com/astral-sh/ruff.git
Enable automatic noqa insertion (#256)
This commit is contained in:
parent e5b16973a9
commit c0ce0b0c48

3 changed files with 210 additions and 14 deletions
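End to end, the new mode lints each file and appends a `# noqa` directive listing the triggered codes to every failing line, merging with any directive already present. A before/after illustration taken from the `modification` test added in src/noqa.rs below (the line triggers E741 and F841):

    before: x = 1 # noqa
    after:  x = 1 # noqa: E741, F841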
src/linter.rs

@@ -11,23 +11,22 @@ use crate::check_ast::check_ast;
 use crate::check_lines::check_lines;
 use crate::checks::{Check, CheckCode, CheckKind, LintSource};
 use crate::message::Message;
+use crate::noqa::add_noqa;
 use crate::settings::Settings;
 use crate::{cache, fs, noqa};
 
 fn check_path(
     path: &Path,
     contents: &str,
+    tokens: Vec<LexResult>,
     settings: &Settings,
     autofix: &fixer::Mode,
 ) -> Vec<Check> {
     // Aggregate all checks.
     let mut checks: Vec<Check> = vec![];
 
-    // Tokenize once.
-    let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
-
     // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_noqa_line_for(&lxr);
+    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
 
     // Run the AST-based checks.
     if settings
@@ -35,7 +34,7 @@ fn check_path(
         .iter()
         .any(|check_code| matches!(check_code.lint_source(), LintSource::AST))
     {
-        match parser::parse_program_tokens(lxr, "<filename>") {
+        match parser::parse_program_tokens(tokens, "<filename>") {
             Ok(python_ast) => {
                 checks.extend(check_ast(&python_ast, contents, settings, autofix, path))
             }
@@ -73,8 +72,11 @@ pub fn lint_path(
     // Read the file from disk.
     let contents = fs::read_file(path)?;
 
+    // Tokenize once.
+    let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
+
     // Generate checks.
-    let mut checks = check_path(path, &contents, settings, autofix);
+    let mut checks = check_path(path, &contents, tokens, settings, autofix);
 
     // Apply autofix.
     if matches!(autofix, fixer::Mode::Apply) {
@@ -96,11 +98,29 @@ pub fn lint_path(
     Ok(messages)
 }
 
+pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
+    // Read the file from disk.
+    let contents = fs::read_file(path)?;
+
+    // Tokenize once.
+    let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
+
+    // Determine the noqa line for every line in the source.
+    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+
+    // Generate checks.
+    let checks = check_path(path, &contents, tokens, settings, &fixer::Mode::None);
+
+    add_noqa(&checks, &contents, &noqa_line_for, path)
+}
+
 #[cfg(test)]
 mod tests {
     use std::path::Path;
 
     use anyhow::Result;
+    use rustpython_parser::lexer;
+    use rustpython_parser::lexer::LexResult;
 
     use crate::autofix::fixer;
     use crate::checks::{Check, CheckCode};
@@ -114,7 +134,10 @@ mod tests {
         autofix: &fixer::Mode,
     ) -> Result<Vec<Check>> {
         let contents = fs::read_file(path)?;
-        Ok(linter::check_path(path, &contents, settings, autofix))
+        let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
+        Ok(linter::check_path(
+            path, &contents, tokens, settings, autofix,
+        ))
     }
 
     #[test]
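One detail worth calling out in the diff above: `check_path` now takes `tokens: Vec<LexResult>` by value rather than lexing internally, because `parser::parse_program_tokens` consumes the token vector. Callers lex once, and the same tokens serve both the noqa line map and the parse. A minimal standalone sketch of that flow (only `rustpython_parser` is assumed; the ruff-internal steps are elided):

use rustpython_parser::lexer::{self, LexResult};
use rustpython_parser::parser;

fn main() {
    let contents = "x = 1\n";
    // Lex once up front; borrow the tokens for any ancillary passes first.
    let tokens: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
    let n_tokens = tokens.len();
    // The parser takes ownership of the tokens, which is why check_path
    // now accepts them by value instead of re-tokenizing `contents`.
    let program = parser::parse_program_tokens(tokens, "<embedded>");
    println!("lexed {n_tokens} tokens; parse ok: {}", program.is_ok());
}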
src/main.rs (42 lines changed)

@@ -25,6 +25,7 @@ use ::ruff::printer::{Printer, SerializationFormat};
 use ::ruff::pyproject;
 use ::ruff::settings::{FilePattern, Settings};
 use ::ruff::tell_user;
+use ruff::linter::add_noqa_to_path;
 
 const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
 const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
@@ -81,6 +82,9 @@ struct Cli {
     /// See ruff's settings.
     #[clap(long, action)]
     show_settings: bool,
+    /// Enable automatic additions of noqa directives to failing lines.
+    #[clap(long, action)]
+    add_noqa: bool,
 }
 
 #[cfg(feature = "update-informer")]
@@ -183,6 +187,35 @@ fn run_once(
     Ok(messages)
 }
 
+fn add_noqa(files: &[PathBuf], settings: &Settings) -> Result<usize> {
+    // Collect all the files to check.
+    let start = Instant::now();
+    let paths: Vec<Result<DirEntry, walkdir::Error>> = files
+        .iter()
+        .flat_map(|path| iter_python_files(path, &settings.exclude, &settings.extend_exclude))
+        .collect();
+    let duration = start.elapsed();
+    debug!("Identified files to lint in: {:?}", duration);
+
+    let start = Instant::now();
+    let modifications: usize = paths
+        .par_iter()
+        .map(|entry| match entry {
+            Ok(entry) => {
+                let path = entry.path();
+                add_noqa_to_path(path, settings)
+            }
+            Err(_) => Ok(0),
+        })
+        .flatten()
+        .sum();
+
+    let duration = start.elapsed();
+    debug!("Added noqa to files in: {:?}", duration);
+
+    Ok(modifications)
+}
+
 fn inner_main() -> Result<ExitCode> {
     let cli = Cli::parse();
 
@@ -254,6 +287,10 @@ fn inner_main() -> Result<ExitCode> {
             println!("Warning: --fix is not enabled in watch mode.");
         }
 
+        if cli.add_noqa {
+            println!("Warning: --no-qa is not enabled in watch mode.");
+        }
+
         if cli.format != SerializationFormat::Text {
             println!("Warning: --format 'text' is used in watch mode.");
         }
@@ -292,6 +329,11 @@ fn inner_main() -> Result<ExitCode> {
                 Err(e) => return Err(e.into()),
             }
         }
+    } else if cli.add_noqa {
+        let modifications = add_noqa(&cli.files, &settings)?;
+        if modifications > 0 {
+            println!("Added {modifications} noqa directives.");
+        }
     } else {
         let messages = run_once(&cli.files, &settings, !cli.no_cache, cli.fix)?;
         if !cli.quiet {
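With the flag wired up, `ruff <paths> --add-noqa` walks the same file set as a normal run, rewrites each file in place, and prints `Added {n} noqa directives.` The per-file work fans out through rayon, and failures simply contribute zero to the tally. A standalone sketch of that tally pattern (the inputs here are invented for illustration; only the rayon crate is assumed):

use rayon::prelude::*;

fn main() {
    // Stand-ins for per-file outcomes: Ok(n) means n directives were added;
    // an Err (e.g. an unreadable path) contributes nothing, mirroring the
    // `Err(_) => Ok(0)` arm in add_noqa above.
    let outcomes: Vec<Result<usize, std::io::Error>> = vec![Ok(2), Ok(0), Ok(1)];
    let modifications: usize = outcomes
        .par_iter()
        .map(|outcome| match outcome {
            Ok(n) => *n,
            Err(_) => 0,
        })
        .sum();
    assert_eq!(modifications, 3);
}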
src/noqa.rs (145 lines changed)

@@ -1,8 +1,13 @@
 use std::cmp::{max, min};
 
+use crate::checks::{Check, CheckCode};
+use anyhow::Result;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use rustpython_parser::lexer::{LexResult, Tok};
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs;
+use std::path::Path;
 
 static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
     Regex::new(r"(?i)(?P<noqa>\s*# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
@@ -38,7 +43,7 @@ pub fn extract_noqa_directive(line: &str) -> Directive {
 }
 
 pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
-    let mut line_map: Vec<usize> = vec![];
+    let mut noqa_line_for: Vec<usize> = vec![];
 
     let mut last_is_string = false;
     let mut last_seen = usize::MIN;
@@ -56,10 +61,10 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
 
         // For now, we only care about preserving noqa directives across multi-line strings.
         if last_is_string {
-            line_map.extend(vec![max_line; (max_line + 1) - min_line]);
+            noqa_line_for.extend(vec![max_line; (max_line + 1) - min_line]);
         } else {
             for i in (min_line - 1)..(max_line) {
-                line_map.push(i + 1);
+                noqa_line_for.push(i + 1);
             }
         }
 
@@ -69,7 +74,7 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
         // Handle empty lines.
         if start.row() > last_seen {
             for i in last_seen..(start.row() - 1) {
-                line_map.push(i + 1);
+                noqa_line_for.push(i + 1);
             }
         }
 
@@ -80,19 +85,91 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
         last_is_string = matches!(tok, Tok::String { .. });
     }
 
-    line_map
+    noqa_line_for
+}
+
+fn add_noqa_inner(
+    checks: &Vec<Check>,
+    contents: &str,
+    noqa_line_for: &[usize],
+) -> Result<(usize, String)> {
+    let lines: Vec<&str> = contents.lines().collect();
+    let mut matches_by_line: BTreeMap<usize, BTreeSet<&CheckCode>> = BTreeMap::new();
+    for lineno in 0..lines.len() {
+        let mut codes: BTreeSet<&CheckCode> = BTreeSet::new();
+        for check in checks {
+            if check.location.row() == lineno + 1 {
+                codes.insert(check.kind.code());
+            }
+        }
+
+        // Grab the noqa (logical) line number for the current (physical) line.
+        // If there are newlines at the end of the file, they won't be represented in
+        // `noqa_line_for`, so fallback to the current line.
+        let noqa_lineno = noqa_line_for
+            .get(lineno)
+            .map(|lineno| lineno - 1)
+            .unwrap_or(lineno);
+
+        if !codes.is_empty() {
+            let matches = matches_by_line
+                .entry(noqa_lineno)
+                .or_insert_with(BTreeSet::new);
+            matches.append(&mut codes);
+        }
+    }
+
+    let mut count: usize = 0;
+    let mut output = "".to_string();
+    for (lineno, line) in lines.iter().enumerate() {
+        match matches_by_line.get(&lineno) {
+            None => {
+                output.push_str(line);
+                output.push('\n');
+            }
+            Some(codes) => {
+                match extract_noqa_directive(line) {
+                    Directive::None => {
+                        output.push_str(line);
+                    }
+                    Directive::All(start) => output.push_str(&line[..start]),
+                    Directive::Codes(start, _) => output.push_str(&line[..start]),
+                };
+                let codes: Vec<&str> = codes.iter().map(|code| code.as_str()).collect();
+                output.push_str(" # noqa: ");
+                output.push_str(&codes.join(", "));
+                output.push('\n');
+                count += 1;
+            }
+        }
+    }
+
+    Ok((count, output))
+}
+
+pub fn add_noqa(
+    checks: &Vec<Check>,
+    contents: &str,
+    noqa_line_for: &[usize],
+    path: &Path,
+) -> Result<usize> {
+    let (count, output) = add_noqa_inner(checks, contents, noqa_line_for)?;
+    fs::write(path, output)?;
+    Ok(count)
 }
 
 #[cfg(test)]
 mod tests {
+    use crate::checks::{Check, CheckKind};
     use anyhow::Result;
+    use rustpython_parser::ast::Location;
     use rustpython_parser::lexer;
     use rustpython_parser::lexer::LexResult;
 
-    use crate::noqa::extract_noqa_line_for;
+    use crate::noqa::{add_noqa_inner, extract_noqa_line_for};
 
     #[test]
-    fn line_map() -> Result<()> {
+    fn extraction() -> Result<()> {
         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = 1
 y = 2
@@ -146,4 +223,58 @@ z = x + 1",
 
         Ok(())
     }
+
+    #[test]
+    fn modification() -> Result<()> {
+        let checks = vec![];
+        let contents = "x = 1";
+        let noqa_line_for = vec![1];
+        let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
+        assert_eq!(count, 0);
+        assert_eq!(output.trim(), contents.trim());
+
+        let checks = vec![Check::new(
+            CheckKind::UnusedVariable("x".to_string()),
+            Location::new(1, 1),
+        )];
+        let contents = "x = 1";
+        let noqa_line_for = vec![1];
+        let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
+        assert_eq!(count, 1);
+        assert_eq!(output.trim(), "x = 1 # noqa: F841".trim());
+
+        let checks = vec![
+            Check::new(
+                CheckKind::AmbiguousVariableName("x".to_string()),
+                Location::new(1, 1),
+            ),
+            Check::new(
+                CheckKind::UnusedVariable("x".to_string()),
+                Location::new(1, 1),
+            ),
+        ];
+        let contents = "x = 1 # noqa: E741";
+        let noqa_line_for = vec![1];
+        let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
+        assert_eq!(count, 1);
+        assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());
+
+        let checks = vec![
+            Check::new(
+                CheckKind::AmbiguousVariableName("x".to_string()),
+                Location::new(1, 1),
+            ),
+            Check::new(
+                CheckKind::UnusedVariable("x".to_string()),
+                Location::new(1, 1),
+            ),
+        ];
+        let contents = "x = 1 # noqa";
+        let noqa_line_for = vec![1];
+        let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
+        assert_eq!(count, 1);
+        assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());
+
+        Ok(())
+    }
 }
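The reason `add_noqa_inner` consults `noqa_line_for` at all is the multi-line-string rule in `extract_noqa_line_for`: every physical line of a multi-line string maps to the string's last line, so a directive appended after the closing quotes also covers checks reported on earlier lines of the string. A sketch of the expected mapping (the asserted vector is derived from that rule, not from a test in this commit, and `noqa` here is the ruff-internal module above):

use rustpython_parser::lexer::{self, LexResult};

fn main() {
    // Physical lines 2-4 form a single triple-quoted string token.
    let contents = "x = 1\ny = \"\"\"a\nb\nc\"\"\"\nz = 2";
    let tokens: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
    // Lines 2, 3, and 4 all map to 4; ordinary lines map to themselves.
    assert_eq!(noqa_line_for, vec![1, 4, 4, 4, 5]);
}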