From befe64a10e93ef1b25a358c44e696d7c0233a31a Mon Sep 17 00:00:00 2001
From: Charlie Marsh <charlie.r.marsh@gmail.com>
Date: Fri, 11 Nov 2022 12:38:01 -0500
Subject: [PATCH] Support `isort: skip`, `isort: on`, and `isort: off` (#678)

---
 Cargo.lock                             |   8 +
 Cargo.toml                             |   2 +
 resources/test/fixtures/isort/skip.py  |  10 +
 src/check_ast.rs                       |   9 -
 src/check_imports.rs                   |   4 +-
 src/check_lines.rs                     |  16 +-
 src/directives.rs                      | 206 ++++++++++++++++++
 src/isort/mod.rs                       |  15 +-
 .../ruff__isort__tests__skip.py.snap   |  22 ++
 src/isort/track.rs                     |  15 +-
 src/lib.rs                             |  11 +-
 src/linter.rs                          |  61 ++++--
 src/noqa.rs                            | 133 ++---------
 src/source_code_locator.rs             |   2 +-
 14 files changed, 349 insertions(+), 165 deletions(-)
 create mode 100644 resources/test/fixtures/isort/skip.py
 create mode 100644 src/directives.rs
 create mode 100644 src/isort/snapshots/ruff__isort__tests__skip.py.snap

diff --git a/Cargo.lock b/Cargo.lock
index 1d981ab599..86851d007c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1622,6 +1622,12 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "nohash-hasher"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451"
+
 [[package]]
 name = "nom"
 version = "5.1.2"
@@ -2239,6 +2245,7 @@ dependencies = [
  "anyhow",
  "assert_cmd",
  "bincode",
+ "bitflags",
  "cacache",
  "chrono",
  "clap 4.0.22",
@@ -2255,6 +2262,7 @@ dependencies = [
  "itertools",
  "libcst",
  "log",
+ "nohash-hasher",
  "notify",
  "num-bigint",
  "once_cell",
diff --git a/Cargo.toml b/Cargo.toml
index 325394fd58..81e51e3c12 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -15,6 +15,7 @@ name = "ruff"
 [dependencies]
 anyhow = { version = "1.0.66" }
 bincode = { version = "1.3.3" }
+bitflags = { version = "1.3.2" }
 chrono = { version = "0.4.21" }
 clap = { version = "4.0.1", features = ["derive"] }
 colored = { version = "2.0.0" }
@@ -26,6 +27,7 @@ glob = { version = "0.3.0" }
 itertools = { version = "0.10.5" }
 libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "a13ec97dd4eb925bde4d426c6e422582793b260c" }
 log = { version = "0.4.17" }
+nohash-hasher = { version = "0.2.0" }
 notify = { version = "4.0.17" }
 num-bigint = { version = "0.4.3" }
 once_cell = { version = "1.16.0" }
diff --git a/resources/test/fixtures/isort/skip.py b/resources/test/fixtures/isort/skip.py
new file mode 100644
index 0000000000..02332bf21d
--- /dev/null
+++ b/resources/test/fixtures/isort/skip.py
@@ -0,0 +1,10 @@
+# isort: off
+import sys
+import os
+import collections
+# isort: on
+
+import sys
+import os  # isort: skip
+import collections
+import abc
diff --git a/src/check_ast.rs b/src/check_ast.rs
index 78070b809f..81f3b2da6a 100644
--- a/src/check_ast.rs
+++ b/src/check_ast.rs
@@ -23,7 +23,6 @@ use crate::ast::{helpers, operations, visitor};
 use crate::autofix::fixer;
 use crate::checks::{Check, CheckCode, CheckKind};
 use crate::docstrings::definition::{Definition, DefinitionKind, Documentable};
-use crate::isort::track::ImportTracker;
 use crate::python::builtins::{BUILTINS, MAGIC_GLOBALS};
 use crate::python::future::ALL_FEATURE_NAMES;
 use crate::python::typing;
@@ -78,7 +77,6 @@ pub struct Checker<'a> {
     deferred_functions: Vec<(&'a Stmt, Vec<usize>, Vec<usize>, VisibleScope)>,
     deferred_lambdas: Vec<(&'a Expr, Vec<usize>, Vec<usize>)>,
     deferred_assignments: Vec<usize>,
-    import_tracker: ImportTracker<'a>,
     // Internal, derivative state.
     visible_scope: VisibleScope,
     in_f_string: Option<Range>,
@@ -117,7 +115,6 @@ impl<'a> Checker<'a> {
             deferred_functions: Default::default(),
             deferred_lambdas: Default::default(),
             deferred_assignments: Default::default(),
-            import_tracker: ImportTracker::new(),
             // Internal, derivative state.
             visible_scope: VisibleScope {
                 modifier: Modifier::Module,
@@ -185,9 +182,6 @@ where
     'b: 'a,
 {
     fn visit_stmt(&mut self, stmt: &'b Stmt) {
-        // Call-through to any composed visitors.
-        self.import_tracker.visit_stmt(stmt);
-
         self.push_parent(stmt);
 
         // Track whether we've seen docstrings, non-imports, etc.
@@ -1674,9 +1668,6 @@
     fn visit_excepthandler(&mut self, excepthandler: &'b Excepthandler) {
-        // Call-through to any composed visitors.
-        self.import_tracker.visit_excepthandler(excepthandler);
-
         match &excepthandler.node {
             ExcepthandlerKind::ExceptHandler { type_, name, .. } => {
                 if self.settings.enabled.contains(&CheckCode::E722) && type_.is_none() {
diff --git a/src/check_imports.rs b/src/check_imports.rs
index f01b5b163f..4375d597d2 100644
--- a/src/check_imports.rs
+++ b/src/check_imports.rs
@@ -1,5 +1,6 @@
 //! Lint rules based on import analysis.
 
+use nohash_hasher::IntSet;
 use rustpython_parser::ast::Suite;
 
 use crate::ast::visitor::Visitor;
@@ -30,10 +31,11 @@ fn check_import_blocks(
 pub fn check_imports(
     python_ast: &Suite,
     locator: &SourceCodeLocator,
+    exclusions: &IntSet<usize>,
     settings: &Settings,
     autofix: &fixer::Mode,
 ) -> Vec<Check> {
-    let mut tracker = ImportTracker::new();
+    let mut tracker = ImportTracker::new(exclusions);
     for stmt in python_ast {
         tracker.visit_stmt(stmt);
     }
diff --git a/src/check_lines.rs b/src/check_lines.rs
index 0e4a67ce4f..2672919534 100644
--- a/src/check_lines.rs
+++ b/src/check_lines.rs
@@ -1,7 +1,6 @@
 //! Lint rules based on checking raw physical lines.
 
-use std::collections::BTreeMap;
-
+use nohash_hasher::IntMap;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use rustpython_parser::ast::Location;
@@ -36,7 +35,7 @@ fn should_enforce_line_length(line: &str, length: usize, limit: usize) -> bool {
 pub fn check_lines(
     checks: &mut Vec<Check>,
     contents: &str,
-    noqa_line_for: &[usize],
+    noqa_line_for: &IntMap<usize, usize>,
     settings: &Settings,
     autofix: &fixer::Mode,
 ) {
@@ -44,8 +43,7 @@ pub fn check_lines(
     let enforce_line_too_long = settings.enabled.contains(&CheckCode::E501);
     let enforce_noqa = settings.enabled.contains(&CheckCode::M001);
 
-    let mut noqa_directives: BTreeMap<usize, (noqa::Directive, Vec<&str>)> = BTreeMap::new();
-
+    let mut noqa_directives: IntMap<usize, (noqa::Directive, Vec<&str>)> = IntMap::default();
     let mut line_checks = vec![];
     let mut ignored = vec![];
 
@@ -55,7 +53,7 @@ pub fn check_lines(
         // If there are newlines at the end of the file, they won't be represented in
         // `noqa_line_for`, so fallback to the current line.
         let noqa_lineno = noqa_line_for
-            .get(lineno)
+            .get(&lineno)
             .map(|lineno| lineno - 1)
             .unwrap_or(lineno);
 
@@ -153,7 +151,7 @@ pub fn check_lines(
     if let Some(line) = lines.last() {
         let lineno = lines.len() - 1;
         let noqa_lineno = noqa_line_for
-            .get(lineno)
+            .get(&lineno)
             .map(|lineno| lineno - 1)
             .unwrap_or(lineno);
 
@@ -257,6 +255,8 @@ pub fn check_lines(
 
 #[cfg(test)]
 mod tests {
+    use nohash_hasher::IntMap;
+
     use super::check_lines;
     use crate::autofix::fixer;
     use crate::checks::{Check, CheckCode};
@@ -265,7 +267,7 @@ mod tests {
     #[test]
     fn e501_non_ascii_char() {
         let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
-        let noqa_line_for: Vec<usize> = vec![1];
+        let noqa_line_for: IntMap<usize, usize> = Default::default();
         let check_with_max_line_length = |line_length: usize| {
             let mut checks: Vec<Check> = vec![];
             check_lines(
diff --git a/src/directives.rs b/src/directives.rs
new file mode 100644
index 0000000000..4c0a541c01
--- /dev/null
+++ b/src/directives.rs
@@ -0,0 +1,206 @@
+//! Extract `# noqa` and `# isort: skip` directives from tokenized source.
+
+use bitflags::bitflags;
+use nohash_hasher::{IntMap, IntSet};
+use rustpython_ast::Location;
+use rustpython_parser::lexer::{LexResult, Tok};
+
+use crate::ast::types::Range;
+use crate::checks::LintSource;
+use crate::{Settings, SourceCodeLocator};
+
+bitflags! {
+    pub struct Flags: u32 {
+        const NOQA = 0b00000001;
+        const ISORT = 0b00000010;
+    }
+}
+
+impl Flags {
+    pub fn from_settings(settings: &Settings) -> Self {
+        if settings
+            .enabled
+            .iter()
+            .any(|check_code| matches!(check_code.lint_source(), LintSource::Imports))
+        {
+            Flags::NOQA | Flags::ISORT
+        } else {
+            Flags::NOQA
+        }
+    }
+}
+
+pub struct Directives {
+    pub noqa_line_for: IntMap<usize, usize>,
+    pub isort_exclusions: IntSet<usize>,
+}
+
+pub fn extract_directives(
+    lxr: &[LexResult],
+    locator: &SourceCodeLocator,
+    flags: &Flags,
+) -> Directives {
+    Directives {
+        noqa_line_for: if flags.contains(Flags::NOQA) {
+            extract_noqa_line_for(lxr)
+        } else {
+            Default::default()
+        },
+        isort_exclusions: if flags.contains(Flags::ISORT) {
+            extract_isort_exclusions(lxr, locator)
+        } else {
+            Default::default()
+        },
+    }
+}
+
+/// Extract a mapping from logical line to noqa line.
+pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
+    let mut noqa_line_for: IntMap<usize, usize> = IntMap::default();
+    for (start, tok, end) in lxr.iter().flatten() {
+        if matches!(tok, Tok::EndOfFile) {
+            break;
+        }
+        // For multi-line strings, we expect `noqa` directives on the last line of the
+        // string.
+        if matches!(tok, Tok::String { .. }) && end.row() > start.row() {
+            for i in start.row()..end.row() {
+                noqa_line_for.insert(i, end.row());
+            }
+        }
+    }
+    noqa_line_for
+}
+
+/// Extract a set of lines over which to disable isort.
+pub fn extract_isort_exclusions(lxr: &[LexResult], locator: &SourceCodeLocator) -> IntSet<usize> {
+    let mut exclusions: IntSet<usize> = IntSet::default();
+    let mut off: Option<&Location> = None;
+    for (start, tok, end) in lxr.iter().flatten() {
+        // TODO(charlie): Modify RustPython to include the comment text in the token.
+        if matches!(tok, Tok::Comment) {
+            let comment_text = locator.slice_source_code_range(&Range {
+                location: *start,
+                end_location: *end,
+            });
+            if off.is_some() {
+                if comment_text == "# isort: on" {
+                    if let Some(start) = off {
+                        for row in start.row() + 1..=end.row() {
+                            exclusions.insert(row);
+                        }
+                    }
+                    off = None;
+                }
+            } else {
+                if comment_text.contains("isort: skip") || comment_text.contains("isort:skip") {
+                    exclusions.insert(start.row());
+                } else if comment_text == "# isort: off" {
+                    off = Some(start);
+                }
+            }
+        } else if matches!(tok, Tok::EndOfFile) {
+            if let Some(start) = off {
+                for row in start.row() + 1..=end.row() {
+                    exclusions.insert(row);
+                }
+            }
+            break;
+        }
+    }
+    exclusions
+}
+
+#[cfg(test)]
+mod tests {
+    use anyhow::Result;
+    use nohash_hasher::IntMap;
+    use rustpython_parser::lexer;
+    use rustpython_parser::lexer::LexResult;
+
+    use crate::directives::extract_noqa_line_for;
+
+    #[test]
+    fn extraction() -> Result<()> {
+        let empty: IntMap<usize, usize> = Default::default();
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            "x = 1
+y = 2
+z = x + 1",
+        )
+        .collect();
+        assert_eq!(extract_noqa_line_for(&lxr), empty);
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            "
+x = 1
+y = 2
+z = x + 1",
+        )
+        .collect();
+        assert_eq!(extract_noqa_line_for(&lxr), empty);
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            "x = 1
+y = 2
+z = x + 1
+ ",
+        )
+        .collect();
+        assert_eq!(extract_noqa_line_for(&lxr), empty);
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            "x = 1
+
+y = 2
+z = x + 1
+ ",
+        )
+        .collect();
+        assert_eq!(extract_noqa_line_for(&lxr), empty);
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            "x = '''abc
+def
+ghi
+'''
+y = 2
+z = x + 1",
+        )
+        .collect();
+        assert_eq!(
+            extract_noqa_line_for(&lxr),
+            IntMap::from_iter([(1, 4), (2, 4), (3, 4)])
+        );
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            "x = 1
+y = '''abc
+def
+ghi
+'''
+z = 2",
+        )
+        .collect();
+        assert_eq!(
+            extract_noqa_line_for(&lxr),
+            IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
+        );
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            "x = 1
+y = '''abc
+def
+ghi
+'''",
+        )
+        .collect();
+        assert_eq!(
+            extract_noqa_line_for(&lxr),
+            IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
+        );
+
+        Ok(())
+    }
+}
diff --git a/src/isort/mod.rs b/src/isort/mod.rs
index 4d5bb38b49..efeb733a7f 100644
--- a/src/isort/mod.rs
+++ b/src/isort/mod.rs
@@ -208,17 +208,18 @@ mod tests {
     use crate::linter::test_path;
     use crate::Settings;
 
-    #[test_case(Path::new("reorder_within_section.py"))]
-    #[test_case(Path::new("no_reorder_within_section.py"))]
-    #[test_case(Path::new("separate_future_imports.py"))]
-    #[test_case(Path::new("separate_third_party_imports.py"))]
-    #[test_case(Path::new("separate_first_party_imports.py"))]
-    #[test_case(Path::new("deduplicate_imports.py"))]
     #[test_case(Path::new("combine_import_froms.py"))]
-    #[test_case(Path::new("preserve_indentation.py"))]
+    #[test_case(Path::new("deduplicate_imports.py"))]
     #[test_case(Path::new("fit_line_length.py"))]
     #[test_case(Path::new("import_from_after_import.py"))]
     #[test_case(Path::new("leading_prefix.py"))]
+    #[test_case(Path::new("no_reorder_within_section.py"))]
+    #[test_case(Path::new("preserve_indentation.py"))]
+    #[test_case(Path::new("reorder_within_section.py"))]
+    #[test_case(Path::new("separate_first_party_imports.py"))]
+    #[test_case(Path::new("separate_future_imports.py"))]
+    #[test_case(Path::new("separate_third_party_imports.py"))]
+    #[test_case(Path::new("skip.py"))]
     #[test_case(Path::new("trailing_suffix.py"))]
     fn isort(path: &Path) -> Result<()> {
         let snapshot = format!("{}", path.to_string_lossy());
diff --git a/src/isort/snapshots/ruff__isort__tests__skip.py.snap b/src/isort/snapshots/ruff__isort__tests__skip.py.snap
new file mode 100644
index 0000000000..ab8c4e57b7
--- /dev/null
+++ b/src/isort/snapshots/ruff__isort__tests__skip.py.snap
@@ -0,0 +1,22 @@
+---
+source: src/isort/mod.rs
+expression: checks
+---
+- kind: UnsortedImports
+  location:
+    row: 9
+    column: 0
+  end_location:
+    row: 11
+    column: 0
+  fix:
+    patch:
+      content: "import abc\nimport collections\n"
+      location:
+        row: 9
+        column: 0
+      end_location:
+        row: 11
+        column: 0
+    applied: false
+
diff --git a/src/isort/track.rs b/src/isort/track.rs
index 9bda76a4e6..a815becde7 100644
--- a/src/isort/track.rs
+++ b/src/isort/track.rs
@@ -1,3 +1,4 @@
+use nohash_hasher::IntSet;
 use rustpython_ast::{
     Alias, Arg, Arguments, Boolop, Cmpop, Comprehension, Constant, Excepthandler,
     ExcepthandlerKind, Expr, ExprContext, Keyword, MatchCase, Operator, Pattern, Stmt, StmtKind,
@@ -8,16 +9,19 @@ use crate::ast::visitor::Visitor;
 
 #[derive(Debug)]
 pub struct ImportTracker<'a> {
-    pub blocks: Vec<Vec<&'a Stmt>>,
+    exclusions: &'a IntSet<usize>,
+    blocks: Vec<Vec<&'a Stmt>>,
 }
+
 impl<'a> ImportTracker<'a> {
-    pub fn new() -> Self {
+    pub fn new(exclusions: &'a IntSet<usize>) -> Self {
         Self {
+            exclusions,
             blocks: vec![vec![]],
         }
     }
 
-    fn add_import(&mut self, stmt: &'a Stmt) {
+    fn track_import(&mut self, stmt: &'a Stmt) {
         let index = self.blocks.len() - 1;
         self.blocks[index].push(stmt);
     }
@@ -43,8 +47,9 @@
         if matches!(
             stmt.node,
             StmtKind::Import { .. } | StmtKind::ImportFrom { .. }
-        ) {
-            self.add_import(stmt);
+        ) && !self.exclusions.contains(&stmt.location.row())
+        {
+            self.track_import(stmt);
         } else {
             self.finalize();
         }
diff --git a/src/lib.rs b/src/lib.rs
index 5986929c21..0d16e49258 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -25,6 +25,7 @@ pub mod checks_gen;
 pub mod cli;
 pub mod code_gen;
 mod cst;
+mod directives;
 mod docstrings;
 pub mod flake8_annotations;
 mod flake8_bugbear;
@@ -74,8 +75,12 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
     // Initialize the SourceCodeLocator (which computes offsets lazily).
     let locator = SourceCodeLocator::new(contents);
 
-    // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+    // Extract the `# noqa` and `# isort: skip` directives from the source.
+    let directives = directives::extract_directives(
+        &tokens,
+        &locator,
+        &directives::Flags::from_settings(&settings),
+    );
 
     // Generate checks.
     let checks = check_path(
@@ -83,7 +88,7 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
         contents,
         tokens,
         &locator,
-        &noqa_line_for,
+        &directives,
         &settings,
         &if autofix { Mode::Generate } else { Mode::None },
     )?;
diff --git a/src/linter.rs b/src/linter.rs
index b728965a80..00590c8c56 100644
--- a/src/linter.rs
+++ b/src/linter.rs
@@ -21,11 +21,12 @@ use crate::check_lines::check_lines;
 use crate::check_tokens::check_tokens;
 use crate::checks::{Check, CheckCode, CheckKind, LintSource};
 use crate::code_gen::SourceGenerator;
+use crate::directives::Directives;
 use crate::message::Message;
 use crate::noqa::add_noqa;
 use crate::settings::Settings;
 use crate::source_code_locator::SourceCodeLocator;
-use crate::{cache, fs, noqa};
+use crate::{cache, directives, fs};
 
 /// Collect tokens up to and including the first error.
 pub(crate) fn tokenize(contents: &str) -> Vec<LexResult> {
@@ -56,7 +57,7 @@ pub(crate) fn check_path(
     contents: &str,
     tokens: Vec<LexResult>,
     locator: &SourceCodeLocator,
-    noqa_line_for: &[usize],
+    directives: &Directives,
     settings: &Settings,
     autofix: &fixer::Mode,
 ) -> Result<Vec<Check>> {
@@ -88,7 +89,13 @@ pub(crate) fn check_path(
             checks.extend(check_ast(&python_ast, locator, settings, autofix, path));
         }
         if use_imports {
-            checks.extend(check_imports(&python_ast, locator, settings, autofix));
+            checks.extend(check_imports(
+                &python_ast,
+                locator,
+                &directives.isort_exclusions,
+                settings,
+                autofix,
+            ));
         }
     }
     Err(parse_error) => {
@@ -106,7 +113,13 @@ pub(crate) fn check_path(
     }
 
     // Run the lines-based checks.
-    check_lines(&mut checks, contents, noqa_line_for, settings, autofix);
+    check_lines(
+        &mut checks,
+        contents,
+        &directives.noqa_line_for,
+        settings,
+        autofix,
+    );
 
     // Create path ignores.
     if !checks.is_empty() && !settings.per_file_ignores.is_empty() {
@@ -134,8 +147,12 @@ pub fn lint_stdin(
     // Initialize the SourceCodeLocator (which computes offsets lazily).
     let locator = SourceCodeLocator::new(stdin);
 
-    // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+    // Extract the `# noqa` and `# isort: skip` directives from the source.
+    let directives = directives::extract_directives(
+        &tokens,
+        &locator,
+        &directives::Flags::from_settings(settings),
+    );
 
     // Generate checks.
     let mut checks = check_path(
@@ -143,7 +160,7 @@ pub fn lint_stdin(
         stdin,
         tokens,
         &locator,
-        &noqa_line_for,
+        &directives,
         settings,
         autofix,
     )?;
@@ -188,8 +205,12 @@ pub fn lint_path(
     // Initialize the SourceCodeLocator (which computes offsets lazily).
     let locator = SourceCodeLocator::new(&contents);
 
-    // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+    // Determine the noqa and isort exclusions.
+    let directives = directives::extract_directives(
+        &tokens,
+        &locator,
+        &directives::Flags::from_settings(settings),
+    );
 
     // Generate checks.
     let mut checks = check_path(
@@ -197,7 +218,7 @@ pub fn lint_path(
         &contents,
         tokens,
         &locator,
-        &noqa_line_for,
+        &directives,
         settings,
         autofix,
     )?;
@@ -230,8 +251,12 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
     // Initialize the SourceCodeLocator (which computes offsets lazily).
     let locator = SourceCodeLocator::new(&contents);
 
-    // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+    // Extract the `# noqa` and `# isort: skip` directives from the source.
+    let directives = directives::extract_directives(
+        &tokens,
+        &locator,
+        &directives::Flags::from_settings(settings),
+    );
 
     // Generate checks.
     let checks = check_path(
@@ -239,12 +264,12 @@
         &contents,
         tokens,
         &locator,
-        &noqa_line_for,
+        &directives,
         settings,
         &fixer::Mode::None,
     )?;
 
-    add_noqa(&checks, &contents, &noqa_line_for, path)
+    add_noqa(&checks, &contents, &directives.noqa_line_for, path)
 }
 
 pub fn autoformat_path(path: &Path) -> Result<()> {
@@ -268,13 +293,17 @@ pub fn test_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Result<Vec<Check>> {
     let contents = fs::read_file(path)?;
     let tokens: Vec<LexResult> = tokenize(&contents);
     let locator = SourceCodeLocator::new(&contents);
-    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+    let directives = directives::extract_directives(
+        &tokens,
+        &locator,
+        &directives::Flags::from_settings(settings),
+    );
     check_path(
         path,
         &contents,
         tokens,
         &locator,
-        &noqa_line_for,
+        &directives,
         settings,
         autofix,
     )
diff --git a/src/noqa.rs b/src/noqa.rs
index 36b15532cc..ea1b2a9181 100644
--- a/src/noqa.rs
+++ b/src/noqa.rs
@@ -3,14 +3,14 @@ use std::fs;
 use std::path::Path;
 
 use anyhow::Result;
+use nohash_hasher::IntMap;
 use once_cell::sync::Lazy;
 use regex::Regex;
-use rustpython_parser::lexer::{LexResult, Tok};
 
 use crate::checks::{Check, CheckCode};
 
 static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
-    Regex::new(r"(?i)(?P<noqa>\s*# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
+    Regex::new(r"(?P<noqa>\s*# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
         .expect("Invalid regex")
 });
 static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").expect("Invalid regex"));
@@ -43,30 +43,21 @@ pub fn extract_noqa_directive(line: &str) -> Directive {
     }
 }
 
-pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
-    let mut noqa_line_for: Vec<usize> = vec![];
-    for (start, tok, end) in lxr.iter().flatten() {
-        if matches!(tok, Tok::EndOfFile) {
-            break;
-        }
-        // For multi-line strings, we expect `noqa` directives on the last line of the
-        // string. By definition, we can't have multiple multi-line strings on
-        // the same line, so we don't need to verify that we haven't already
-        // traversed past the current line.
-        if matches!(tok, Tok::String { .. }) && end.row() > start.row() {
-            for i in (noqa_line_for.len())..(start.row() - 1) {
-                noqa_line_for.push(i + 1);
-            }
-            noqa_line_for.extend(vec![end.row(); (end.row() + 1) - start.row()]);
-        }
-    }
-    noqa_line_for
+pub fn add_noqa(
+    checks: &[Check],
+    contents: &str,
+    noqa_line_for: &IntMap<usize, usize>,
+    path: &Path,
+) -> Result<usize> {
+    let (count, output) = add_noqa_inner(checks, contents, noqa_line_for)?;
+    fs::write(path, output)?;
+    Ok(count)
 }
 
 fn add_noqa_inner(
     checks: &[Check],
     contents: &str,
-    noqa_line_for: &[usize],
+    noqa_line_for: &IntMap<usize, usize>,
 ) -> Result<(usize, String)> {
     let lines: Vec<&str> = contents.lines().collect();
     let mut matches_by_line: BTreeMap<usize, BTreeSet<&CheckCode>> = BTreeMap::new();
@@ -82,7 +73,7 @@
         // If there are newlines at the end of the file, they won't be represented in
         // `noqa_line_for`, so fallback to the current line.
         let noqa_lineno = noqa_line_for
-            .get(lineno)
+            .get(&lineno)
             .map(|lineno| lineno - 1)
             .unwrap_or(lineno);
 
@@ -120,108 +111,20 @@
     Ok((count, output))
 }
 
-pub fn add_noqa(
-    checks: &[Check],
-    contents: &str,
-    noqa_line_for: &[usize],
-    path: &Path,
-) -> Result<usize> {
-    let (count, output) = add_noqa_inner(checks, contents, noqa_line_for)?;
-    fs::write(path, output)?;
-    Ok(count)
-}
-
 #[cfg(test)]
 mod tests {
     use anyhow::Result;
     use rustpython_parser::ast::Location;
-    use rustpython_parser::lexer;
-    use rustpython_parser::lexer::LexResult;
 
     use crate::ast::types::Range;
     use crate::checks::{Check, CheckKind};
-    use crate::noqa::{add_noqa_inner, extract_noqa_line_for};
-
-    #[test]
-    fn extraction() -> Result<()> {
-        let empty: Vec<usize> = Default::default();
-
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(
-            "x = 1
-y = 2
-z = x + 1",
-        )
-        .collect();
-        assert_eq!(extract_noqa_line_for(&lxr), empty);
-
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(
-            "
-x = 1
-y = 2
-z = x + 1",
-        )
-        .collect();
-        assert_eq!(extract_noqa_line_for(&lxr), empty);
-
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(
-            "x = 1
-y = 2
-z = x + 1
- ",
-        )
-        .collect();
-        assert_eq!(extract_noqa_line_for(&lxr), empty);
-
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(
-            "x = 1
-
-y = 2
-z = x + 1
- ",
-        )
-        .collect();
-        assert_eq!(extract_noqa_line_for(&lxr), empty);
-
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(
-            "x = '''abc
-def
-ghi
-'''
-y = 2
-z = x + 1",
-        )
-        .collect();
-        assert_eq!(extract_noqa_line_for(&lxr), vec![4, 4, 4, 4]);
-
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(
-            "x = 1
-y = '''abc
-def
-ghi
-'''
-z = 2",
-        )
-        .collect();
-        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 5, 5, 5, 5]);
-
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(
-            "x = 1
-y = '''abc
-def
-ghi
-'''",
-        )
-        .collect();
-        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 5, 5, 5, 5]);
-
-        Ok(())
-    }
+    use crate::noqa::add_noqa_inner;
 
     #[test]
     fn modification() -> Result<()> {
         let checks = vec![];
         let contents = "x = 1";
-        let noqa_line_for = vec![1];
+        let noqa_line_for = Default::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
         assert_eq!(count, 0);
         assert_eq!(output.trim(), contents.trim());
@@ -234,7 +137,7 @@
             },
         )];
         let contents = "x = 1";
-        let noqa_line_for = vec![1];
+        let noqa_line_for = Default::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
         assert_eq!(count, 1);
         assert_eq!(output.trim(), "x = 1  # noqa: F841".trim());
@@ -256,7 +159,7 @@
             ),
         ];
         let contents = "x = 1  # noqa: E741";
-        let noqa_line_for = vec![1];
+        let noqa_line_for = Default::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
         assert_eq!(count, 1);
         assert_eq!(output.trim(), "x = 1  # noqa: E741, F841".trim());
@@ -278,7 +181,7 @@
             ),
         ];
         let contents = "x = 1  # noqa";
-        let noqa_line_for = vec![1];
+        let noqa_line_for = Default::default();
         let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
         assert_eq!(count, 1);
         assert_eq!(output.trim(), "x = 1  # noqa: E741, F841".trim());
diff --git a/src/source_code_locator.rs b/src/source_code_locator.rs
index a8b8f04387..0f30cabdcd 100644
--- a/src/source_code_locator.rs
+++ b/src/source_code_locator.rs
@@ -17,7 +17,7 @@ impl<'a> SourceCodeLocator<'a> {
     pub fn new(contents: &'a str) -> Self {
         SourceCodeLocator {
             contents,
-            rope: OnceCell::new(),
+            rope: Default::default(),
         }
     }
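Usage sketch (not part of the commit): every entry point in this patch follows the same pattern, so a condensed view of the new wiring may help. The sketch below assumes the APIs exactly as introduced or re-signed above (tokenize, SourceCodeLocator::new, directives::extract_directives, directives::Flags::from_settings, check_path); the wrapper name run_checks is hypothetical, and settings construction and error handling are elided.

use std::path::Path;

use anyhow::Result;

// Hypothetical helper mirroring the lint_path/lint_stdin hunks above -- a sketch,
// not the crate's actual API.
fn run_checks(path: &Path, contents: &str, settings: &Settings) -> Result<Vec<Check>> {
    // Tokenize once; the AST passes and directive extraction share this token stream.
    let tokens: Vec<LexResult> = tokenize(contents);

    // The locator computes offsets lazily; directive extraction uses it to slice
    // comment text out of the source, since RustPython's Tok::Comment carries no
    // text (see the TODO in directives.rs).
    let locator = SourceCodeLocator::new(contents);

    // The `# noqa` line mapping is always extracted; isort exclusions are only
    // computed when an import-based check is enabled (Flags::from_settings).
    let directives = directives::extract_directives(
        &tokens,
        &locator,
        &directives::Flags::from_settings(settings),
    );

    // Downstream passes consume the Directives struct rather than raw token data.
    check_path(
        path,
        contents,
        tokens,
        &locator,
        &directives,
        settings,
        &fixer::Mode::None,
    )
}

As in lint_path and lint_stdin above, only the Directives value reaches the line-based and import-based checks; the token stream itself is consumed by check_path.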