mirror of
https://github.com/astral-sh/ruff.git
synced 2025-09-28 12:55:05 +00:00
[pygrep_hooks
] Move blanket-noqa
to noqa checker (PGH004
) (#11053)
## Summary Move `blanket-noqa` rule from the token checker to the noqa checker. This allows us to make use of the line directives already computed in the noqa checker. ## Test Plan Verified test results are unchanged.
This commit is contained in:
parent
a9919707d4
commit
647548b5e7
4 changed files with 21 additions and 24 deletions
|
@@ -14,6 +14,7 @@ use crate::noqa;
|
||||||
use crate::noqa::{Directive, FileExemption, NoqaDirectives, NoqaMapping};
|
use crate::noqa::{Directive, FileExemption, NoqaDirectives, NoqaMapping};
|
||||||
use crate::registry::{AsRule, Rule, RuleSet};
|
use crate::registry::{AsRule, Rule, RuleSet};
|
||||||
use crate::rule_redirects::get_redirect_target;
|
use crate::rule_redirects::get_redirect_target;
|
||||||
|
use crate::rules::pygrep_hooks;
|
||||||
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
|
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
|
||||||
use crate::settings::LinterSettings;
|
use crate::settings::LinterSettings;
|
||||||
|
|
||||||
|
@@ -203,6 +204,10 @@ pub(crate) fn check_noqa(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if settings.rules.enabled(Rule::BlanketNOQA) {
|
||||||
|
pygrep_hooks::rules::blanket_noqa(diagnostics, &noqa_directives, locator);
|
||||||
|
}
|
||||||
|
|
||||||
ignored_diagnostics.sort_unstable();
|
ignored_diagnostics.sort_unstable();
|
||||||
ignored_diagnostics
|
ignored_diagnostics
|
||||||
}
|
}
|
||||||
|
|
|
@@ -45,10 +45,6 @@ pub(crate) fn check_tokens(
|
||||||
.check_lines(tokens, &mut diagnostics);
|
.check_lines(tokens, &mut diagnostics);
|
||||||
}
|
}
|
||||||
|
|
||||||
if settings.rules.enabled(Rule::BlanketNOQA) {
|
|
||||||
pygrep_hooks::rules::blanket_noqa(&mut diagnostics, indexer, locator);
|
|
||||||
}
|
|
||||||
|
|
||||||
if settings.rules.enabled(Rule::BlanketTypeIgnore) {
|
if settings.rules.enabled(Rule::BlanketTypeIgnore) {
|
||||||
pygrep_hooks::rules::blanket_type_ignore(&mut diagnostics, indexer, locator);
|
pygrep_hooks::rules::blanket_type_ignore(&mut diagnostics, indexer, locator);
|
||||||
}
|
}
|
||||||
|
|
|
@@ -246,7 +246,7 @@ impl Rule {
|
||||||
pub const fn lint_source(&self) -> LintSource {
|
pub const fn lint_source(&self) -> LintSource {
|
||||||
match self {
|
match self {
|
||||||
Rule::InvalidPyprojectToml => LintSource::PyprojectToml,
|
Rule::InvalidPyprojectToml => LintSource::PyprojectToml,
|
||||||
Rule::UnusedNOQA => LintSource::Noqa,
|
Rule::BlanketNOQA | Rule::UnusedNOQA => LintSource::Noqa,
|
||||||
Rule::BidirectionalUnicode
|
Rule::BidirectionalUnicode
|
||||||
| Rule::BlankLineWithWhitespace
|
| Rule::BlankLineWithWhitespace
|
||||||
| Rule::DocLineTooLong
|
| Rule::DocLineTooLong
|
||||||
|
@@ -256,7 +256,6 @@ impl Rule {
|
||||||
| Rule::MixedSpacesAndTabs
|
| Rule::MixedSpacesAndTabs
|
||||||
| Rule::TrailingWhitespace => LintSource::PhysicalLines,
|
| Rule::TrailingWhitespace => LintSource::PhysicalLines,
|
||||||
Rule::AmbiguousUnicodeCharacterComment
|
Rule::AmbiguousUnicodeCharacterComment
|
||||||
| Rule::BlanketNOQA
|
|
||||||
| Rule::BlanketTypeIgnore
|
| Rule::BlanketTypeIgnore
|
||||||
| Rule::BlankLineAfterDecorator
|
| Rule::BlankLineAfterDecorator
|
||||||
| Rule::BlankLineBetweenMethods
|
| Rule::BlankLineBetweenMethods
|
||||||
|
|
|
@@ -1,11 +1,10 @@
|
||||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
use ruff_python_index::Indexer;
|
|
||||||
use ruff_python_trivia::Cursor;
|
use ruff_python_trivia::Cursor;
|
||||||
use ruff_source_file::Locator;
|
use ruff_source_file::Locator;
|
||||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
use ruff_text_size::{Ranged, TextRange};
|
||||||
|
|
||||||
use crate::noqa::Directive;
|
use crate::noqa::{Directive, NoqaDirectives};
|
||||||
|
|
||||||
/// ## What it does
|
/// ## What it does
|
||||||
/// Check for `noqa` annotations that suppress all diagnostics, as opposed to
|
/// Check for `noqa` annotations that suppress all diagnostics, as opposed to
|
||||||
|
@@ -84,46 +83,44 @@ impl Violation for BlanketNOQA {
|
||||||
/// PGH004
|
/// PGH004
|
||||||
pub(crate) fn blanket_noqa(
|
pub(crate) fn blanket_noqa(
|
||||||
diagnostics: &mut Vec<Diagnostic>,
|
diagnostics: &mut Vec<Diagnostic>,
|
||||||
indexer: &Indexer,
|
noqa_directives: &NoqaDirectives,
|
||||||
locator: &Locator,
|
locator: &Locator,
|
||||||
) {
|
) {
|
||||||
for range in indexer.comment_ranges() {
|
for directive_line in noqa_directives.lines() {
|
||||||
let line = locator.slice(*range);
|
if let Directive::All(all) = &directive_line.directive {
|
||||||
let offset = range.start();
|
let line = locator.slice(directive_line.range);
|
||||||
if let Ok(Some(Directive::All(all))) = Directive::try_extract(line, TextSize::new(0)) {
|
let offset = directive_line.range.start();
|
||||||
// The `all` range is that of the `noqa` directive in, e.g., `# noqa` or `# noqa F401`.
|
let noqa_end = all.end() - offset;
|
||||||
let noqa_start = offset + all.start();
|
|
||||||
let noqa_end = offset + all.end();
|
|
||||||
|
|
||||||
// Skip the `# noqa`, plus any trailing whitespace.
|
// Skip the `# noqa`, plus any trailing whitespace.
|
||||||
let mut cursor = Cursor::new(&line[all.end().to_usize()..]);
|
let mut cursor = Cursor::new(&line[noqa_end.to_usize()..]);
|
||||||
cursor.eat_while(char::is_whitespace);
|
cursor.eat_while(char::is_whitespace);
|
||||||
|
|
||||||
// Check for extraneous spaces before the colon.
|
// Check for extraneous spaces before the colon.
|
||||||
// Ex) `# noqa : F401`
|
// Ex) `# noqa : F401`
|
||||||
if cursor.first() == ':' {
|
if cursor.first() == ':' {
|
||||||
let start = offset + all.end();
|
let start = all.end();
|
||||||
let end = start + cursor.token_len();
|
let end = start + cursor.token_len();
|
||||||
let mut diagnostic = Diagnostic::new(
|
let mut diagnostic = Diagnostic::new(
|
||||||
BlanketNOQA {
|
BlanketNOQA {
|
||||||
missing_colon: false,
|
missing_colon: false,
|
||||||
space_before_colon: true,
|
space_before_colon: true,
|
||||||
},
|
},
|
||||||
TextRange::new(noqa_start, end),
|
TextRange::new(all.start(), end),
|
||||||
);
|
);
|
||||||
diagnostic.set_fix(Fix::unsafe_edit(Edit::deletion(start, end)));
|
diagnostic.set_fix(Fix::unsafe_edit(Edit::deletion(start, end)));
|
||||||
diagnostics.push(diagnostic);
|
diagnostics.push(diagnostic);
|
||||||
} else if Directive::lex_code(cursor.chars().as_str()).is_some() {
|
} else if Directive::lex_code(cursor.chars().as_str()).is_some() {
|
||||||
// Check for a missing colon.
|
// Check for a missing colon.
|
||||||
// Ex) `# noqa F401`
|
// Ex) `# noqa F401`
|
||||||
let start = offset + all.end();
|
let start = all.end();
|
||||||
let end = start + TextSize::new(1);
|
let end = start + cursor.token_len();
|
||||||
let mut diagnostic = Diagnostic::new(
|
let mut diagnostic = Diagnostic::new(
|
||||||
BlanketNOQA {
|
BlanketNOQA {
|
||||||
missing_colon: true,
|
missing_colon: true,
|
||||||
space_before_colon: false,
|
space_before_colon: false,
|
||||||
},
|
},
|
||||||
TextRange::new(noqa_start, end),
|
TextRange::new(all.start(), end),
|
||||||
);
|
);
|
||||||
diagnostic.set_fix(Fix::unsafe_edit(Edit::insertion(':'.to_string(), start)));
|
diagnostic.set_fix(Fix::unsafe_edit(Edit::insertion(':'.to_string(), start)));
|
||||||
diagnostics.push(diagnostic);
|
diagnostics.push(diagnostic);
|
||||||
|
@@ -134,7 +131,7 @@ pub(crate) fn blanket_noqa(
|
||||||
missing_colon: false,
|
missing_colon: false,
|
||||||
space_before_colon: false,
|
space_before_colon: false,
|
||||||
},
|
},
|
||||||
TextRange::new(noqa_start, noqa_end),
|
all.range(),
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue