Split add_noqa process into distinct edit generation and edit application stages (#11265)
## Summary

`--add-noqa` now runs in two stages: first, the linter finds all diagnostics that need `noqa` comments and generates edits on a per-line basis. Second, these edits are applied, in order, to the document. A public-facing function, `generate_noqa_edits`, has also been introduced; it returns `noqa` edits generated on a per-diagnostic basis. This will be used by `ruff server` for `noqa` comment quick-fixes.

## Test Plan

Unit tests have been updated.
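As a rough illustration, here is a minimal sketch of how a caller such as `ruff server` might consume the new entry point. It is not part of this commit: it assumes the usual linter state (`path`, `diagnostics`, `locator`, `comment_ranges`, `external`, `noqa_line_for`, `line_ending`) is already in scope, and only the call shape and the crate-root re-export come from the diff below.

```rust
// Sketch only: pairing per-diagnostic noqa edits with their diagnostics.
// All bindings used here are assumed to exist in the caller, as in the linter.
use ruff_linter::generate_noqa_edits;

let edits = generate_noqa_edits(
    path,
    &diagnostics,
    &locator,
    &comment_ranges,
    &external,
    &noqa_line_for,
    line_ending,
);

// The returned vector is index-aligned with `diagnostics`; `None` means no
// noqa edit applies (e.g., the file or line is already exempted).
for (diagnostic, edit) in diagnostics.iter().zip(edits) {
    if let Some(edit) = edit {
        // Offer `edit` as a quick-fix that suppresses `diagnostic`.
    }
}
```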
This commit is contained in:
parent
0726e82342
commit
890cc325d5
3 changed files with 320 additions and 74 deletions
@@ -1553,3 +1553,68 @@ def unused(x): # noqa: ANN001, ARG001, D103
 
     Ok(())
 }
+
+#[test]
+fn add_noqa_multiline_comment() -> Result<()> {
+    let tempdir = TempDir::new()?;
+    let ruff_toml = tempdir.path().join("ruff.toml");
+    fs::write(
+        &ruff_toml,
+        r#"
+[lint]
+select = ["UP031"]
+"#,
+    )?;
+
+    let test_path = tempdir.path().join("noqa.py");
+
+    fs::write(
+        &test_path,
+        r#"
+print(
+    """First line
+    second line
+    third line
+    %s"""
+    % name
+)
+"#,
+    )?;
+
+    insta::with_settings!({
+        filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
+    }, {
+        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
+            .current_dir(tempdir.path())
+            .args(STDIN_BASE_OPTIONS)
+            .args(["--config", &ruff_toml.file_name().unwrap().to_string_lossy()])
+            .arg(&test_path)
+            .arg("--preview")
+            .args(["--add-noqa"])
+            .arg("-")
+            .pass_stdin(r#"
+"#), @r###"
+        success: true
+        exit_code: 0
+        ----- stdout -----
+
+        ----- stderr -----
+        Added 1 noqa directive.
+        "###);
+    });
+
+    let test_code = std::fs::read_to_string(&test_path).expect("should read test file");
+
+    insta::assert_snapshot!(test_code, @r###"
+    print(
+        """First line
+        second line
+        third line
+        %s""" # noqa: UP031
+        % name
+    )
+    "###);
+
+    Ok(())
+}
@@ -5,6 +5,7 @@
 //!
 //! [Ruff]: https://github.com/astral-sh/ruff
 
+pub use noqa::generate_noqa_edits;
 #[cfg(feature = "clap")]
 pub use registry::clap_completion::RuleParser;
 #[cfg(feature = "clap")]
@@ -1,6 +1,6 @@
 use std::collections::BTreeMap;
 use std::error::Error;
-use std::fmt::{Display, Write};
+use std::fmt::Display;
 use std::fs;
 use std::ops::Add;
 use std::path::Path;
@@ -10,7 +10,7 @@ use itertools::Itertools;
 use log::warn;
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 
-use ruff_diagnostics::Diagnostic;
+use ruff_diagnostics::{Diagnostic, Edit};
 use ruff_python_trivia::{indentation_at_offset, CommentRanges};
 use ruff_source_file::{LineEnding, Locator};
 
@@ -19,6 +19,27 @@ use crate::fs::relativize_path;
 use crate::registry::{AsRule, Rule, RuleSet};
 use crate::rule_redirects::get_redirect_target;
 
+/// Generates an array of edits that matches the length of `diagnostics`.
+/// Each potential edit in the array is paired, in order, with the associated diagnostic.
+/// Each edit will add a `noqa` comment to the appropriate line in the source to hide
+/// the diagnostic. These edits may conflict with each other and should not be applied
+/// simultaneously.
+pub fn generate_noqa_edits(
+    path: &Path,
+    diagnostics: &[Diagnostic],
+    locator: &Locator,
+    comment_ranges: &CommentRanges,
+    external: &[String],
+    noqa_line_for: &NoqaMapping,
+    line_ending: LineEnding,
+) -> Vec<Option<Edit>> {
+    let exemption =
+        FileExemption::try_extract(locator.contents(), comment_ranges, external, path, locator);
+    let directives = NoqaDirectives::from_commented_ranges(comment_ranges, path, locator);
+    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
+    build_noqa_edits_by_diagnostic(comments, locator, line_ending)
+}
+
 /// A directive to ignore a set of rules for a given line of Python source code (e.g.,
 /// `# noqa: F401, F841`).
 #[derive(Debug)]
@@ -511,6 +532,7 @@ pub(crate) fn add_noqa(
         noqa_line_for,
         line_ending,
     );
+
     fs::write(path, output)?;
     Ok(count)
 }
@@ -524,9 +546,7 @@ fn add_noqa_inner(
     noqa_line_for: &NoqaMapping,
     line_ending: LineEnding,
 ) -> (usize, String) {
-    // Map of line start offset to set of (non-ignored) diagnostic codes that are triggered on that line.
-    let mut matches_by_line: BTreeMap<TextSize, (RuleSet, Option<&Directive>)> =
-        BTreeMap::default();
+    let mut count = 0;
 
     // Whether the file is exempted from all checks.
     // Codes that are globally exempted (within the current file).
@@ -534,16 +554,117 @@ fn add_noqa_inner(
         FileExemption::try_extract(locator.contents(), comment_ranges, external, path, locator);
     let directives = NoqaDirectives::from_commented_ranges(comment_ranges, path, locator);
 
+    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
+
+    let edits = build_noqa_edits_by_line(comments, locator, line_ending);
+
+    let contents = locator.contents();
+
+    let mut output = String::with_capacity(contents.len());
+    let mut last_append = TextSize::default();
+
+    for (_, edit) in edits {
+        output.push_str(&contents[TextRange::new(last_append, edit.start())]);
+
+        edit.write(&mut output);
+
+        count += 1;
+
+        last_append = edit.end();
+    }
+
+    output.push_str(&contents[TextRange::new(last_append, TextSize::of(contents))]);
+
+    (count, output)
+}
+
+fn build_noqa_edits_by_diagnostic(
+    comments: Vec<Option<NoqaComment>>,
+    locator: &Locator,
+    line_ending: LineEnding,
+) -> Vec<Option<Edit>> {
+    let mut edits = Vec::default();
+    for comment in comments {
+        match comment {
+            Some(comment) => {
+                if let Some(noqa_edit) = generate_noqa_edit(
+                    comment.directive,
+                    comment.line,
+                    RuleSet::from_rule(comment.diagnostic.kind.rule()),
+                    locator,
+                    line_ending,
+                ) {
+                    edits.push(Some(noqa_edit.into_edit()));
+                }
+            }
+            None => edits.push(None),
+        }
+    }
+    edits
+}
+
+fn build_noqa_edits_by_line<'a>(
+    comments: Vec<Option<NoqaComment<'a>>>,
+    locator: &Locator,
+    line_ending: LineEnding,
+) -> BTreeMap<TextSize, NoqaEdit<'a>> {
+    let mut comments_by_line = BTreeMap::default();
+    for comment in comments.into_iter().flatten() {
+        comments_by_line
+            .entry(comment.line)
+            .or_insert_with(Vec::default)
+            .push(comment);
+    }
+    let mut edits = BTreeMap::default();
+    for (offset, matches) in comments_by_line {
+        let Some(first_match) = matches.first() else {
+            continue;
+        };
+        let directive = first_match.directive;
+        if let Some(edit) = generate_noqa_edit(
+            directive,
+            offset,
+            matches
+                .into_iter()
+                .map(|NoqaComment { diagnostic, .. }| diagnostic.kind.rule())
+                .collect(),
+            locator,
+            line_ending,
+        ) {
+            edits.insert(offset, edit);
+        }
+    }
+    edits
+}
+
+struct NoqaComment<'a> {
+    line: TextSize,
+    diagnostic: &'a Diagnostic,
+    directive: Option<&'a Directive<'a>>,
+}
+
+fn find_noqa_comments<'a>(
+    diagnostics: &'a [Diagnostic],
+    locator: &'a Locator,
+    exemption: &Option<FileExemption>,
+    directives: &'a NoqaDirectives,
+    noqa_line_for: &NoqaMapping,
+) -> Vec<Option<NoqaComment<'a>>> {
+    // List of noqa comments, ordered to match up with `diagnostics`
+    let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![];
+
     // Mark any non-ignored diagnostics.
     for diagnostic in diagnostics {
         match &exemption {
             Some(FileExemption::All) => {
                 // If the file is exempted, don't add any noqa directives.
+                comments_by_line.push(None);
                 continue;
             }
             Some(FileExemption::Codes(codes)) => {
                 // If the diagnostic is ignored by a global exemption, don't add a noqa directive.
                 if codes.contains(&diagnostic.kind.rule().noqa_code()) {
+                    comments_by_line.push(None);
                     continue;
                 }
             }
@@ -557,10 +678,12 @@ fn add_noqa_inner(
             {
                 match &directive_line.directive {
                     Directive::All(_) => {
+                        comments_by_line.push(None);
                         continue;
                     }
                     Directive::Codes(codes) => {
                        if codes.includes(diagnostic.kind.rule()) {
+                            comments_by_line.push(None);
                            continue;
                        }
                    }
@@ -574,18 +697,17 @@ fn add_noqa_inner(
         if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
             match &directive_line.directive {
                 Directive::All(_) => {
+                    comments_by_line.push(None);
                     continue;
                 }
-                Directive::Codes(codes) => {
+                directive @ Directive::Codes(codes) => {
                     let rule = diagnostic.kind.rule();
                     if !codes.includes(rule) {
-                        matches_by_line
-                            .entry(directive_line.start())
-                            .or_insert_with(|| {
-                                (RuleSet::default(), Some(&directive_line.directive))
-                            })
-                            .0
-                            .insert(rule);
+                        comments_by_line.push(Some(NoqaComment {
+                            line: directive_line.start(),
+                            diagnostic,
+                            directive: Some(directive),
+                        }));
                     }
                     continue;
                 }
@@ -593,87 +715,106 @@ fn add_noqa_inner(
         }
 
         // There's no existing noqa directive that suppresses the diagnostic.
-        matches_by_line
-            .entry(locator.line_start(noqa_offset))
-            .or_insert_with(|| (RuleSet::default(), None))
-            .0
-            .insert(diagnostic.kind.rule());
+        comments_by_line.push(Some(NoqaComment {
+            line: locator.line_start(noqa_offset),
+            diagnostic,
+            directive: None,
+        }));
     }
 
-    let mut count = 0;
-    let mut output = String::with_capacity(locator.len());
-    let mut prev_end = TextSize::default();
-
-    for (offset, (rules, directive)) in matches_by_line {
-        output.push_str(locator.slice(TextRange::new(prev_end, offset)));
-
-        let line = locator.full_line(offset);
-
-        match directive {
-            None => {
-                // Add existing content.
-                output.push_str(line.trim_end());
-
-                // Add `noqa` directive.
-                output.push_str(" # noqa: ");
-
-                // Add codes.
-                push_codes(&mut output, rules.iter().map(|rule| rule.noqa_code()));
-                output.push_str(&line_ending);
-                count += 1;
-            }
-            Some(Directive::All(_)) => {
-                // Does not get inserted into the map.
-            }
-            Some(Directive::Codes(codes)) => {
-                // Reconstruct the line based on the preserved rule codes.
-                // This enables us to tally the number of edits.
-                let output_start = output.len();
-
-                // Add existing content.
-                output.push_str(
-                    locator
-                        .slice(TextRange::new(offset, codes.start()))
-                        .trim_end(),
-                );
-
-                // Add `noqa` directive.
-                output.push_str(" # noqa: ");
-
-                // Add codes.
-                push_codes(
-                    &mut output,
-                    rules
-                        .iter()
-                        .map(|rule| rule.noqa_code().to_string())
-                        .chain(codes.iter().map(ToString::to_string))
-                        .sorted_unstable(),
-                );
-
-                // Only count if the new line is an actual edit.
-                if &output[output_start..] != line.trim_end() {
-                    count += 1;
-                }
-
-                output.push_str(&line_ending);
-            }
-        }
-
-        prev_end = offset + line.text_len();
-    }
-
-    output.push_str(locator.after(prev_end));
-
-    (count, output)
+    comments_by_line
+}
+
+struct NoqaEdit<'a> {
+    edit_range: TextRange,
+    rules: RuleSet,
+    codes: Option<&'a Codes<'a>>,
+    line_ending: LineEnding,
+}
+
+impl<'a> NoqaEdit<'a> {
+    fn into_edit(self) -> Edit {
+        let mut edit_content = String::new();
+        self.write(&mut edit_content);
+
+        Edit::range_replacement(edit_content, self.edit_range)
+    }
+
+    fn write(&self, writer: &mut impl std::fmt::Write) {
+        write!(writer, " # noqa: ").unwrap();
+        match self.codes {
+            Some(codes) => {
+                push_codes(
+                    writer,
+                    self.rules
+                        .iter()
+                        .map(|rule| rule.noqa_code().to_string())
+                        .chain(codes.iter().map(ToString::to_string))
+                        .sorted_unstable(),
+                );
+            }
+            None => {
+                push_codes(
+                    writer,
+                    self.rules.iter().map(|rule| rule.noqa_code().to_string()),
+                );
+            }
+        }
+        write!(writer, "{}", self.line_ending.as_str()).unwrap();
+    }
+}
+
+impl<'a> Ranged for NoqaEdit<'a> {
+    fn range(&self) -> TextRange {
+        self.edit_range
+    }
+}
+
+fn generate_noqa_edit<'a>(
+    directive: Option<&'a Directive>,
+    offset: TextSize,
+    rules: RuleSet,
+    locator: &Locator,
+    line_ending: LineEnding,
+) -> Option<NoqaEdit<'a>> {
+    let line_range = locator.full_line_range(offset);
+
+    let edit_range;
+    let codes;
+
+    // Add codes.
+    match directive {
+        None => {
+            let trimmed_line = locator.slice(line_range).trim_end();
+            edit_range = TextRange::new(TextSize::of(trimmed_line), line_range.len()) + offset;
+            codes = None;
+        }
+        Some(Directive::Codes(existing_codes)) => {
+            // find trimmed line without the noqa
+            let trimmed_line = locator
+                .slice(TextRange::new(line_range.start(), existing_codes.start()))
+                .trim_end();
+            edit_range = TextRange::new(TextSize::of(trimmed_line), line_range.len()) + offset;
+            codes = Some(existing_codes);
+        }
+        Some(Directive::All(_)) => return None,
+    };
+
+    Some(NoqaEdit {
+        edit_range,
+        rules,
+        codes,
+        line_ending,
+    })
 }
 
-fn push_codes<I: Display>(str: &mut String, codes: impl Iterator<Item = I>) {
+fn push_codes<I: Display>(writer: &mut dyn std::fmt::Write, codes: impl Iterator<Item = I>) {
     let mut first = true;
     for code in codes {
         if !first {
-            str.push_str(", ");
+            write!(writer, ", ").unwrap();
         }
-        write!(str, "{code}").unwrap();
+        write!(writer, "{code}").unwrap();
         first = false;
     }
 }
@@ -846,13 +987,15 @@ mod tests {
     use insta::assert_debug_snapshot;
     use ruff_text_size::{TextRange, TextSize};
 
-    use ruff_diagnostics::Diagnostic;
+    use ruff_diagnostics::{Diagnostic, Edit};
     use ruff_python_trivia::CommentRanges;
     use ruff_source_file::{LineEnding, Locator};
 
+    use crate::generate_noqa_edits;
     use crate::noqa::{add_noqa_inner, Directive, NoqaMapping, ParsedFileExemption};
     use crate::rules::pycodestyle::rules::AmbiguousVariableName;
     use crate::rules::pyflakes::rules::UnusedVariable;
+    use crate::rules::pyupgrade::rules::PrintfStringFormatting;
 
     #[test]
     fn noqa_all() {
@@ -1130,4 +1273,41 @@ mod tests {
         assert_eq!(count, 0);
         assert_eq!(output, "x = 1 # noqa");
     }
+
+    #[test]
+    fn multiline_comment() {
+        let path = Path::new("/tmp/foo.txt");
+        let source = r#"
+print(
+    """First line
+    second line
+    third line
+    %s"""
+    % name
+)
+"#;
+        let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect();
+        let diagnostics = [Diagnostic::new(
+            PrintfStringFormatting,
+            TextRange::new(12.into(), 79.into()),
+        )];
+        let comment_ranges = CommentRanges::default();
+        let edits = generate_noqa_edits(
+            path,
+            &diagnostics,
+            &Locator::new(source),
+            &comment_ranges,
+            &[],
+            &noqa_line_for,
+            LineEnding::Lf,
+        );
+        assert_eq!(
+            edits,
+            vec![Some(Edit::replacement(
+                " # noqa: UP031\n".to_string(),
+                68.into(),
+                69.into()
+            ))]
+        );
+    }
 }
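For reference, the edit-application stage that `--add-noqa` now performs (the `for (_, edit) in edits` loop in `add_noqa_inner` above) amounts to splicing sorted, non-overlapping per-line edits into the source in a single left-to-right pass. Below is a dependency-free sketch of that idea; the `Edit` struct and `apply_edits` helper are illustrative stand-ins, not Ruff's types.

```rust
// Sketch only: apply offset-sorted, non-overlapping replacements to a string,
// mirroring how per-line noqa edits are spliced into the file.
struct Edit {
    start: usize,    // byte offset where the replacement begins
    end: usize,      // byte offset where the replacement ends
    content: String, // replacement text, e.g. " # noqa: UP031\n"
}

fn apply_edits(source: &str, edits: &[Edit]) -> String {
    let mut output = String::with_capacity(source.len());
    let mut last_append = 0;
    for edit in edits {
        // Copy the untouched text up to this edit, then splice in the replacement.
        output.push_str(&source[last_append..edit.start]);
        output.push_str(&edit.content);
        last_append = edit.end;
    }
    // Copy whatever follows the final edit.
    output.push_str(&source[last_append..]);
    output
}

fn main() {
    // Replace the newline that ends the offending line with a noqa comment,
    // the same shape of edit asserted in the `multiline_comment` test above.
    let source = "print('%s' % name)\nx = 1\n";
    let edits = [Edit {
        start: 18,
        end: 19,
        content: " # noqa: UP031\n".to_string(),
    }];
    assert_eq!(
        apply_edits(source, &edits),
        "print('%s' % name) # noqa: UP031\nx = 1\n"
    );
}
```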