Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-30 05:45:24 +00:00

Only enforce multi-line noqa directives for strings (#258)

parent 38b19b78b7
commit de9ceb2fe1

5 changed files with 43 additions and 92 deletions
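For context on the change itself: ruff maps every physical line of a multi-line token to a single "noqa line", so that a `# noqa` comment on the closing line of a triple-quoted string can suppress checks on all of the lines the string spans. This commit restricts that collapsing to string tokens only. Below is a minimal, self-contained model of the mapping — not ruff's actual API; `noqa_line_for` and `string_spans` are illustrative names — with the span `(1, 4)` and expected output taken from the `vec![4, 4, 4, 4, 5, 6]` assertion in the tests further down.

    // Minimal model: rows covered by a multi-line string map to the string's
    // last row (where a trailing `# noqa` would sit); all other rows map to
    // themselves. `string_spans` holds 1-based (start, end) rows of strings.
    fn noqa_line_for(string_spans: &[(usize, usize)], n_rows: usize) -> Vec<usize> {
        (1..=n_rows)
            .map(|row| {
                string_spans
                    .iter()
                    .copied()
                    .find(|&(start, end)| start <= row && row <= end)
                    .map(|(_, end)| end)
                    .unwrap_or(row)
            })
            .collect()
    }

    fn main() {
        // x = '''abc / def / ghi / ''' spans rows 1..=4; y and z follow on 5 and 6.
        assert_eq!(noqa_line_for(&[(1, 4)], 6), vec![4, 4, 4, 4, 5, 6]);
    }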
resources/test/fixtures/M001.py (7 changes, vendored)

@@ -29,10 +29,3 @@ _ = """Lorem ipsum dolor sit amet.
 Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
 """ # noqa: E501, F841
-
-_ = """Lorem ipsum dolor sit amet.
-
-https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
-
-Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. # noqa: E501
-""" # noqa: E501
@@ -39,8 +39,6 @@ pub fn check_lines(
     let lines: Vec<&str> = contents.lines().collect();
     for (lineno, line) in lines.iter().enumerate() {
-        let mut did_insert: bool = false;
-
         // Grab the noqa (logical) line number for the current (physical) line.
         // If there are newlines at the end of the file, they won't be represented in
         // `noqa_line_for`, so fallback to the current line.
@@ -49,45 +47,19 @@ pub fn check_lines(
             .map(|lineno| lineno - 1)
             .unwrap_or(lineno);

-        if enforce_noqa && !did_insert {
-            // Try the current physical line.
-            noqa_directives
-                .entry(lineno)
-                .or_insert_with(|| (noqa::extract_noqa_directive(lines[lineno]), vec![]));
-            // Try the current logical line.
-            if lineno != noqa_lineno {
-                noqa_directives
-                    .entry(noqa_lineno)
-                    .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
-            }
-            did_insert = true;
+        if enforce_noqa {
+            noqa_directives
+                .entry(noqa_lineno)
+                .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
         }

         // Remove any ignored checks.
         // TODO(charlie): Only validate checks for the current line.
         for (index, check) in checks.iter().enumerate() {
             if check.location.row() == lineno + 1 {
-                if !did_insert {
-                    // Try the current physical line.
-                    noqa_directives
-                        .entry(lineno)
-                        .or_insert_with(|| (noqa::extract_noqa_directive(lines[lineno]), vec![]));
-                    // Try the current logical line.
-                    if lineno != noqa_lineno {
-                        noqa_directives.entry(noqa_lineno).or_insert_with(|| {
-                            (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![])
-                        });
-                    }
-                    did_insert = true;
-                }
-
-                let noqa = if lineno != noqa_lineno
-                    && matches!(noqa_directives.get(&lineno).unwrap(), (Directive::None, _))
-                {
-                    noqa_directives.get_mut(&noqa_lineno).unwrap()
-                } else {
-                    noqa_directives.get_mut(&lineno).unwrap()
-                };
+                let noqa = noqa_directives
+                    .entry(noqa_lineno)
+                    .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));

                 match noqa {
                     (Directive::All(_), matches) => {
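The lookup that replaces the old two-step physical/logical probe boils down to one std pattern: `HashMap::entry(...).or_insert_with(...)`, which parses each noqa line at most once and caches the result. A minimal sketch of that caching pattern in isolation — the `contains("# noqa")` test is a hypothetical stand-in for `noqa::extract_noqa_directive`:

    use std::collections::HashMap;

    fn main() {
        let lines = vec!["x = 1", "y = 2  # noqa"];
        let mut cache: HashMap<usize, bool> = HashMap::new();
        for lineno in 0..lines.len() {
            // Parse the line only on first access; later hits reuse the cached value.
            let has_noqa = cache
                .entry(lineno)
                .or_insert_with(|| lines[lineno].contains("# noqa"));
            println!("line {}: noqa = {}", lineno, has_noqa);
        }
    }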
@@ -109,26 +81,9 @@ pub fn check_lines(
         if enforce_line_too_long {
             let line_length = line.chars().count();
             if should_enforce_line_length(line, line_length, settings.line_length) {
-                if !did_insert {
-                    // Try the current physical line.
-                    noqa_directives
-                        .entry(lineno)
-                        .or_insert_with(|| (noqa::extract_noqa_directive(lines[lineno]), vec![]));
-                    // Try the current logical line.
-                    if lineno != noqa_lineno {
-                        noqa_directives.entry(noqa_lineno).or_insert_with(|| {
-                            (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![])
-                        });
-                    }
-                }
-
-                let noqa = if lineno != noqa_lineno
-                    && matches!(noqa_directives.get(&lineno).unwrap(), (Directive::None, _))
-                {
-                    noqa_directives.get_mut(&noqa_lineno).unwrap()
-                } else {
-                    noqa_directives.get_mut(&lineno).unwrap()
-                };
+                let noqa = noqa_directives
+                    .entry(noqa_lineno)
+                    .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));

                 let check = Check::new(
                     CheckKind::LineTooLong(line_length, settings.line_length),
@@ -27,7 +27,7 @@ fn check_path(
     let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();

     // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_line_map(&lxr);
+    let noqa_line_for = noqa::extract_noqa_line_for(&lxr);

     // Run the AST-based checks.
     if settings
src/noqa.rs (55 changes)

@@ -37,9 +37,10 @@ pub fn extract_noqa_directive(line: &str) -> Directive {
         }
     }
 }

-pub fn extract_line_map(lxr: &[LexResult]) -> Vec<usize> {
+pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
     let mut line_map: Vec<usize> = vec![];
+    let mut last_is_string = false;
     let mut last_seen = usize::MIN;
     let mut min_line = usize::MAX;
     let mut max_line = usize::MIN;
@@ -53,7 +54,14 @@ pub fn extract_line_map(lxr: &[LexResult]) -> Vec<usize> {
             min_line = min(min_line, start.row());
             max_line = max(max_line, start.row());

-            line_map.extend(vec![max_line; (max_line + 1) - min_line]);
+            // For now, we only care about preserving noqa directives across multi-line strings.
+            if last_is_string {
+                line_map.extend(vec![max_line; (max_line + 1) - min_line]);
+            } else {
+                for i in (min_line - 1)..(max_line) {
+                    line_map.push(i + 1);
+                }
+            }

             min_line = usize::MAX;
             max_line = usize::MIN;
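The two branches produce either a collapsed mapping or an identity mapping. A standalone sketch of just that arithmetic, assuming 1-based `min_line`/`max_line` rows as in the diff:

    fn main() {
        // A token spanning rows 1 through 4, e.g. a triple-quoted string.
        let (min_line, max_line): (usize, usize) = (1, 4);

        // String token: every physical row collapses to the token's last row.
        let collapsed = vec![max_line; (max_line + 1) - min_line];
        assert_eq!(collapsed, vec![4, 4, 4, 4]);

        // Any other multi-line token: each row simply maps to itself.
        let identity: Vec<usize> = ((min_line - 1)..max_line).map(|i| i + 1).collect();
        assert_eq!(identity, vec![1, 2, 3, 4]);
    }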
@@ -69,6 +77,7 @@ pub fn extract_line_map(lxr: &[LexResult]) -> Vec<usize> {
             max_line = max(max_line, end.row());
         }
         last_seen = start.row();
+        last_is_string = matches!(tok, Tok::String { .. });
     }

     line_map
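On the consuming side (`check_lines`, above), the returned vector is indexed by the zero-based physical line; the stored 1-based row is converted back with `- 1`, and lines past the end of the map fall back to themselves. A minimal sketch of that lookup, reusing the mapping from the multi-line-string test below:

    fn main() {
        // Mapping for: x = '''abc / def / ghi / ''' / y = 2 / z = x + 1
        let noqa_line_for: Vec<usize> = vec![4, 4, 4, 4, 5, 6];
        for lineno in 0..8 {
            // Rows in the map are 1-based; fall back to the physical line for
            // trailing newlines that aren't represented in the map.
            let noqa_lineno = noqa_line_for
                .get(lineno)
                .map(|lineno| lineno - 1)
                .unwrap_or(lineno);
            println!("physical line {} -> noqa line {}", lineno, noqa_lineno);
        }
    }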
@@ -80,7 +89,7 @@ mod tests {
     use rustpython_parser::lexer;
     use rustpython_parser::lexer::LexResult;

-    use crate::noqa::extract_line_map;
+    use crate::noqa::extract_noqa_line_for;

     #[test]
     fn line_map() -> Result<()> {
@@ -90,50 +99,50 @@ y = 2
 z = x + 1",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "
 x = 1
 y = 2
 z = x + 1",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3, 4]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3, 4]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = 1
 y = 2
 z = x + 1
 ",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = 1

 y = 2
 z = x + 1
 ",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3, 4]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3, 4]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = '''abc
 def
 ghi
 '''
 y = 2
 z = x + 1",
         )
         .collect();
-        assert_eq!(extract_line_map(&lxr), vec![4, 4, 4, 4, 5, 6]);
+        assert_eq!(extract_noqa_line_for(&lxr), vec![4, 4, 4, 4, 5, 6]);

         Ok(())
     }
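To reproduce these cases locally, something like `cargo test line_map` (assuming the crate's default test harness) should exercise the renamed function end to end.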
@@ -26,10 +26,4 @@ expression: checks
     row: 31
     column: 6
   fix: ~
-- kind:
-    UnusedNOQA: E501
-  location:
-    row: 38
-    column: 6
-  fix: ~
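With the corresponding fixture lines deleted from M001.py, the UnusedNOQA: E501 report at row 38 no longer appears, so the snapshot entry is removed.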