Mirror of https://github.com/astral-sh/ruff.git

Respect continuations in noqa enforcement (#2783)
parent 86d0749ed7
commit 81651a8479

1 changed file with 60 additions and 9 deletions
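Before this change, `extract_noqa_line_for` only mapped the interior lines of multi-line strings to the line carrying the closing quote. Physical lines joined by backslash continuations got no mapping, so a `noqa` directive at the end of a continuation did not apply to the lines above it. The commit extends the pass so that every physical line of a continuation maps to the continuation's last line, where the directive is expected: in `from foo import \` / `    bar as baz, \` / `    qux as quux`, a trailing `# noqa` on the third line now covers all three.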
@@ -59,6 +59,7 @@ pub fn extract_directives(lxr: &[LexResult], flags: Flags) -> Directives {
 /// Extract a mapping from logical line to noqa line.
 pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
     let mut noqa_line_for: IntMap<usize, usize> = IntMap::default();
+    let mut prev_non_newline: Option<(&Location, &Tok, &Location)> = None;
     for (start, tok, end) in lxr.iter().flatten() {
         if matches!(tok, Tok::EndOfFile) {
             break;
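The new `prev_non_newline` slot remembers the first token of the logical line currently being scanned (start location, token, end location). It is what lets the branch added below, once it reaches the token that terminates the line, know how many rows back the continuation began.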
@@ -70,6 +71,21 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
                 noqa_line_for.insert(i, end.row());
             }
         }
+        // For continuations, we expect `noqa` directives on the last line of the
+        // continuation.
+        if matches!(
+            tok,
+            Tok::Newline | Tok::NonLogicalNewline | Tok::Comment(..)
+        ) {
+            if let Some((.., end)) = prev_non_newline {
+                for i in end.row()..start.row() {
+                    noqa_line_for.insert(i, start.row());
+                }
+            }
+            prev_non_newline = None;
+        } else if prev_non_newline.is_none() {
+            prev_non_newline = Some((start, tok, end));
+        }
     }
     noqa_line_for
 }
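The added branch treats `Newline`, `NonLogicalNewline`, and `Comment` tokens as the end of a logical line: when one is reached, every physical row from the end of the line's first token up to (but not including) the terminator's row is mapped to the terminator's row, and the slot is reset. A minimal, self-contained sketch of the same technique, using a simplified `(start_row, end_row, is_terminator)` token model as a stand-in for ruff's `Tok` and `Location` types (the names and tuple shape here are illustrative assumptions, not ruff's API):

use std::collections::HashMap;

/// Map each physical row of a continuation to the row of its final token,
/// mirroring the loop above. A token is (start_row, end_row, is_terminator),
/// with `is_terminator` true for newline and comment tokens.
fn noqa_line_for(tokens: &[(usize, usize, bool)]) -> HashMap<usize, usize> {
    let mut map: HashMap<usize, usize> = HashMap::new();
    let mut prev_non_newline: Option<(usize, usize)> = None;
    for &(start_row, end_row, is_terminator) in tokens {
        if is_terminator {
            // The logical line ends here: point every earlier physical row at
            // the terminator's row, where a `noqa` directive would sit.
            if let Some((_, prev_end_row)) = prev_non_newline {
                for row in prev_end_row..start_row {
                    map.insert(row, start_row);
                }
            }
            prev_non_newline = None;
        } else if prev_non_newline.is_none() {
            // Remember only the first token of the logical line.
            prev_non_newline = Some((start_row, end_row));
        }
    }
    map
}

For a single-line statement the range `prev_end_row..start_row` is empty and nothing is recorded; for `x = \` / `    1`, the first token ends on row 1 and the newline starts on row 2, producing the `(1, 2)` mapping asserted in the tests below.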
@@ -193,11 +209,11 @@ z = x + 1",

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = 1
 y = '''abc
 def
 ghi
 '''
 z = 2",
         )
         .collect();
         assert_eq!(
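This fixture and the near-identical one at the top of the next hunk pin down the pre-existing multi-line-string behavior, in which each interior row of the triple-quoted string (rows 2 through 4) maps to row 5, the row of the closing `'''`.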
@@ -207,16 +223,51 @@ z = x + 1",

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = 1
 y = '''abc
 def
 ghi
 '''",
         )
         .collect();
         assert_eq!(
             extract_noqa_line_for(&lxr),
             IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
         );
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            r#"x = \
+    1"#,
+        )
+        .collect();
+        assert_eq!(extract_noqa_line_for(&lxr), IntMap::from_iter([(1, 2)]));
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            r#"from foo import \
+    bar as baz, \
+    qux as quux"#,
+        )
+        .collect();
+        assert_eq!(
+            extract_noqa_line_for(&lxr),
+            IntMap::from_iter([(1, 3), (2, 3)])
+        );
+
+        let lxr: Vec<LexResult> = lexer::make_tokenizer(
+            r#"
+# Foo
+from foo import \
+    bar as baz, \
+    qux as quux  # Baz
+x = \
+    1
+y = \
+    2"#,
+        )
+        .collect();
+        assert_eq!(
+            extract_noqa_line_for(&lxr),
+            IntMap::from_iter([(3, 5), (4, 5), (6, 7), (8, 9)])
+        );
     }

     #[test]
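The last fixture exercises the new mapping end to end. Counting rows inside the raw string: the `from foo import` continuation occupies rows 3 through 5, and since the trailing `# Baz` comment is the terminating token on row 5, rows 3 and 4 map to it; `x = \` maps row 6 to row 7; `y = \` maps row 8 to row 9. That yields the asserted `[(3, 5), (4, 5), (6, 7), (8, 9)]`.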