# Remove E999 as a rule, disallow any disablement methods for syntax error (#11901)
## Summary

This PR updates the way syntax errors are handled throughout the linter. The main change is that a syntax error is no longer treated as a rule, which involves the following changes:

* Update `Message` to be an enum with two variants, one for a diagnostic message and the other for a syntax error message
* Provide methods on the new message enum to query the information required by downstream usages
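For illustration, a minimal sketch of what such a two-variant enum and its query methods could look like is given below. The variant fields, accessor names, and types are assumptions made for this example, not the exact definitions introduced by the PR.

```rust
// Sketch only: field names and types are illustrative assumptions.
enum Message {
    /// A lint violation produced by a rule.
    Diagnostic {
        rule: &'static str, // e.g. "F401" (hypothetical representation)
        body: String,
        noqa_offset: usize, // offset of the line a `noqa` comment would apply to
    },
    /// A parse failure; never tied to a rule and never suppressible.
    SyntaxError { body: String },
}

impl Message {
    /// The rule code, if any; syntax errors have none.
    fn rule(&self) -> Option<&'static str> {
        match self {
            Message::Diagnostic { rule, .. } => Some(*rule),
            Message::SyntaxError { .. } => None,
        }
    }

    /// The `noqa` offset, if any; `None` tells downstream consumers (such as
    /// `ruff-lsp`) that no `noqa` quick fix should be offered.
    fn noqa_offset(&self) -> Option<usize> {
        match self {
            Message::Diagnostic { noqa_offset, .. } => Some(*noqa_offset),
            Message::SyntaxError { .. } => None,
        }
    }
}

fn main() {
    let message = Message::SyntaxError {
        body: "Expected an expression".to_string(),
    };
    assert_eq!(message.rule(), None);
    assert_eq!(message.noqa_offset(), None);
}
```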
This means that syntax errors cannot be hidden or disabled via any of the disablement methods. These are:

1. Configuration via `select`, `ignore`, `per-file-ignores`, and their `extend-*` variants:

   ```console
   $ cargo run -- check ~/playground/ruff/src/lsp.py --no-cache --config=~/playground/ruff/pyproject.toml
       Finished `dev` profile [unoptimized + debuginfo] target(s) in 0.11s
        Running `target/debug/ruff check /Users/dhruv/playground/ruff/src/lsp.py --no-cache --config=/Users/dhruv/playground/ruff/pyproject.toml`
   warning: Rule `E999` is deprecated and will be removed in a future release. Syntax errors will always be shown regardless of whether this rule is selected or not.
   /Users/dhruv/playground/ruff/src/lsp.py:1:8: F401 [*] `abc` imported but unused
     |
   1 | import abc
     |        ^^^ F401
   2 | from pathlib import Path
   3 | import os
     |
     = help: Remove unused import: `abc`
   ```

2. Command-line flags via `--select`, `--ignore`, `--per-file-ignores`, and their `--extend-*` variants:

   ```console
   $ cargo run -- check ~/playground/ruff/src/lsp.py --extend-select=E999 --no-preview --no-cache
       Finished `dev` profile [unoptimized + debuginfo] target(s) in 0.10s
        Running `target/debug/ruff check /Users/dhruv/playground/ruff/src/lsp.py --extend-select=E999 --no-preview --no-cache`
   warning: Rule `E999` is deprecated and will be removed in a future release. Syntax errors will always be shown regardless of whether this rule is selected or not.
   /Users/dhruv/playground/ruff/src/lsp.py:1:8: F401 [*] `abc` imported but unused
     |
   1 | import abc
     |        ^^^ F401
   2 | from pathlib import Path
   3 | import os
     |
     = help: Remove unused import: `abc`
   ```

This also means that the **output format** needs to be updated:

1. The `code`, `noqa_row`, and `url` fields in the JSON output are optional (`null` for syntax errors)
2. Other formats are changed accordingly

For each format, a new test case specific to syntax errors has been added. Please refer to the snapshot output for the exact format of a syntax error message.
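As a rough sketch of the nullable JSON fields (this is not Ruff's actual JSON emitter; the `JsonMessage` struct and its field subset are invented for illustration), the idea is simply that `Option` fields serialize to `null` for syntax errors:

```rust
use serde::Serialize;

// Illustrative only: the real output has more fields and a different shape.
#[derive(Serialize)]
struct JsonMessage {
    code: Option<String>,    // `null` for syntax errors
    url: Option<String>,     // `null` for syntax errors
    noqa_row: Option<usize>, // `null` for syntax errors
    message: String,
    filename: String,
}

fn main() {
    let syntax_error = JsonMessage {
        code: None,
        url: None,
        noqa_row: None,
        message: "SyntaxError: Expected an expression".to_string(),
        filename: "lsp.py".to_string(),
    };
    // Prints: {"code":null,"url":null,"noqa_row":null,"message":"...","filename":"lsp.py"}
    println!("{}", serde_json::to_string(&syntax_error).unwrap());
}
```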
The output of the `--statistics` flag will have a blank entry for syntax errors:

```
315     F821    [ ] undefined-name
119             [ ] syntax-error
103     F811    [ ] redefined-while-unused
```

The **language server** is updated to account for syntax errors by converting them into the LSP diagnostic format separately.
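A minimal sketch of that separate conversion path is shown below; it uses hypothetical, simplified stand-ins for both the message enum and the LSP diagnostic type (not the real `lsp_types` definitions), just to illustrate that a syntax error maps to a diagnostic with no rule code and therefore no `noqa` quick fix:

```rust
// Hypothetical, simplified stand-ins for the real types.
struct LspDiagnostic {
    range: (usize, usize), // (start offset, end offset), simplified
    code: Option<String>,  // rule code; `None` for syntax errors
    message: String,
}

enum Message {
    Diagnostic { code: String, body: String, start: usize, end: usize },
    SyntaxError { body: String, start: usize, end: usize },
}

fn to_lsp_diagnostic(message: &Message) -> LspDiagnostic {
    match message {
        // Lint diagnostics carry their rule code, so clients (including
        // `ruff-lsp`) can offer a `noqa` quick fix for them.
        Message::Diagnostic { code, body, start, end } => LspDiagnostic {
            range: (*start, *end),
            code: Some(code.clone()),
            message: body.clone(),
        },
        // Syntax errors have no rule code, so no `noqa` quick fix is offered.
        Message::SyntaxError { body, start, end } => LspDiagnostic {
            range: (*start, *end),
            code: None,
            message: body.clone(),
        },
    }
}

fn main() {
    let error = Message::SyntaxError {
        body: "Expected an expression".to_string(),
        start: 10,
        end: 11,
    };
    assert!(to_lsp_diagnostic(&error).code.is_none());
}
```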
### Preview

There are no quick fixes provided to disable syntax errors. This will automatically work for `ruff-lsp` because the `noqa_row` field will be `null` in that case.

*(Screenshot 2024-06-26 at 14:57:08: editor diagnostics for a syntax error, with no quick fix offered)*

Even with a `noqa` comment, the syntax error is displayed:

*(Screenshot 2024-06-26 at 14:59:51: syntax error still shown despite a `noqa` comment)*

Rule documentation page:

*(Screenshot 2024-06-26 at 16:48:07: rule documentation page)*

## Test Plan

- [x] Disablement methods via config show a warning
  - [x] `select`, `extend-select`
  - [ ] ~`ignore`~ _doesn't show any message_
  - [ ] ~`per-file-ignores`, `extend-per-file-ignores`~ _doesn't show any message_
- [x] Disablement methods via command-line flag show a warning
  - [x] `--select`, `--extend-select`
  - [ ] ~`--ignore`~ _doesn't show any message_
  - [ ] ~`--per-file-ignores`, `--extend-per-file-ignores`~ _doesn't show any message_
- [x] File with syntax errors should exit with code 1
- [x] Language server
  - [x] Should show diagnostics for syntax errors
  - [x] Should not recommend a quick-fix edit for adding a `noqa` comment
  - [x] Same for `ruff-lsp`

resolves: #8447
parent c98d8a040f
commit e7b49694a7
52 changed files with 1235 additions and 380 deletions
```diff
@@ -30,7 +30,6 @@ use crate::logging::DisplayParseError;
use crate::message::Message;
use crate::noqa::add_noqa;
use crate::registry::{AsRule, Rule, RuleSet};
use crate::rules::pycodestyle;
#[cfg(any(feature = "test-rules", test))]
use crate::rules::ruff::rules::test_rules::{self, TestRule, TEST_RULES};
use crate::settings::types::UnsafeFixes;

@@ -85,7 +84,6 @@ pub fn check_path(
) -> LinterResult<Vec<Diagnostic>> {
// Aggregate all diagnostics.
let mut diagnostics = vec![];
let mut error = None;

let tokens = parsed.tokens();
let comment_ranges = indexer.comment_ranges();

@@ -142,67 +140,53 @@ pub fn check_path(
));
}

// Run the AST-based rules.
let use_ast = settings
.rules
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_ast());
let use_imports = !directives.isort.skip_file
&& settings
// Run the AST-based rules only if there are no syntax errors.
if parsed.is_valid() {
let use_ast = settings
.rules
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_imports());
if use_ast || use_imports || use_doc_lines {
match parsed.as_result() {
Ok(parsed) => {
let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets);
let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index);
if use_ast {
diagnostics.extend(check_ast(
parsed,
locator,
stylist,
indexer,
&directives.noqa_line_for,
settings,
noqa,
path,
package,
source_type,
cell_offsets,
notebook_index,
));
}
if use_imports {
let import_diagnostics = check_imports(
parsed,
locator,
indexer,
&directives.isort,
settings,
stylist,
package,
source_type,
cell_offsets,
);

diagnostics.extend(import_diagnostics);
}
if use_doc_lines {
doc_lines.extend(doc_lines_from_ast(parsed.suite(), locator));
}
.any(|rule_code| rule_code.lint_source().is_ast());
let use_imports = !directives.isort.skip_file
&& settings
.rules
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_imports());
if use_ast || use_imports || use_doc_lines {
let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets);
let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index);
if use_ast {
diagnostics.extend(check_ast(
parsed,
locator,
stylist,
indexer,
&directives.noqa_line_for,
settings,
noqa,
path,
package,
source_type,
cell_offsets,
notebook_index,
));
}
Err(parse_errors) => {
// Always add a diagnostic for the syntax error, regardless of whether
// `Rule::SyntaxError` is enabled. We avoid propagating the syntax error
// if it's disabled via any of the usual mechanisms (e.g., `noqa`,
// `per-file-ignores`), and the easiest way to detect that suppression is
// to see if the diagnostic persists to the end of the function.
for parse_error in parse_errors {
pycodestyle::rules::syntax_error(&mut diagnostics, parse_error, locator);
}
// TODO(dhruvmanila): Remove this clone
error = parse_errors.iter().next().cloned();
if use_imports {
let import_diagnostics = check_imports(
parsed,
locator,
indexer,
&directives.isort,
settings,
stylist,
package,
source_type,
cell_offsets,
);

diagnostics.extend(import_diagnostics);
}
if use_doc_lines {
doc_lines.extend(doc_lines_from_ast(parsed.suite(), locator));
}
}
}

@@ -305,7 +289,7 @@ pub fn check_path(
locator,
comment_ranges,
&directives.noqa_line_for,
error.is_none(),
parsed.is_valid(),
&per_file_ignores,
settings,
);

@@ -316,23 +300,6 @@ pub fn check_path(
}
}

// If there was a syntax error, check if it should be discarded.
if error.is_some() {
// If the syntax error was removed by _any_ of the above disablement methods (e.g., a
// `noqa` directive, or a `per-file-ignore`), discard it.
if !diagnostics
.iter()
.any(|diagnostic| diagnostic.kind.rule() == Rule::SyntaxError)
{
error = None;
}

// If the syntax error _diagnostic_ is disabled, discard the _diagnostic_.
if !settings.rules.enabled(Rule::SyntaxError) {
diagnostics.retain(|diagnostic| diagnostic.kind.rule() != Rule::SyntaxError);
}
}

// Remove fixes for any rules marked as unfixable.
for diagnostic in &mut diagnostics {
if !settings.rules.should_fix(diagnostic.kind.rule()) {

@@ -352,7 +319,7 @@ pub fn check_path(
}
}

LinterResult::new(diagnostics, error)
LinterResult::new(diagnostics, parsed.errors().iter().next().cloned())
}

const MAX_ITERATIONS: usize = 100;

@@ -474,12 +441,15 @@ pub fn lint_only(
&parsed,
);

result.map(|diagnostics| diagnostics_to_messages(diagnostics, path, &locator, &directives))
result.map(|diagnostics| {
diagnostics_to_messages(diagnostics, parsed.errors(), path, &locator, &directives)
})
}

/// Convert from diagnostics to messages.
fn diagnostics_to_messages(
diagnostics: Vec<Diagnostic>,
parse_errors: &[ParseError],
path: &Path,
locator: &Locator,
directives: &Directives,

@@ -495,12 +465,13 @@ fn diagnostics_to_messages(
builder.finish()
});

diagnostics
.into_iter()
.map(|diagnostic| {
parse_errors
.iter()
.map(|parse_error| Message::from_parse_error(parse_error, locator, file.deref().clone()))
.chain(diagnostics.into_iter().map(|diagnostic| {
let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset)
})
}))
.collect()
}

@@ -609,7 +580,7 @@ pub fn lint_fix<'a>(

return Ok(FixerResult {
result: result.map(|diagnostics| {
diagnostics_to_messages(diagnostics, path, &locator, &directives)
diagnostics_to_messages(diagnostics, parsed.errors(), path, &locator, &directives)
}),
transformed,
fixed,
```