Remove E999 as a rule, disallow any disablement methods for syntax error (#11901)

## Summary

This PR updates the way syntax errors are handled throughout the linter.

The main change is that a syntax error is no longer treated as a rule, which involves
the following changes:
* Update `Message` to be an enum with two variants - one for diagnostic messages and the other for syntax error messages
* Provide methods on the new message enum to query the information required by downstream usages
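
For reference, a condensed sketch of the new enum and two of its query methods, drawn from the `ruff_linter::message` changes in the diff below (struct fields and the remaining helpers are elided):

```rust
// Condensed from the `ruff_linter::message` changes in this PR.
pub enum Message {
    Diagnostic(DiagnosticMessage),
    SyntaxError(SyntaxErrorMessage),
}

impl Message {
    /// Syntax errors have no rule, so downstream consumers now receive an `Option`.
    pub fn rule(&self) -> Option<Rule> {
        match self {
            Message::Diagnostic(m) => Some(m.kind.rule()),
            Message::SyntaxError(_) => None,
        }
    }

    /// Only diagnostic messages carry a fix; syntax errors are never fixable.
    pub fn fix(&self) -> Option<&Fix> {
        match self {
            Message::Diagnostic(m) => m.fix.as_ref(),
            Message::SyntaxError(_) => None,
        }
    }
}
```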

This means that syntax errors cannot be hidden or disabled via any of the following mechanisms:
1. Configuration via `select`, `ignore`, `per-file-ignores`, and their `extend-*` variants
	```console
	$ cargo run -- check ~/playground/ruff/src/lsp.py --no-cache --config=~/playground/ruff/pyproject.toml
	    Finished `dev` profile [unoptimized + debuginfo] target(s) in 0.11s
	     Running `target/debug/ruff check /Users/dhruv/playground/ruff/src/lsp.py --no-cache --config=/Users/dhruv/playground/ruff/pyproject.toml`
	warning: Rule `E999` is deprecated and will be removed in a future release. Syntax errors will always be shown regardless of whether this rule is selected or not.
	/Users/dhruv/playground/ruff/src/lsp.py:1:8: F401 [*] `abc` imported but unused
	  |
	1 | import abc
	  |        ^^^ F401
	2 | from pathlib import Path
	3 | import os
	  |
	  = help: Remove unused import: `abc`
	```
2. Command-line flags via `--select`, `--ignore`, `--per-file-ignores`, and their `--extend-*` variants
	```console
	$ cargo run -- check ~/playground/ruff/src/lsp.py --extend-select=E999 --no-preview --no-cache
	    Finished `dev` profile [unoptimized + debuginfo] target(s) in 0.10s
	     Running `target/debug/ruff check /Users/dhruv/playground/ruff/src/lsp.py --extend-select=E999 --no-preview --no-cache`
	warning: Rule `E999` is deprecated and will be removed in a future release. Syntax errors will always be shown regardless of whether this rule is selected or not.
	/Users/dhruv/playground/ruff/src/lsp.py:1:8: F401 [*] `abc` imported but unused
	  |
	1 | import abc
	  |        ^^^ F401
	2 | from pathlib import Path
	3 | import os
	  |
	  = help: Remove unused import: `abc`
	```

This also means that the **output format** needs to be updated:
1. The `code`, `noqa_row`, and `url` fields in the JSON output are now optional (`null` for syntax errors)
2. Other formats are changed accordingly

For each format, a new test case specific to syntax errors has been added. Please refer to the snapshot output for the exact format of the syntax error message.
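
For example, the JSON entry for a syntax error looks like this (taken from the new JSON snapshot further down; regular diagnostics keep their usual non-`null` values for these fields):

```json
{
  "cell": null,
  "code": null,
  "end_location": { "column": 1, "row": 2 },
  "filename": "syntax_errors.py",
  "fix": null,
  "location": { "column": 15, "row": 1 },
  "message": "SyntaxError: Expected one or more symbol names after import",
  "noqa_row": null,
  "url": null
}
```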

The output of the `--statistics` flag will have a blank entry for syntax
errors:
```
315     F821    [ ] undefined-name
119             [ ] syntax-error
103     F811    [ ] redefined-while-unused
```

The **language server** is updated to handle syntax errors by converting them into the LSP diagnostic format separately.
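
A minimal sketch of that conversion (a hypothetical helper, not the actual server code, which isn't shown in the hunks below; `to_lsp_range` is an assumed utility):

```rust
use ruff_linter::message::Message;

// Hypothetical sketch: a syntax error produces an LSP diagnostic without a rule
// code, so there is nothing to attach a `noqa` quick fix to.
fn to_lsp_diagnostic(message: &Message) -> lsp_types::Diagnostic {
    lsp_types::Diagnostic {
        range: to_lsp_range(message.range()), // assumed range-conversion helper
        severity: Some(lsp_types::DiagnosticSeverity::ERROR),
        code: message
            .rule()
            .map(|rule| lsp_types::NumberOrString::String(rule.noqa_code().to_string())),
        source: Some("ruff".to_string()),
        message: message.body().to_string(),
        ..Default::default()
    }
}
```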

### Preview

No quick fix is provided to disable syntax errors. This works automatically for `ruff-lsp` because the `noqa_row` field will be `null` in that case.
<img width="772" alt="Screenshot 2024-06-26 at 14 57 08"
src="aaac827e-4777-4ac8-8c68-eaf9f2c36774">

Even with a `noqa` comment, the syntax error is displayed:
<img width="763" alt="Screenshot 2024-06-26 at 14 59 51"
src="ba1afb68-7eaf-4b44-91af-6d93246475e2">

Rule documentation page:
<img width="1371" alt="Screenshot 2024-06-26 at 16 48 07"
src="524f01df-d91f-4ac0-86cc-40e76b318b24">


## Test Plan

- [x] Disablement methods via config shows a warning
	- [x] `select`, `extend-select`
	- [ ] ~`ignore`~ _doesn't show any message_
	- [ ] ~`per-file-ignores`, `extend-per-file-ignores`~ _doesn't show any message_
- [x] Disablement methods via command-line flag shows a warning
	- [x] `--select`, `--extend-select`
	- [ ] ~`--ignore`~ _doesn't show any message_
	- [ ] ~`--per-file-ignores`, `--extend-per-file-ignores`~ _doesn't show any message_
- [x] File with syntax errors should exit with code 1
- [x] Language server
	- [x] Should show diagnostics for syntax errors
	- [x] Should not recommend a quick fix edit for adding `noqa` comment
	- [x] Same for `ruff-lsp`

resolves: #8447
Dhruv Manilawala 2024-06-27 07:51:32 +05:30 committed by Micha Reiser
parent c98d8a040f
commit e7b49694a7
52 changed files with 1235 additions and 380 deletions


@ -19,7 +19,7 @@ use tempfile::NamedTempFile;
use ruff_cache::{CacheKey, CacheKeyHasher}; use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::{DiagnosticKind, Fix}; use ruff_diagnostics::{DiagnosticKind, Fix};
use ruff_linter::message::Message; use ruff_linter::message::{DiagnosticMessage, Message};
use ruff_linter::{warn_user, VERSION}; use ruff_linter::{warn_user, VERSION};
use ruff_macros::CacheKey; use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex; use ruff_notebook::NotebookIndex;
@ -333,12 +333,14 @@ impl FileCache {
let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish(); let file = SourceFileBuilder::new(path.to_string_lossy(), &*lint.source).finish();
lint.messages lint.messages
.iter() .iter()
.map(|msg| Message { .map(|msg| {
kind: msg.kind.clone(), Message::Diagnostic(DiagnosticMessage {
range: msg.range, kind: msg.kind.clone(),
fix: msg.fix.clone(), range: msg.range,
file: file.clone(), fix: msg.fix.clone(),
noqa_offset: msg.noqa_offset, file: file.clone(),
noqa_offset: msg.noqa_offset,
})
}) })
.collect() .collect()
}; };
@ -412,18 +414,19 @@ impl LintCacheData {
notebook_index: Option<NotebookIndex>, notebook_index: Option<NotebookIndex>,
) -> Self { ) -> Self {
let source = if let Some(msg) = messages.first() { let source = if let Some(msg) = messages.first() {
msg.file.source_text().to_owned() msg.source_file().source_text().to_owned()
} else { } else {
String::new() // No messages, no need to keep the source! String::new() // No messages, no need to keep the source!
}; };
let messages = messages let messages = messages
.iter() .iter()
.filter_map(|message| message.as_diagnostic_message())
.map(|msg| { .map(|msg| {
// Make sure that all message use the same source file. // Make sure that all message use the same source file.
assert_eq!( assert_eq!(
msg.file, &msg.file,
messages.first().unwrap().file, messages.first().unwrap().source_file(),
"message uses a different source file" "message uses a different source file"
); );
CacheMessage { CacheMessage {
@ -571,6 +574,7 @@ mod tests {
use test_case::test_case; use test_case::test_case;
use ruff_cache::CACHE_DIR_NAME; use ruff_cache::CACHE_DIR_NAME;
use ruff_linter::message::Message;
use ruff_linter::settings::flags; use ruff_linter::settings::flags;
use ruff_linter::settings::types::UnsafeFixes; use ruff_linter::settings::types::UnsafeFixes;
use ruff_python_ast::PySourceType; use ruff_python_ast::PySourceType;
@ -633,11 +637,7 @@ mod tests {
UnsafeFixes::Enabled, UnsafeFixes::Enabled,
) )
.unwrap(); .unwrap();
if diagnostics if diagnostics.messages.iter().any(Message::is_syntax_error) {
.messages
.iter()
.any(|m| m.kind.name == "SyntaxError")
{
parse_errors.push(path.clone()); parse_errors.push(path.clone());
} }
paths.push(path); paths.push(path);


@ -10,18 +10,18 @@ use std::path::Path;
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use colored::Colorize; use colored::Colorize;
use log::{debug, error, warn}; use log::{debug, error, warn};
use ruff_linter::codes::Rule;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use ruff_diagnostics::Diagnostic; use ruff_diagnostics::Diagnostic;
use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource}; use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
use ruff_linter::logging::DisplayParseError; use ruff_linter::logging::DisplayParseError;
use ruff_linter::message::Message; use ruff_linter::message::{Message, SyntaxErrorMessage};
use ruff_linter::pyproject_toml::lint_pyproject_toml; use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::types::UnsafeFixes; use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{flags, LinterSettings}; use ruff_linter::settings::{flags, LinterSettings};
use ruff_linter::source_kind::{SourceError, SourceKind}; use ruff_linter::source_kind::{SourceError, SourceKind};
use ruff_linter::{fs, IOError, SyntaxError}; use ruff_linter::{fs, IOError};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex}; use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType}; use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder; use ruff_source_file::SourceFileBuilder;
@ -55,57 +55,61 @@ impl Diagnostics {
path: Option<&Path>, path: Option<&Path>,
settings: &LinterSettings, settings: &LinterSettings,
) -> Self { ) -> Self {
let diagnostic = match err { match err {
// IO errors. // IO errors.
SourceError::Io(_) SourceError::Io(_)
| SourceError::Notebook(NotebookError::Io(_) | NotebookError::Json(_)) => { | SourceError::Notebook(NotebookError::Io(_) | NotebookError::Json(_)) => {
Diagnostic::new( if settings.rules.enabled(Rule::IOError) {
IOError { let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
message: err.to_string(), let source_file = SourceFileBuilder::new(name, "").finish();
}, Self::new(
TextRange::default(), vec![Message::from_diagnostic(
) Diagnostic::new(
IOError {
message: err.to_string(),
},
TextRange::default(),
),
source_file,
TextSize::default(),
)],
FxHashMap::default(),
)
} else {
match path {
Some(path) => {
warn!(
"{}{}{} {err}",
"Failed to lint ".bold(),
fs::relativize_path(path).bold(),
":".bold()
);
}
None => {
warn!("{}{} {err}", "Failed to lint".bold(), ":".bold());
}
}
Self::default()
}
} }
// Syntax errors. // Syntax errors.
SourceError::Notebook( SourceError::Notebook(
NotebookError::InvalidJson(_) NotebookError::InvalidJson(_)
| NotebookError::InvalidSchema(_) | NotebookError::InvalidSchema(_)
| NotebookError::InvalidFormat(_), | NotebookError::InvalidFormat(_),
) => Diagnostic::new( ) => {
SyntaxError { let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
message: err.to_string(), let dummy = SourceFileBuilder::new(name, "").finish();
}, Self::new(
TextRange::default(), vec![Message::SyntaxError(SyntaxErrorMessage {
), message: err.to_string(),
}; range: TextRange::default(),
file: dummy,
if settings.rules.enabled(diagnostic.kind.rule()) { })],
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy); FxHashMap::default(),
let dummy = SourceFileBuilder::new(name, "").finish(); )
Self::new(
vec![Message::from_diagnostic(
diagnostic,
dummy,
TextSize::default(),
)],
FxHashMap::default(),
)
} else {
match path {
Some(path) => {
warn!(
"{}{}{} {err}",
"Failed to lint ".bold(),
fs::relativize_path(path).bold(),
":".bold()
);
}
None => {
warn!("{}{} {err}", "Failed to lint".bold(), ":".bold());
}
} }
Self::default()
} }
} }
} }


@ -13,11 +13,11 @@ use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel; use ruff_linter::logging::LogLevel;
use ruff_linter::message::{ use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, RdjsonEmitter, SarifEmitter, JsonEmitter, JsonLinesEmitter, JunitEmitter, Message, MessageKind, PylintEmitter,
TextEmitter, RdjsonEmitter, SarifEmitter, TextEmitter,
}; };
use ruff_linter::notify_user; use ruff_linter::notify_user;
use ruff_linter::registry::{AsRule, Rule}; use ruff_linter::registry::Rule;
use ruff_linter::settings::flags::{self}; use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{OutputFormat, UnsafeFixes}; use ruff_linter::settings::types::{OutputFormat, UnsafeFixes};
@ -37,12 +37,13 @@ bitflags! {
#[derive(Serialize)] #[derive(Serialize)]
struct ExpandedStatistics { struct ExpandedStatistics {
code: SerializeRuleAsCode, code: Option<SerializeRuleAsCode>,
name: SerializeRuleAsTitle, name: SerializeMessageKindAsTitle,
count: usize, count: usize,
fixable: bool, fixable: bool,
} }
#[derive(Copy, Clone)]
struct SerializeRuleAsCode(Rule); struct SerializeRuleAsCode(Rule);
impl Serialize for SerializeRuleAsCode { impl Serialize for SerializeRuleAsCode {
@ -66,26 +67,26 @@ impl From<Rule> for SerializeRuleAsCode {
} }
} }
struct SerializeRuleAsTitle(Rule); struct SerializeMessageKindAsTitle(MessageKind);
impl Serialize for SerializeRuleAsTitle { impl Serialize for SerializeMessageKindAsTitle {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where where
S: serde::Serializer, S: serde::Serializer,
{ {
serializer.serialize_str(self.0.as_ref()) serializer.serialize_str(self.0.as_str())
} }
} }
impl Display for SerializeRuleAsTitle { impl Display for SerializeMessageKindAsTitle {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0.as_ref()) f.write_str(self.0.as_str())
} }
} }
impl From<Rule> for SerializeRuleAsTitle { impl From<MessageKind> for SerializeMessageKindAsTitle {
fn from(rule: Rule) -> Self { fn from(kind: MessageKind) -> Self {
Self(rule) Self(kind)
} }
} }
@ -341,24 +342,23 @@ impl Printer {
let statistics: Vec<ExpandedStatistics> = diagnostics let statistics: Vec<ExpandedStatistics> = diagnostics
.messages .messages
.iter() .iter()
.map(|message| (message.kind.rule(), message.fix.is_some())) .sorted_by_key(|message| (message.rule(), message.fixable()))
.sorted() .fold(vec![], |mut acc: Vec<(&Message, usize)>, message| {
.fold(vec![], |mut acc, (rule, fixable)| { if let Some((prev_message, count)) = acc.last_mut() {
if let Some((prev_rule, _, count)) = acc.last_mut() { if prev_message.rule() == message.rule() {
if *prev_rule == rule {
*count += 1; *count += 1;
return acc; return acc;
} }
} }
acc.push((rule, fixable, 1)); acc.push((message, 1));
acc acc
}) })
.iter() .iter()
.map(|(rule, fixable, count)| ExpandedStatistics { .map(|&(message, count)| ExpandedStatistics {
code: (*rule).into(), code: message.rule().map(std::convert::Into::into),
name: (*rule).into(), name: message.kind().into(),
count: *count, count,
fixable: *fixable, fixable: message.fixable(),
}) })
.sorted_by_key(|statistic| Reverse(statistic.count)) .sorted_by_key(|statistic| Reverse(statistic.count))
.collect(); .collect();
@ -381,7 +381,12 @@ impl Printer {
); );
let code_width = statistics let code_width = statistics
.iter() .iter()
.map(|statistic| statistic.code.to_string().len()) .map(|statistic| {
statistic
.code
.map_or_else(String::new, |rule| rule.to_string())
.len()
})
.max() .max()
.unwrap(); .unwrap();
let any_fixable = statistics.iter().any(|statistic| statistic.fixable); let any_fixable = statistics.iter().any(|statistic| statistic.fixable);
@ -395,7 +400,11 @@ impl Printer {
writer, writer,
"{:>count_width$}\t{:<code_width$}\t{}{}", "{:>count_width$}\t{:<code_width$}\t{}{}",
statistic.count.to_string().bold(), statistic.count.to_string().bold(),
statistic.code.to_string().red().bold(), statistic
.code
.map_or_else(String::new, |rule| rule.to_string())
.red()
.bold(),
if any_fixable { if any_fixable {
if statistic.fixable { if statistic.fixable {
&fixable &fixable
@ -545,7 +554,7 @@ impl FixableStatistics {
let mut unapplicable_unsafe = 0; let mut unapplicable_unsafe = 0;
for message in &diagnostics.messages { for message in &diagnostics.messages {
if let Some(fix) = &message.fix { if let Some(fix) = message.fix() {
if fix.applies(unsafe_fixes.required_applicability()) { if fix.applies(unsafe_fixes.required_applicability()) {
applicable += 1; applicable += 1;
} else { } else {


@ -798,10 +798,10 @@ fn stdin_parse_error() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
-:1:16: E999 SyntaxError: Expected one or more symbol names after import -:1:16: SyntaxError: Expected one or more symbol names after import
| |
1 | from foo import 1 | from foo import
| ^ E999 | ^
| |
Found 1 error. Found 1 error.
@ -819,18 +819,18 @@ fn stdin_multiple_parse_error() {
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
-:1:16: E999 SyntaxError: Expected one or more symbol names after import -:1:16: SyntaxError: Expected one or more symbol names after import
| |
1 | from foo import 1 | from foo import
| ^ E999 | ^
2 | bar = 2 | bar =
| |
-:2:6: E999 SyntaxError: Expected an expression -:2:6: SyntaxError: Expected an expression
| |
1 | from foo import 1 | from foo import
2 | bar = 2 | bar =
| ^ E999 | ^
| |
Found 2 errors. Found 2 errors.
@ -840,6 +840,50 @@ fn stdin_multiple_parse_error() {
"###); "###);
} }
#[test]
fn parse_error_not_included() {
// Select any rule except for `E999`, syntax error should still be shown.
let mut cmd = RuffCheck::default().args(["--select=I"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("foo =\n"), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:6: SyntaxError: Expected an expression
|
1 | foo =
| ^
|
Found 1 error.
----- stderr -----
error: Failed to parse at 1:6: Expected an expression
"###);
}
#[test]
fn deprecated_parse_error_selection() {
let mut cmd = RuffCheck::default().args(["--select=E999"]).build();
assert_cmd_snapshot!(cmd
.pass_stdin("foo =\n"), @r###"
success: false
exit_code: 1
----- stdout -----
-:1:6: SyntaxError: Expected an expression
|
1 | foo =
| ^
|
Found 1 error.
----- stderr -----
warning: Rule `E999` is deprecated and will be removed in a future release. Syntax errors will always be shown regardless of whether this rule is selected or not.
error: Failed to parse at 1:6: Expected an expression
"###);
}
#[test] #[test]
fn full_output_preview() { fn full_output_preview() {
let mut cmd = RuffCheck::default().args(["--preview"]).build(); let mut cmd = RuffCheck::default().args(["--preview"]).build();


@ -510,7 +510,7 @@ image[:,]
image[:,:,] image[:,:,]
lambda x, : lambda x, : x
# ==> unpack.py <== # ==> unpack.py <==
def function( def function(


@ -1,4 +0,0 @@
def x():


@ -125,7 +125,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Pycodestyle, "E742") => (RuleGroup::Stable, rules::pycodestyle::rules::AmbiguousClassName), (Pycodestyle, "E742") => (RuleGroup::Stable, rules::pycodestyle::rules::AmbiguousClassName),
(Pycodestyle, "E743") => (RuleGroup::Stable, rules::pycodestyle::rules::AmbiguousFunctionName), (Pycodestyle, "E743") => (RuleGroup::Stable, rules::pycodestyle::rules::AmbiguousFunctionName),
(Pycodestyle, "E902") => (RuleGroup::Stable, rules::pycodestyle::rules::IOError), (Pycodestyle, "E902") => (RuleGroup::Stable, rules::pycodestyle::rules::IOError),
(Pycodestyle, "E999") => (RuleGroup::Stable, rules::pycodestyle::rules::SyntaxError), (Pycodestyle, "E999") => (RuleGroup::Deprecated, rules::pycodestyle::rules::SyntaxError),
// pycodestyle warnings // pycodestyle warnings
(Pycodestyle, "W191") => (RuleGroup::Stable, rules::pycodestyle::rules::TabIndentation), (Pycodestyle, "W191") => (RuleGroup::Stable, rules::pycodestyle::rules::TabIndentation),


@ -11,7 +11,7 @@ pub use registry::clap_completion::RuleParser;
#[cfg(feature = "clap")] #[cfg(feature = "clap")]
pub use rule_selector::clap_completion::RuleSelectorParser; pub use rule_selector::clap_completion::RuleSelectorParser;
pub use rule_selector::RuleSelector; pub use rule_selector::RuleSelector;
pub use rules::pycodestyle::rules::{IOError, SyntaxError}; pub use rules::pycodestyle::rules::IOError;
pub const VERSION: &str = env!("CARGO_PKG_VERSION"); pub const VERSION: &str = env!("CARGO_PKG_VERSION");


@ -30,7 +30,6 @@ use crate::logging::DisplayParseError;
use crate::message::Message; use crate::message::Message;
use crate::noqa::add_noqa; use crate::noqa::add_noqa;
use crate::registry::{AsRule, Rule, RuleSet}; use crate::registry::{AsRule, Rule, RuleSet};
use crate::rules::pycodestyle;
#[cfg(any(feature = "test-rules", test))] #[cfg(any(feature = "test-rules", test))]
use crate::rules::ruff::rules::test_rules::{self, TestRule, TEST_RULES}; use crate::rules::ruff::rules::test_rules::{self, TestRule, TEST_RULES};
use crate::settings::types::UnsafeFixes; use crate::settings::types::UnsafeFixes;
@ -85,7 +84,6 @@ pub fn check_path(
) -> LinterResult<Vec<Diagnostic>> { ) -> LinterResult<Vec<Diagnostic>> {
// Aggregate all diagnostics. // Aggregate all diagnostics.
let mut diagnostics = vec![]; let mut diagnostics = vec![];
let mut error = None;
let tokens = parsed.tokens(); let tokens = parsed.tokens();
let comment_ranges = indexer.comment_ranges(); let comment_ranges = indexer.comment_ranges();
@ -142,67 +140,53 @@ pub fn check_path(
)); ));
} }
// Run the AST-based rules. // Run the AST-based rules only if there are no syntax errors.
let use_ast = settings if parsed.is_valid() {
.rules let use_ast = settings
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_ast());
let use_imports = !directives.isort.skip_file
&& settings
.rules .rules
.iter_enabled() .iter_enabled()
.any(|rule_code| rule_code.lint_source().is_imports()); .any(|rule_code| rule_code.lint_source().is_ast());
if use_ast || use_imports || use_doc_lines { let use_imports = !directives.isort.skip_file
match parsed.as_result() { && settings
Ok(parsed) => { .rules
let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets); .iter_enabled()
let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index); .any(|rule_code| rule_code.lint_source().is_imports());
if use_ast { if use_ast || use_imports || use_doc_lines {
diagnostics.extend(check_ast( let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets);
parsed, let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index);
locator, if use_ast {
stylist, diagnostics.extend(check_ast(
indexer, parsed,
&directives.noqa_line_for, locator,
settings, stylist,
noqa, indexer,
path, &directives.noqa_line_for,
package, settings,
source_type, noqa,
cell_offsets, path,
notebook_index, package,
)); source_type,
} cell_offsets,
if use_imports { notebook_index,
let import_diagnostics = check_imports( ));
parsed,
locator,
indexer,
&directives.isort,
settings,
stylist,
package,
source_type,
cell_offsets,
);
diagnostics.extend(import_diagnostics);
}
if use_doc_lines {
doc_lines.extend(doc_lines_from_ast(parsed.suite(), locator));
}
} }
Err(parse_errors) => { if use_imports {
// Always add a diagnostic for the syntax error, regardless of whether let import_diagnostics = check_imports(
// `Rule::SyntaxError` is enabled. We avoid propagating the syntax error parsed,
// if it's disabled via any of the usual mechanisms (e.g., `noqa`, locator,
// `per-file-ignores`), and the easiest way to detect that suppression is indexer,
// to see if the diagnostic persists to the end of the function. &directives.isort,
for parse_error in parse_errors { settings,
pycodestyle::rules::syntax_error(&mut diagnostics, parse_error, locator); stylist,
} package,
// TODO(dhruvmanila): Remove this clone source_type,
error = parse_errors.iter().next().cloned(); cell_offsets,
);
diagnostics.extend(import_diagnostics);
}
if use_doc_lines {
doc_lines.extend(doc_lines_from_ast(parsed.suite(), locator));
} }
} }
} }
@ -305,7 +289,7 @@ pub fn check_path(
locator, locator,
comment_ranges, comment_ranges,
&directives.noqa_line_for, &directives.noqa_line_for,
error.is_none(), parsed.is_valid(),
&per_file_ignores, &per_file_ignores,
settings, settings,
); );
@ -316,23 +300,6 @@ pub fn check_path(
} }
} }
// If there was a syntax error, check if it should be discarded.
if error.is_some() {
// If the syntax error was removed by _any_ of the above disablement methods (e.g., a
// `noqa` directive, or a `per-file-ignore`), discard it.
if !diagnostics
.iter()
.any(|diagnostic| diagnostic.kind.rule() == Rule::SyntaxError)
{
error = None;
}
// If the syntax error _diagnostic_ is disabled, discard the _diagnostic_.
if !settings.rules.enabled(Rule::SyntaxError) {
diagnostics.retain(|diagnostic| diagnostic.kind.rule() != Rule::SyntaxError);
}
}
// Remove fixes for any rules marked as unfixable. // Remove fixes for any rules marked as unfixable.
for diagnostic in &mut diagnostics { for diagnostic in &mut diagnostics {
if !settings.rules.should_fix(diagnostic.kind.rule()) { if !settings.rules.should_fix(diagnostic.kind.rule()) {
@ -352,7 +319,7 @@ pub fn check_path(
} }
} }
LinterResult::new(diagnostics, error) LinterResult::new(diagnostics, parsed.errors().iter().next().cloned())
} }
const MAX_ITERATIONS: usize = 100; const MAX_ITERATIONS: usize = 100;
@ -474,12 +441,15 @@ pub fn lint_only(
&parsed, &parsed,
); );
result.map(|diagnostics| diagnostics_to_messages(diagnostics, path, &locator, &directives)) result.map(|diagnostics| {
diagnostics_to_messages(diagnostics, parsed.errors(), path, &locator, &directives)
})
} }
/// Convert from diagnostics to messages. /// Convert from diagnostics to messages.
fn diagnostics_to_messages( fn diagnostics_to_messages(
diagnostics: Vec<Diagnostic>, diagnostics: Vec<Diagnostic>,
parse_errors: &[ParseError],
path: &Path, path: &Path,
locator: &Locator, locator: &Locator,
directives: &Directives, directives: &Directives,
@ -495,12 +465,13 @@ fn diagnostics_to_messages(
builder.finish() builder.finish()
}); });
diagnostics parse_errors
.into_iter() .iter()
.map(|diagnostic| { .map(|parse_error| Message::from_parse_error(parse_error, locator, file.deref().clone()))
.chain(diagnostics.into_iter().map(|diagnostic| {
let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start()); let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset) Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset)
}) }))
.collect() .collect()
} }
@ -609,7 +580,7 @@ pub fn lint_fix<'a>(
return Ok(FixerResult { return Ok(FixerResult {
result: result.map(|diagnostics| { result: result.map(|diagnostics| {
diagnostics_to_messages(diagnostics, path, &locator, &directives) diagnostics_to_messages(diagnostics, parsed.errors(), path, &locator, &directives)
}), }),
transformed, transformed,
fixed, fixed,


@ -3,7 +3,6 @@ use std::io::Write;
use ruff_source_file::SourceLocation; use ruff_source_file::SourceLocation;
use crate::message::{Emitter, EmitterContext, Message}; use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
/// Generate error logging commands for Azure Pipelines format. /// Generate error logging commands for Azure Pipelines format.
/// See [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning) /// See [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning)
@ -29,12 +28,14 @@ impl Emitter for AzureEmitter {
writeln!( writeln!(
writer, writer,
"##vso[task.logissue type=error\ "##vso[task.logissue type=error\
;sourcepath={filename};linenumber={line};columnnumber={col};code={code};]{body}", ;sourcepath={filename};linenumber={line};columnnumber={col};{code}]{body}",
filename = message.filename(), filename = message.filename(),
line = location.row, line = location.row,
col = location.column, col = location.column,
code = message.kind.rule().noqa_code(), code = message
body = message.kind.body, .rule()
.map_or_else(String::new, |rule| format!("code={};", rule.noqa_code())),
body = message.body(),
)?; )?;
} }
@ -46,7 +47,9 @@ impl Emitter for AzureEmitter {
mod tests { mod tests {
use insta::assert_snapshot; use insta::assert_snapshot;
use crate::message::tests::{capture_emitter_output, create_messages}; use crate::message::tests::{
capture_emitter_output, create_messages, create_syntax_error_messages,
};
use crate::message::AzureEmitter; use crate::message::AzureEmitter;
#[test] #[test]
@ -56,4 +59,12 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = AzureEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
} }


@ -27,8 +27,8 @@ pub(super) struct Diff<'a> {
impl<'a> Diff<'a> { impl<'a> Diff<'a> {
pub(crate) fn from_message(message: &'a Message) -> Option<Diff> { pub(crate) fn from_message(message: &'a Message) -> Option<Diff> {
message.fix.as_ref().map(|fix| Diff { message.fix().map(|fix| Diff {
source_code: &message.file, source_code: message.source_file(),
fix, fix,
}) })
} }


@ -4,7 +4,6 @@ use ruff_source_file::SourceLocation;
use crate::fs::relativize_path; use crate::fs::relativize_path;
use crate::message::{Emitter, EmitterContext, Message}; use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
/// Generate error workflow command in GitHub Actions format. /// Generate error workflow command in GitHub Actions format.
/// See: [GitHub documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message) /// See: [GitHub documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message)
@ -32,9 +31,8 @@ impl Emitter for GithubEmitter {
write!( write!(
writer, writer,
"::error title=Ruff \ "::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
({code}),file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::", code = message.rule().map_or_else(String::new, |rule| format!(" ({})", rule.noqa_code())),
code = message.kind.rule().noqa_code(),
file = message.filename(), file = message.filename(),
row = source_location.row, row = source_location.row,
column = source_location.column, column = source_location.column,
@ -42,15 +40,19 @@ impl Emitter for GithubEmitter {
end_column = end_location.column, end_column = end_location.column,
)?; )?;
writeln!( write!(
writer, writer,
"{path}:{row}:{column}: {code} {body}", "{path}:{row}:{column}:",
path = relativize_path(message.filename()), path = relativize_path(message.filename()),
row = location.row, row = location.row,
column = location.column, column = location.column,
code = message.kind.rule().noqa_code(),
body = message.kind.body,
)?; )?;
if let Some(rule) = message.rule() {
write!(writer, " {}", rule.noqa_code())?;
}
writeln!(writer, " {}", message.body())?;
} }
Ok(()) Ok(())
@ -61,7 +63,9 @@ impl Emitter for GithubEmitter {
mod tests { mod tests {
use insta::assert_snapshot; use insta::assert_snapshot;
use crate::message::tests::{capture_emitter_output, create_messages}; use crate::message::tests::{
capture_emitter_output, create_messages, create_syntax_error_messages,
};
use crate::message::GithubEmitter; use crate::message::GithubEmitter;
#[test] #[test]
@ -71,4 +75,12 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = GithubEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
} }


@ -9,7 +9,6 @@ use serde_json::json;
use crate::fs::{relativize_path, relativize_path_to}; use crate::fs::{relativize_path, relativize_path_to};
use crate::message::{Emitter, EmitterContext, Message}; use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
/// Generate JSON with violations in GitLab CI format /// Generate JSON with violations in GitLab CI format
// https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool // https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool
@ -91,8 +90,14 @@ impl Serialize for SerializedMessages<'_> {
} }
fingerprints.insert(message_fingerprint); fingerprints.insert(message_fingerprint);
let description = if let Some(rule) = message.rule() {
format!("({}) {}", rule.noqa_code(), message.body())
} else {
message.body().to_string()
};
let value = json!({ let value = json!({
"description": format!("({}) {}", message.kind.rule().noqa_code(), message.kind.body), "description": description,
"severity": "major", "severity": "major",
"fingerprint": format!("{:x}", message_fingerprint), "fingerprint": format!("{:x}", message_fingerprint),
"location": { "location": {
@ -110,18 +115,10 @@ impl Serialize for SerializedMessages<'_> {
/// Generate a unique fingerprint to identify a violation. /// Generate a unique fingerprint to identify a violation.
fn fingerprint(message: &Message, project_path: &str, salt: u64) -> u64 { fn fingerprint(message: &Message, project_path: &str, salt: u64) -> u64 {
let Message {
kind,
range: _,
fix: _fix,
file: _,
noqa_offset: _,
} = message;
let mut hasher = DefaultHasher::new(); let mut hasher = DefaultHasher::new();
salt.hash(&mut hasher); salt.hash(&mut hasher);
kind.name.hash(&mut hasher); message.name().hash(&mut hasher);
project_path.hash(&mut hasher); project_path.hash(&mut hasher);
hasher.finish() hasher.finish()
@ -131,7 +128,9 @@ fn fingerprint(message: &Message, project_path: &str, salt: u64) -> u64 {
mod tests { mod tests {
use insta::assert_snapshot; use insta::assert_snapshot;
use crate::message::tests::{capture_emitter_output, create_messages}; use crate::message::tests::{
capture_emitter_output, create_messages, create_syntax_error_messages,
};
use crate::message::GitlabEmitter; use crate::message::GitlabEmitter;
#[test] #[test]
@ -142,6 +141,14 @@ mod tests {
assert_snapshot!(redact_fingerprint(&content)); assert_snapshot!(redact_fingerprint(&content));
} }
#[test]
fn syntax_errors() {
let mut emitter = GitlabEmitter::default();
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(redact_fingerprint(&content));
}
// Redact the fingerprint because the default hasher isn't stable across platforms. // Redact the fingerprint because the default hasher isn't stable across platforms.
fn redact_fingerprint(content: &str) -> String { fn redact_fingerprint(content: &str) -> String {
static FINGERPRINT_HAY_KEY: &str = r#""fingerprint": ""#; static FINGERPRINT_HAY_KEY: &str = r#""fingerprint": ""#;


@ -205,7 +205,9 @@ impl std::fmt::Write for PadAdapter<'_> {
mod tests { mod tests {
use insta::assert_snapshot; use insta::assert_snapshot;
use crate::message::tests::{capture_emitter_output, create_messages}; use crate::message::tests::{
capture_emitter_output, create_messages, create_syntax_error_messages,
};
use crate::message::GroupedEmitter; use crate::message::GroupedEmitter;
use crate::settings::types::UnsafeFixes; use crate::settings::types::UnsafeFixes;
@ -217,6 +219,14 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = GroupedEmitter::default();
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
#[test] #[test]
fn show_source() { fn show_source() {
let mut emitter = GroupedEmitter::default().with_show_source(true); let mut emitter = GroupedEmitter::default().with_show_source(true);


@ -10,7 +10,6 @@ use ruff_source_file::{OneIndexed, SourceCode, SourceLocation};
use ruff_text_size::Ranged; use ruff_text_size::Ranged;
use crate::message::{Emitter, EmitterContext, Message}; use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
#[derive(Default)] #[derive(Default)]
pub struct JsonEmitter; pub struct JsonEmitter;
@ -50,20 +49,22 @@ impl Serialize for ExpandedMessages<'_> {
} }
pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext) -> Value { pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext) -> Value {
let source_code = message.file.to_source_code(); let source_code = message.source_file().to_source_code();
let notebook_index = context.notebook_index(message.filename()); let notebook_index = context.notebook_index(message.filename());
let fix = message.fix.as_ref().map(|fix| { let fix = message.fix().map(|fix| {
json!({ json!({
"applicability": fix.applicability(), "applicability": fix.applicability(),
"message": message.kind.suggestion.as_deref(), "message": message.suggestion(),
"edits": &ExpandedEdits { edits: fix.edits(), source_code: &source_code, notebook_index }, "edits": &ExpandedEdits { edits: fix.edits(), source_code: &source_code, notebook_index },
}) })
}); });
let mut start_location = source_code.source_location(message.start()); let mut start_location = source_code.source_location(message.start());
let mut end_location = source_code.source_location(message.end()); let mut end_location = source_code.source_location(message.end());
let mut noqa_location = source_code.source_location(message.noqa_offset); let mut noqa_location = message
.noqa_offset()
.map(|offset| source_code.source_location(offset));
let mut notebook_cell_index = None; let mut notebook_cell_index = None;
if let Some(notebook_index) = notebook_index { if let Some(notebook_index) = notebook_index {
@ -74,19 +75,19 @@ pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext)
); );
start_location = notebook_index.translate_location(&start_location); start_location = notebook_index.translate_location(&start_location);
end_location = notebook_index.translate_location(&end_location); end_location = notebook_index.translate_location(&end_location);
noqa_location = notebook_index.translate_location(&noqa_location); noqa_location = noqa_location.map(|location| notebook_index.translate_location(&location));
} }
json!({ json!({
"code": message.kind.rule().noqa_code().to_string(), "code": message.rule().map(|rule| rule.noqa_code().to_string()),
"url": message.kind.rule().url(), "url": message.rule().and_then(|rule| rule.url()),
"message": message.kind.body, "message": message.body(),
"fix": fix, "fix": fix,
"cell": notebook_cell_index, "cell": notebook_cell_index,
"location": start_location, "location": start_location,
"end_location": end_location, "end_location": end_location,
"filename": message.filename(), "filename": message.filename(),
"noqa_row": noqa_location.row "noqa_row": noqa_location.map(|location| location.row)
}) })
} }
@ -170,7 +171,7 @@ mod tests {
use crate::message::tests::{ use crate::message::tests::{
capture_emitter_notebook_output, capture_emitter_output, create_messages, capture_emitter_notebook_output, capture_emitter_output, create_messages,
create_notebook_messages, create_notebook_messages, create_syntax_error_messages,
}; };
use crate::message::JsonEmitter; use crate::message::JsonEmitter;
@ -182,6 +183,14 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = JsonEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
#[test] #[test]
fn notebook_output() { fn notebook_output() {
let mut emitter = JsonEmitter; let mut emitter = JsonEmitter;


@ -29,7 +29,7 @@ mod tests {
use crate::message::json_lines::JsonLinesEmitter; use crate::message::json_lines::JsonLinesEmitter;
use crate::message::tests::{ use crate::message::tests::{
capture_emitter_notebook_output, capture_emitter_output, create_messages, capture_emitter_notebook_output, capture_emitter_output, create_messages,
create_notebook_messages, create_notebook_messages, create_syntax_error_messages,
}; };
#[test] #[test]
@ -40,6 +40,14 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = JsonLinesEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
#[test] #[test]
fn notebook_output() { fn notebook_output() {
let mut emitter = JsonLinesEmitter; let mut emitter = JsonLinesEmitter;


@ -8,7 +8,6 @@ use ruff_source_file::SourceLocation;
use crate::message::{ use crate::message::{
group_messages_by_filename, Emitter, EmitterContext, Message, MessageWithLocation, group_messages_by_filename, Emitter, EmitterContext, Message, MessageWithLocation,
}; };
use crate::registry::AsRule;
#[derive(Default)] #[derive(Default)]
pub struct JunitEmitter; pub struct JunitEmitter;
@ -44,7 +43,7 @@ impl Emitter for JunitEmitter {
start_location, start_location,
} = message; } = message;
let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure); let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
status.set_message(message.kind.body.clone()); status.set_message(message.body());
let location = if context.is_notebook(message.filename()) { let location = if context.is_notebook(message.filename()) {
// We can't give a reasonable location for the structured formats, // We can't give a reasonable location for the structured formats,
// so we show one that's clearly a fallback // so we show one that's clearly a fallback
@ -57,10 +56,14 @@ impl Emitter for JunitEmitter {
"line {row}, col {col}, {body}", "line {row}, col {col}, {body}",
row = location.row, row = location.row,
col = location.column, col = location.column,
body = message.kind.body body = message.body()
)); ));
let mut case = TestCase::new( let mut case = TestCase::new(
format!("org.ruff.{}", message.kind.rule().noqa_code()), if let Some(rule) = message.rule() {
format!("org.ruff.{}", rule.noqa_code())
} else {
"org.ruff".to_string()
},
status, status,
); );
let file_path = Path::new(filename); let file_path = Path::new(filename);
@ -88,7 +91,9 @@ impl Emitter for JunitEmitter {
mod tests { mod tests {
use insta::assert_snapshot; use insta::assert_snapshot;
use crate::message::tests::{capture_emitter_output, create_messages}; use crate::message::tests::{
capture_emitter_output, create_messages, create_syntax_error_messages,
};
use crate::message::JunitEmitter; use crate::message::JunitEmitter;
#[test] #[test]
@ -98,4 +103,12 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = JunitEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
} }


@ -14,13 +14,18 @@ pub use json_lines::JsonLinesEmitter;
pub use junit::JunitEmitter; pub use junit::JunitEmitter;
pub use pylint::PylintEmitter; pub use pylint::PylintEmitter;
pub use rdjson::RdjsonEmitter; pub use rdjson::RdjsonEmitter;
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
use ruff_notebook::NotebookIndex;
use ruff_source_file::{SourceFile, SourceLocation};
use ruff_text_size::{Ranged, TextRange, TextSize};
pub use sarif::SarifEmitter; pub use sarif::SarifEmitter;
pub use text::TextEmitter; pub use text::TextEmitter;
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
use ruff_notebook::NotebookIndex;
use ruff_python_parser::ParseError;
use ruff_source_file::{Locator, SourceFile, SourceLocation};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::logging::DisplayParseErrorType;
use crate::registry::{AsRule, Rule};
mod azure; mod azure;
mod diff; mod diff;
mod github; mod github;
@ -34,8 +39,17 @@ mod rdjson;
mod sarif; mod sarif;
mod text; mod text;
/// Message represents either a diagnostic message corresponding to a rule violation or a syntax
/// error message raised by the parser.
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct Message { pub enum Message {
Diagnostic(DiagnosticMessage),
SyntaxError(SyntaxErrorMessage),
}
/// A diagnostic message corresponding to a rule violation.
#[derive(Debug, PartialEq, Eq)]
pub struct DiagnosticMessage {
pub kind: DiagnosticKind, pub kind: DiagnosticKind,
pub range: TextRange, pub range: TextRange,
pub fix: Option<Fix>, pub fix: Option<Fix>,
@ -43,37 +57,174 @@ pub struct Message {
pub noqa_offset: TextSize, pub noqa_offset: TextSize,
} }
/// A syntax error message raised by the parser.
#[derive(Debug, PartialEq, Eq)]
pub struct SyntaxErrorMessage {
pub message: String,
pub range: TextRange,
pub file: SourceFile,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum MessageKind {
Diagnostic(Rule),
SyntaxError,
}
impl MessageKind {
pub fn as_str(&self) -> &str {
match self {
MessageKind::Diagnostic(rule) => rule.as_ref(),
MessageKind::SyntaxError => "syntax-error",
}
}
}
impl Message { impl Message {
/// Create a [`Message`] from the given [`Diagnostic`] corresponding to a rule violation.
pub fn from_diagnostic( pub fn from_diagnostic(
diagnostic: Diagnostic, diagnostic: Diagnostic,
file: SourceFile, file: SourceFile,
noqa_offset: TextSize, noqa_offset: TextSize,
) -> Self { ) -> Message {
Self { Message::Diagnostic(DiagnosticMessage {
range: diagnostic.range(), range: diagnostic.range(),
kind: diagnostic.kind, kind: diagnostic.kind,
fix: diagnostic.fix, fix: diagnostic.fix,
file, file,
noqa_offset, noqa_offset,
})
}
/// Create a [`Message`] from the given [`ParseError`].
pub fn from_parse_error(
parse_error: &ParseError,
locator: &Locator,
file: SourceFile,
) -> Message {
// Try to create a non-empty range so that the diagnostic can print a caret at the right
// position. This requires that we retrieve the next character, if any, and take its length
// to maintain char-boundaries.
let len = locator
.after(parse_error.location.start())
.chars()
.next()
.map_or(TextSize::new(0), TextLen::text_len);
Message::SyntaxError(SyntaxErrorMessage {
message: format!(
"SyntaxError: {}",
DisplayParseErrorType::new(&parse_error.error)
),
range: TextRange::at(parse_error.location.start(), len),
file,
})
}
pub const fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> {
match self {
Message::Diagnostic(m) => Some(m),
Message::SyntaxError(_) => None,
} }
} }
/// Returns `true` if `self` is a syntax error message.
pub const fn is_syntax_error(&self) -> bool {
matches!(self, Message::SyntaxError(_))
}
/// Returns a message kind.
pub fn kind(&self) -> MessageKind {
match self {
Message::Diagnostic(m) => MessageKind::Diagnostic(m.kind.rule()),
Message::SyntaxError(_) => MessageKind::SyntaxError,
}
}
/// Returns the name used to represent the diagnostic.
pub fn name(&self) -> &str {
match self {
Message::Diagnostic(m) => &m.kind.name,
Message::SyntaxError(_) => "SyntaxError",
}
}
/// Returns the message body to display to the user.
pub fn body(&self) -> &str {
match self {
Message::Diagnostic(m) => &m.kind.body,
Message::SyntaxError(m) => &m.message,
}
}
/// Returns the fix suggestion for the violation.
pub fn suggestion(&self) -> Option<&str> {
match self {
Message::Diagnostic(m) => m.kind.suggestion.as_deref(),
Message::SyntaxError(_) => None,
}
}
/// Returns the offset at which the `noqa` comment will be placed if it's a diagnostic message.
pub fn noqa_offset(&self) -> Option<TextSize> {
match self {
Message::Diagnostic(m) => Some(m.noqa_offset),
Message::SyntaxError(_) => None,
}
}
/// Returns the [`Fix`] for the message, if there is any.
pub fn fix(&self) -> Option<&Fix> {
match self {
Message::Diagnostic(m) => m.fix.as_ref(),
Message::SyntaxError(_) => None,
}
}
/// Returns `true` if the message contains a [`Fix`].
pub fn fixable(&self) -> bool {
self.fix().is_some()
}
/// Returns the [`Rule`] corresponding to the diagnostic message.
pub fn rule(&self) -> Option<Rule> {
match self {
Message::Diagnostic(m) => Some(m.kind.rule()),
Message::SyntaxError(_) => None,
}
}
/// Returns the filename for the message.
pub fn filename(&self) -> &str { pub fn filename(&self) -> &str {
self.file.name() self.source_file().name()
} }
/// Computes the start source location for the message.
pub fn compute_start_location(&self) -> SourceLocation { pub fn compute_start_location(&self) -> SourceLocation {
self.file.to_source_code().source_location(self.start()) self.source_file()
.to_source_code()
.source_location(self.start())
} }
/// Computes the end source location for the message.
pub fn compute_end_location(&self) -> SourceLocation { pub fn compute_end_location(&self) -> SourceLocation {
self.file.to_source_code().source_location(self.end()) self.source_file()
.to_source_code()
.source_location(self.end())
}
/// Returns the [`SourceFile`] which the message belongs to.
pub fn source_file(&self) -> &SourceFile {
match self {
Message::Diagnostic(m) => &m.file,
Message::SyntaxError(m) => &m.file,
}
} }
} }
impl Ord for Message { impl Ord for Message {
fn cmp(&self, other: &Self) -> Ordering { fn cmp(&self, other: &Self) -> Ordering {
(&self.file, self.start()).cmp(&(&other.file, other.start())) (self.source_file(), self.start()).cmp(&(other.source_file(), other.start()))
} }
} }
@ -85,7 +236,10 @@ impl PartialOrd for Message {
impl Ranged for Message { impl Ranged for Message {
fn range(&self) -> TextRange { fn range(&self) -> TextRange {
self.range match self {
Message::Diagnostic(m) => m.range,
Message::SyntaxError(m) => m.range,
}
} }
} }
@ -155,11 +309,30 @@ mod tests {
use ruff_diagnostics::{Diagnostic, DiagnosticKind, Edit, Fix}; use ruff_diagnostics::{Diagnostic, DiagnosticKind, Edit, Fix};
use ruff_notebook::NotebookIndex; use ruff_notebook::NotebookIndex;
use ruff_source_file::{OneIndexed, SourceFileBuilder}; use ruff_python_parser::{parse_unchecked, Mode};
use ruff_source_file::{Locator, OneIndexed, SourceFileBuilder};
use ruff_text_size::{Ranged, TextRange, TextSize}; use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::message::{Emitter, EmitterContext, Message}; use crate::message::{Emitter, EmitterContext, Message};
pub(super) fn create_syntax_error_messages() -> Vec<Message> {
let source = r"from os import
if call(foo
def bar():
pass
";
let locator = Locator::new(source);
let source_file = SourceFileBuilder::new("syntax_errors.py", source).finish();
parse_unchecked(source, Mode::Module)
.errors()
.iter()
.map(|parse_error| {
Message::from_parse_error(parse_error, &locator, source_file.clone())
})
.collect()
}
pub(super) fn create_messages() -> Vec<Message> { pub(super) fn create_messages() -> Vec<Message> {
let fib = r#"import os let fib = r#"import os


@ -4,7 +4,6 @@ use ruff_source_file::OneIndexed;
use crate::fs::relativize_path; use crate::fs::relativize_path;
use crate::message::{Emitter, EmitterContext, Message}; use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::AsRule;
/// Generate violations in Pylint format. /// Generate violations in Pylint format.
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter) /// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
@ -27,12 +26,20 @@ impl Emitter for PylintEmitter {
message.compute_start_location().row message.compute_start_location().row
}; };
let body = if let Some(rule) = message.rule() {
format!(
"[{code}] {body}",
code = rule.noqa_code(),
body = message.body()
)
} else {
message.body().to_string()
};
writeln!( writeln!(
writer, writer,
"{path}:{row}: [{code}] {body}", "{path}:{row}: {body}",
path = relativize_path(message.filename()), path = relativize_path(message.filename()),
code = message.kind.rule().noqa_code(),
body = message.kind.body,
)?; )?;
} }
@ -44,7 +51,9 @@ impl Emitter for PylintEmitter {
mod tests { mod tests {
use insta::assert_snapshot; use insta::assert_snapshot;
use crate::message::tests::{capture_emitter_output, create_messages}; use crate::message::tests::{
capture_emitter_output, create_messages, create_syntax_error_messages,
};
use crate::message::PylintEmitter; use crate::message::PylintEmitter;
#[test] #[test]
@ -54,4 +63,12 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = PylintEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
} }


@ -9,7 +9,6 @@ use ruff_source_file::SourceCode;
use ruff_text_size::Ranged; use ruff_text_size::Ranged;
use crate::message::{Emitter, EmitterContext, Message, SourceLocation}; use crate::message::{Emitter, EmitterContext, Message, SourceLocation};
use crate::registry::AsRule;
#[derive(Default)] #[derive(Default)]
pub struct RdjsonEmitter; pub struct RdjsonEmitter;
@ -58,34 +57,34 @@ impl Serialize for ExpandedMessages<'_> {
} }
fn message_to_rdjson_value(message: &Message) -> Value { fn message_to_rdjson_value(message: &Message) -> Value {
let source_code = message.file.to_source_code(); let source_code = message.source_file().to_source_code();
let start_location = source_code.source_location(message.start()); let start_location = source_code.source_location(message.start());
let end_location = source_code.source_location(message.end()); let end_location = source_code.source_location(message.end());
if let Some(fix) = message.fix.as_ref() { if let Some(fix) = message.fix() {
json!({ json!({
"message": message.kind.body, "message": message.body(),
"location": { "location": {
"path": message.filename(), "path": message.filename(),
"range": rdjson_range(&start_location, &end_location), "range": rdjson_range(&start_location, &end_location),
}, },
"code": { "code": {
"value": message.kind.rule().noqa_code().to_string(), "value": message.rule().map(|rule| rule.noqa_code().to_string()),
"url": message.kind.rule().url(), "url": message.rule().and_then(|rule| rule.url()),
}, },
"suggestions": rdjson_suggestions(fix.edits(), &source_code), "suggestions": rdjson_suggestions(fix.edits(), &source_code),
}) })
} else { } else {
json!({ json!({
"message": message.kind.body, "message": message.body(),
"location": { "location": {
"path": message.filename(), "path": message.filename(),
"range": rdjson_range(&start_location, &end_location), "range": rdjson_range(&start_location, &end_location),
}, },
"code": { "code": {
"value": message.kind.rule().noqa_code().to_string(), "value": message.rule().map(|rule| rule.noqa_code().to_string()),
"url": message.kind.rule().url(), "url": message.rule().and_then(|rule| rule.url()),
}, },
}) })
} }
@ -125,7 +124,9 @@ fn rdjson_range(start: &SourceLocation, end: &SourceLocation) -> Value {
mod tests { mod tests {
use insta::assert_snapshot; use insta::assert_snapshot;
use crate::message::tests::{capture_emitter_output, create_messages}; use crate::message::tests::{
capture_emitter_output, create_messages, create_syntax_error_messages,
};
use crate::message::RdjsonEmitter; use crate::message::RdjsonEmitter;
#[test] #[test]
@ -135,4 +136,12 @@ mod tests {
assert_snapshot!(content); assert_snapshot!(content);
} }
#[test]
fn syntax_errors() {
let mut emitter = RdjsonEmitter;
let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
assert_snapshot!(content);
}
} }


@ -3,17 +3,16 @@ use std::io::Write;
use anyhow::Result;
use serde::{Serialize, Serializer};
use serde_json::json;
+use strum::IntoEnumIterator;

use ruff_source_file::OneIndexed;

use crate::codes::Rule;
use crate::fs::normalize_path;
use crate::message::{Emitter, EmitterContext, Message};
-use crate::registry::{AsRule, Linter, RuleNamespace};
+use crate::registry::{Linter, RuleNamespace};
use crate::VERSION;
-use strum::IntoEnumIterator;

pub struct SarifEmitter;

impl Emitter for SarifEmitter {
@ -103,7 +102,7 @@ impl Serialize for SarifRule<'_> {
#[derive(Debug)]
struct SarifResult {
-    rule: Rule,
+    rule: Option<Rule>,
    level: String,
    message: String,
    uri: String,
@ -120,9 +119,9 @@ impl SarifResult {
        let end_location = message.compute_end_location();
        let path = normalize_path(message.filename());
        Ok(Self {
-            rule: message.kind.rule(),
+            rule: message.rule(),
            level: "error".to_string(),
-            message: message.kind.name.clone(),
+            message: message.name().to_string(),
            uri: url::Url::from_file_path(&path)
                .map_err(|()| anyhow::anyhow!("Failed to convert path to URL: {}", path.display()))?
                .to_string(),
@ -140,9 +139,9 @@ impl SarifResult {
        let end_location = message.compute_end_location();
        let path = normalize_path(message.filename());
        Ok(Self {
-            rule: message.kind.rule(),
+            rule: message.rule(),
            level: "error".to_string(),
-            message: message.kind.name.clone(),
+            message: message.name().to_string(),
            uri: path.display().to_string(),
            start_line: start_location.row,
            start_column: start_location.column,
@ -175,7 +174,7 @@ impl Serialize for SarifResult {
                }
            }
        }],
-        "ruleId": self.rule.noqa_code().to_string(),
+        "ruleId": self.rule.map(|rule| rule.noqa_code().to_string()),
    })
    .serialize(serializer)
}
@ -184,7 +183,9 @@ impl Serialize for SarifResult {
#[cfg(test)]
mod tests {
-    use crate::message::tests::{capture_emitter_output, create_messages};
+    use crate::message::tests::{
+        capture_emitter_output, create_messages, create_syntax_error_messages,
+    };
    use crate::message::SarifEmitter;

    fn get_output() -> String {
@ -198,6 +199,13 @@ mod tests {
        serde_json::from_str::<serde_json::Value>(&content).unwrap();
    }

+    #[test]
+    fn valid_syntax_error_json() {
+        let mut emitter = SarifEmitter {};
+        let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
+        serde_json::from_str::<serde_json::Value>(&content).unwrap();
+    }

    #[test]
    fn test_results() {
        let content = get_output();

View file

@ -0,0 +1,6 @@
---
source: crates/ruff_linter/src/message/azure.rs
expression: content
---
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=1;columnnumber=15;]SyntaxError: Expected one or more symbol names after import
##vso[task.logissue type=error;sourcepath=syntax_errors.py;linenumber=3;columnnumber=12;]SyntaxError: Expected ')', found newline

View file

@ -0,0 +1,6 @@
---
source: crates/ruff_linter/src/message/github.rs
expression: content
---
::error title=Ruff,file=syntax_errors.py,line=1,col=15,endLine=2,endColumn=1::syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
::error title=Ruff,file=syntax_errors.py,line=3,col=12,endLine=4,endColumn=1::syntax_errors.py:3:12: SyntaxError: Expected ')', found newline

View file

@ -0,0 +1,30 @@
---
source: crates/ruff_linter/src/message/gitlab.rs
expression: redact_fingerprint(&content)
---
[
{
"description": "SyntaxError: Expected one or more symbol names after import",
"fingerprint": "<redacted>",
"location": {
"lines": {
"begin": 1,
"end": 2
},
"path": "syntax_errors.py"
},
"severity": "major"
},
{
"description": "SyntaxError: Expected ')', found newline",
"fingerprint": "<redacted>",
"location": {
"lines": {
"begin": 3,
"end": 4
},
"path": "syntax_errors.py"
},
"severity": "major"
}
]

View file

@ -0,0 +1,7 @@
---
source: crates/ruff_linter/src/message/grouped.rs
expression: content
---
syntax_errors.py:
1:15 SyntaxError: Expected one or more symbol names after import
3:12 SyntaxError: Expected ')', found newline

View file

@ -0,0 +1,40 @@
---
source: crates/ruff_linter/src/message/json.rs
expression: content
---
[
{
"cell": null,
"code": null,
"end_location": {
"column": 1,
"row": 2
},
"filename": "syntax_errors.py",
"fix": null,
"location": {
"column": 15,
"row": 1
},
"message": "SyntaxError: Expected one or more symbol names after import",
"noqa_row": null,
"url": null
},
{
"cell": null,
"code": null,
"end_location": {
"column": 1,
"row": 4
},
"filename": "syntax_errors.py",
"fix": null,
"location": {
"column": 12,
"row": 3
},
"message": "SyntaxError: Expected ')', found newline",
"noqa_row": null,
"url": null
}
]
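For consumers of the JSON output, the only schema change is that `code`, `noqa_row`, and `url` can now be `null`. A hedged sketch of parsing it with serde, where the struct, field selection, and fallback label are illustrative rather than an official schema:

```rust
// Assumes the `serde` (with derive) and `serde_json` crates; the types below
// are a hypothetical consumer-side model of the snapshot above.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Location {
    row: u32,
    column: u32,
}

#[derive(Debug, Deserialize)]
struct JsonMessage {
    filename: String,
    message: String,
    location: Location,
    end_location: Location,
    // `None` when the message is a syntax error rather than a lint violation.
    code: Option<String>,
    noqa_row: Option<u32>,
    url: Option<String>,
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"[{"cell":null,"code":null,"end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"SyntaxError: Expected one or more symbol names after import","noqa_row":null,"url":null}]"#;
    let messages: Vec<JsonMessage> = serde_json::from_str(raw)?;
    for m in &messages {
        // Syntax errors have no rule code, so fall back to a generic label.
        let code = m.code.as_deref().unwrap_or("syntax-error");
        println!(
            "{}:{}:{}: {} {}",
            m.filename, m.location.row, m.location.column, code, m.message
        );
    }
    Ok(())
}
```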

View file

@ -0,0 +1,6 @@
---
source: crates/ruff_linter/src/message/json_lines.rs
expression: content
---
{"cell":null,"code":null,"end_location":{"column":1,"row":2},"filename":"syntax_errors.py","fix":null,"location":{"column":15,"row":1},"message":"SyntaxError: Expected one or more symbol names after import","noqa_row":null,"url":null}
{"cell":null,"code":null,"end_location":{"column":1,"row":4},"filename":"syntax_errors.py","fix":null,"location":{"column":12,"row":3},"message":"SyntaxError: Expected ')', found newline","noqa_row":null,"url":null}

View file

@ -0,0 +1,15 @@
---
source: crates/ruff_linter/src/message/junit.rs
expression: content
---
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="ruff" tests="2" failures="2" errors="0">
<testsuite name="syntax_errors.py" tests="2" disabled="0" errors="0" failures="2" package="org.ruff">
<testcase name="org.ruff" classname="syntax_errors" line="1" column="15">
<failure message="SyntaxError: Expected one or more symbol names after import">line 1, col 15, SyntaxError: Expected one or more symbol names after import</failure>
</testcase>
<testcase name="org.ruff" classname="syntax_errors" line="3" column="12">
<failure message="SyntaxError: Expected &apos;)&apos;, found newline">line 3, col 12, SyntaxError: Expected &apos;)&apos;, found newline</failure>
</testcase>
</testsuite>
</testsuites>

View file

@ -0,0 +1,6 @@
---
source: crates/ruff_linter/src/message/pylint.rs
expression: content
---
syntax_errors.py:1: SyntaxError: Expected one or more symbol names after import
syntax_errors.py:3: SyntaxError: Expected ')', found newline

View file

@ -0,0 +1,53 @@
---
source: crates/ruff_linter/src/message/rdjson.rs
expression: content
---
{
"diagnostics": [
{
"code": {
"url": null,
"value": null
},
"location": {
"path": "syntax_errors.py",
"range": {
"end": {
"column": 1,
"line": 2
},
"start": {
"column": 15,
"line": 1
}
}
},
"message": "SyntaxError: Expected one or more symbol names after import"
},
{
"code": {
"url": null,
"value": null
},
"location": {
"path": "syntax_errors.py",
"range": {
"end": {
"column": 1,
"line": 4
},
"start": {
"column": 12,
"line": 3
}
}
},
"message": "SyntaxError: Expected ')', found newline"
}
],
"severity": "warning",
"source": {
"name": "ruff",
"url": "https://docs.astral.sh/ruff"
}
}

View file

@ -0,0 +1,22 @@
---
source: crates/ruff_linter/src/message/text.rs
expression: content
---
syntax_errors.py:1:15: SyntaxError: Expected one or more symbol names after import
|
1 | from os import
| ^
2 |
3 | if call(foo
4 | def bar():
|
syntax_errors.py:3:12: SyntaxError: Expected ')', found newline
|
1 | from os import
2 |
3 | if call(foo
| ^
4 | def bar():
5 | pass
|

View file

@ -15,7 +15,6 @@ use crate::fs::relativize_path;
use crate::line_width::{IndentWidth, LineWidthBuilder};
use crate::message::diff::Diff;
use crate::message::{Emitter, EmitterContext, Message};
-use crate::registry::AsRule;
use crate::settings::types::UnsafeFixes;
use crate::text_helpers::ShowNonprinting;
@ -146,28 +145,33 @@ pub(super) struct RuleCodeAndBody<'a> {
impl Display for RuleCodeAndBody<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        let kind = &self.message.kind;
        if self.show_fix_status {
-            if let Some(fix) = self.message.fix.as_ref() {
+            if let Some(fix) = self.message.fix() {
                // Do not display an indicator for unapplicable fixes
                if fix.applies(self.unsafe_fixes.required_applicability()) {
+                    if let Some(rule) = self.message.rule() {
+                        write!(f, "{} ", rule.noqa_code().to_string().red().bold())?;
+                    }
                    return write!(
                        f,
-                        "{code} {fix}{body}",
-                        code = kind.rule().noqa_code().to_string().red().bold(),
+                        "{fix}{body}",
                        fix = format_args!("[{}] ", "*".cyan()),
-                        body = kind.body,
+                        body = self.message.body(),
                    );
                }
            }
        };
-        write!(
-            f,
-            "{code} {body}",
-            code = kind.rule().noqa_code().to_string().red().bold(),
-            body = kind.body,
-        )
+        if let Some(rule) = self.message.rule() {
+            write!(
+                f,
+                "{code} {body}",
+                code = rule.noqa_code().to_string().red().bold(),
+                body = self.message.body(),
+            )
+        } else {
+            f.write_str(self.message.body())
+        }
    }
}
@ -178,11 +182,7 @@ pub(super) struct MessageCodeFrame<'a> {
impl Display for MessageCodeFrame<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        let Message {
-            kind, file, range, ..
-        } = self.message;
-        let suggestion = kind.suggestion.as_deref();
+        let suggestion = self.message.suggestion();
        let footer = if suggestion.is_some() {
            vec![Annotation {
                id: None,
@ -193,9 +193,9 @@ impl Display for MessageCodeFrame<'_> {
            Vec::new()
        };
-        let source_code = file.to_source_code();
+        let source_code = self.message.source_file().to_source_code();
-        let content_start_index = source_code.line_index(range.start());
+        let content_start_index = source_code.line_index(self.message.start());
        let mut start_index = content_start_index.saturating_sub(2);
        // If we're working with a Jupyter Notebook, skip the lines which are
@ -218,7 +218,7 @@ impl Display for MessageCodeFrame<'_> {
            start_index = start_index.saturating_add(1);
        }
-        let content_end_index = source_code.line_index(range.end());
+        let content_end_index = source_code.line_index(self.message.end());
        let mut end_index = content_end_index
            .saturating_add(2)
            .min(OneIndexed::from_zero_indexed(source_code.line_count()));
@ -249,7 +249,7 @@ impl Display for MessageCodeFrame<'_> {
        let source = replace_whitespace(
            source_code.slice(TextRange::new(start_offset, end_offset)),
-            range - start_offset,
+            self.message.range() - start_offset,
        );
        let source_text = source.text.show_nonprinting();
@ -260,7 +260,10 @@ impl Display for MessageCodeFrame<'_> {
        let char_length = source.text[source.annotation_range].chars().count();
-        let label = kind.rule().noqa_code().to_string();
+        let label = self
+            .message
+            .rule()
+            .map_or_else(String::new, |rule| rule.noqa_code().to_string());
        let snippet = Snippet {
            title: None,
@ -356,7 +359,7 @@ mod tests {
    use crate::message::tests::{
        capture_emitter_notebook_output, capture_emitter_output, create_messages,
-        create_notebook_messages,
+        create_notebook_messages, create_syntax_error_messages,
    };
    use crate::message::TextEmitter;
    use crate::settings::types::UnsafeFixes;
@ -401,4 +404,12 @@ mod tests {
        assert_snapshot!(content);
    }

+    #[test]
+    fn syntax_errors() {
+        let mut emitter = TextEmitter::default().with_show_source(true);
+        let content = capture_emitter_output(&mut emitter, &create_syntax_error_messages());
+        assert_snapshot!(content);
+    }
}

View file

@ -1063,7 +1063,7 @@ mod tests {
    use crate::generate_noqa_edits;
    use crate::noqa::{add_noqa_inner, Directive, NoqaMapping, ParsedFileExemption};
-    use crate::rules::pycodestyle::rules::AmbiguousVariableName;
+    use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
    use crate::rules::pyflakes::rules::UnusedVariable;
    use crate::rules::pyupgrade::rules::PrintfStringFormatting;
@ -1380,4 +1380,36 @@ print(
        ))]
        );
    }
#[test]
fn syntax_error() {
let path = Path::new("/tmp/foo.txt");
let source = "\
foo;
bar =
";
let diagnostics = [Diagnostic::new(
UselessSemicolon,
TextRange::new(4.into(), 5.into()),
)];
let noqa_line_for = NoqaMapping::default();
let comment_ranges = CommentRanges::default();
let edits = generate_noqa_edits(
path,
&diagnostics,
&Locator::new(source),
&comment_ranges,
&[],
&noqa_line_for,
LineEnding::Lf,
);
assert_eq!(
edits,
vec![Some(Edit::replacement(
" # noqa: E703\n".to_string(),
4.into(),
5.into()
))]
);
}
}

View file

@ -601,7 +601,7 @@ COM81.py:511:10: COM819 [*] Trailing comma prohibited
511 | image[:,:,]
    | ^ COM819
512 |
-513 | lambda x, :
+513 | lambda x, : x
    |
    = help: Remove trailing comma
@ -612,14 +612,14 @@ COM81.py:511:10: COM819 [*] Trailing comma prohibited
511 |-image[:,:,]
511 |+image[:,:]
512 512 |
-513 513 | lambda x, :
+513 513 | lambda x, : x
514 514 |
COM81.py:513:9: COM819 [*] Trailing comma prohibited
    |
511 | image[:,:,]
512 |
-513 | lambda x, :
+513 | lambda x, : x
    | ^ COM819
514 |
515 | # ==> unpack.py <==
@ -630,8 +630,8 @@ COM81.py:513:9: COM819 [*] Trailing comma prohibited
510 510 |
511 511 | image[:,:,]
512 512 |
-513 |-lambda x, :
+513 |-lambda x, : x
-513 |+lambda x :
+513 |+lambda x : x
514 514 |
515 515 | # ==> unpack.py <==
516 516 | def function(
@ -798,6 +798,14 @@ COM81.py:565:13: COM812 [*] Trailing comma missing
567 567 |
568 568 | (
+COM81.py:569:5: SyntaxError: Starred expression cannot be used here
+    |
+568 | (
+569 | *args
+    | ^
+570 | )
+    |
COM81.py:573:10: COM812 [*] Trailing comma missing
    |
572 | {

View file

@ -50,7 +50,6 @@ mod tests {
    #[test_case(Rule::NoneComparison, Path::new("E711.py"))]
    #[test_case(Rule::NotInTest, Path::new("E713.py"))]
    #[test_case(Rule::NotIsTest, Path::new("E714.py"))]
-    #[test_case(Rule::SyntaxError, Path::new("E999.py"))]
    #[test_case(Rule::TabIndentation, Path::new("W19.py"))]
    #[test_case(Rule::TrailingWhitespace, Path::new("W29.py"))]
    #[test_case(Rule::TrailingWhitespace, Path::new("W291.py"))]

View file

@ -1,11 +1,5 @@
-use ruff_python_parser::ParseError;
-use ruff_text_size::{TextLen, TextRange, TextSize};
-
-use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_diagnostics::Violation;
use ruff_macros::{derive_message_formats, violation};
-use ruff_source_file::Locator;
-
-use crate::logging::DisplayParseErrorType;
/// ## What it does /// ## What it does
/// This is not a regular diagnostic; instead, it's raised when a file cannot be read /// This is not a regular diagnostic; instead, it's raised when a file cannot be read
@ -43,6 +37,10 @@ impl Violation for IOError {
    }
}
/// ## Deprecated
/// This rule has been deprecated and will be removed in a future release. Syntax errors will
/// always be shown regardless of whether this rule is selected or not.
///
/// ## What it does
/// Checks for code that contains syntax errors.
///
@ -74,27 +72,3 @@ impl Violation for SyntaxError {
        format!("SyntaxError: {message}")
    }
}
/// E901
pub(crate) fn syntax_error(
diagnostics: &mut Vec<Diagnostic>,
parse_error: &ParseError,
locator: &Locator,
) {
let rest = locator.after(parse_error.location.start());
// Try to create a non-empty range so that the diagnostic can print a caret at the
// right position. This requires that we retrieve the next character, if any, and take its length
// to maintain char-boundaries.
let len = rest
.chars()
.next()
.map_or(TextSize::new(0), TextLen::text_len);
diagnostics.push(Diagnostic::new(
SyntaxError {
message: format!("{}", DisplayParseErrorType::new(&parse_error.error)),
},
TextRange::at(parse_error.location.start(), len),
));
}
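Besides formatting the message, the removed `syntax_error` helper above did one small piece of work: it widened the error offset to the next character so the caret has a non-empty span to point at. A standalone sketch of that range logic, with simplified types rather than the crate's real ones; the same idea presumably applies wherever parse errors are now turned into messages:

```rust
// Hypothetical helper mirroring the widening logic shown in the removed code:
// take the length of the next character after the offset, if any, so the
// resulting range stays on a char boundary and is non-empty when possible.
fn error_range(source: &str, offset: usize) -> (usize, usize) {
    let len = source[offset..]
        .chars()
        .next()
        .map_or(0, char::len_utf8);
    (offset, offset + len)
}

fn main() {
    let source = "from os import\n";
    // An error reported at the trailing newline still gets a one-character range.
    assert_eq!(error_range(source, 14), (14, 15));
    // At end of file there is no next character, so the range stays empty.
    assert_eq!(error_range(source, source.len()), (15, 15));
}
```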

View file

@ -5,8 +5,8 @@ pub(crate) use bare_except::*;
pub(crate) use blank_lines::*;
pub(crate) use compound_statements::*;
pub(crate) use doc_line_too_long::*;
-pub use errors::IOError;
pub(crate) use errors::*;
+pub use errors::{IOError, SyntaxError};
pub(crate) use invalid_escape_sequence::*;
pub(crate) use lambda_assignment::*;
pub(crate) use line_too_long::*;

View file

@ -21,4 +21,42 @@ E11.py:6:1: E111 Indentation is not a multiple of 4
8 | if False:
  |
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
7 | #: E112
8 | if False:
9 | print()
| ^
10 | #: E113
11 | print()
|
E11.py:12:1: SyntaxError: Unexpected indentation
|
10 | #: E113
11 | print()
12 | print()
| ^
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
|
E11.py:14:1: SyntaxError: Expected a statement
|
12 | print()
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
| ^
15 | # 'httpd/unix-directory'
16 | create_date = False
|
E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
43 | #: E112
44 | if False: #
45 | print()
| ^
46 | #:
47 | if False:
|

View file

@ -11,6 +11,36 @@ E11.py:9:1: E112 Expected an indented block
11 | print()
   |
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
7 | #: E112
8 | if False:
9 | print()
| ^
10 | #: E113
11 | print()
|
E11.py:12:1: SyntaxError: Unexpected indentation
|
10 | #: E113
11 | print()
12 | print()
| ^
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
|
E11.py:14:1: SyntaxError: Expected a statement
|
12 | print()
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
| ^
15 | # 'httpd/unix-directory'
16 | create_date = False
|
E11.py:45:1: E112 Expected an indented block
   |
43 | #: E112
@ -21,4 +51,12 @@ E11.py:45:1: E112 Expected an indented block
47 | if False:
   |
E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
43 | #: E112
44 | if False: #
45 | print()
| ^
46 | #:
47 | if False:
|

View file

@ -1,6 +1,16 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
---
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
7 | #: E112
8 | if False:
9 | print()
| ^
10 | #: E113
11 | print()
|
E11.py:12:1: E113 Unexpected indentation
   |
10 | #: E113
@ -11,4 +21,32 @@ E11.py:12:1: E113 Unexpected indentation
14 | mimetype = 'application/x-directory'
   |
E11.py:12:1: SyntaxError: Unexpected indentation
|
10 | #: E113
11 | print()
12 | print()
| ^
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
|
E11.py:14:1: SyntaxError: Expected a statement
|
12 | print()
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
| ^
15 | # 'httpd/unix-directory'
16 | create_date = False
|
E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
43 | #: E112
44 | if False: #
45 | print()
| ^
46 | #:
47 | if False:
|

View file

@ -1,6 +1,36 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
---
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
7 | #: E112
8 | if False:
9 | print()
| ^
10 | #: E113
11 | print()
|
E11.py:12:1: SyntaxError: Unexpected indentation
|
10 | #: E113
11 | print()
12 | print()
| ^
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
|
E11.py:14:1: SyntaxError: Expected a statement
|
12 | print()
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
| ^
15 | # 'httpd/unix-directory'
16 | create_date = False
|
E11.py:15:1: E114 Indentation is not a multiple of 4 (comment)
   |
13 | #: E114 E116
@ -11,4 +41,12 @@ E11.py:15:1: E114 Indentation is not a multiple of 4 (comment)
17 | #: E116 E116 E116
   |
E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
43 | #: E112
44 | if False: #
45 | print()
| ^
46 | #:
47 | if False:
|

View file

@ -1,6 +1,36 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
---
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
7 | #: E112
8 | if False:
9 | print()
| ^
10 | #: E113
11 | print()
|
E11.py:12:1: SyntaxError: Unexpected indentation
|
10 | #: E113
11 | print()
12 | print()
| ^
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
|
E11.py:14:1: SyntaxError: Expected a statement
|
12 | print()
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
| ^
15 | # 'httpd/unix-directory'
16 | create_date = False
|
E11.py:30:1: E115 Expected an indented block (comment)
   |
28 | def start(self):
@ -61,4 +91,12 @@ E11.py:35:1: E115 Expected an indented block (comment)
37 | #: E117
   |
E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
43 | #: E112
44 | if False: #
45 | print()
| ^
46 | #:
47 | if False:
|

View file

@ -1,6 +1,36 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
---
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
7 | #: E112
8 | if False:
9 | print()
| ^
10 | #: E113
11 | print()
|
E11.py:12:1: SyntaxError: Unexpected indentation
|
10 | #: E113
11 | print()
12 | print()
| ^
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
|
E11.py:14:1: SyntaxError: Expected a statement
|
12 | print()
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
| ^
15 | # 'httpd/unix-directory'
16 | create_date = False
|
E11.py:15:1: E116 Unexpected indentation (comment)
   |
13 | #: E114 E116
@ -41,4 +71,12 @@ E11.py:26:1: E116 Unexpected indentation (comment)
28 | def start(self):
   |
E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
43 | #: E112
44 | if False: #
45 | print()
| ^
46 | #:
47 | if False:
|

View file

@ -11,6 +11,36 @@ E11.py:6:1: E117 Over-indented
8 | if False:
  |
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
7 | #: E112
8 | if False:
9 | print()
| ^
10 | #: E113
11 | print()
|
E11.py:12:1: SyntaxError: Unexpected indentation
|
10 | #: E113
11 | print()
12 | print()
| ^
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
|
E11.py:14:1: SyntaxError: Expected a statement
|
12 | print()
13 | #: E114 E116
14 | mimetype = 'application/x-directory'
| ^
15 | # 'httpd/unix-directory'
16 | create_date = False
|
E11.py:39:1: E117 Over-indented
   |
37 | #: E117
@ -31,4 +61,12 @@ E11.py:42:1: E117 Over-indented
44 | if False: #
   |
E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
43 | #: E112
44 | if False: #
45 | print()
| ^
46 | #:
47 | if False:
|

View file

@ -1,9 +0,0 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
---
E999.py:2:9: E999 SyntaxError: Expected an indented block after function definition
|
2 | def x():
| ^ E999
3 |
|

View file

@ -8,6 +8,22 @@ W19.py:1:1: W191 Indentation contains tabs
2 | multiline string with tab in it'''
  |
W19.py:1:1: SyntaxError: Unexpected indentation
|
1 | '''File starts with a tab
| ^^^^
2 | multiline string with tab in it'''
|
W19.py:5:1: SyntaxError: Expected a statement
|
4 | #: W191
5 | if False:
| ^
6 | print # indented with 1 tab
7 | #:
|
W19.py:6:1: W191 Indentation contains tabs
  |
4 | #: W191

View file

@ -1,4 +1,8 @@
---
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
---
E2_syntax_error.py:1:10: SyntaxError: Expected an expression
|
1 | a = (1 or)
| ^
|

View file

@ -2414,7 +2414,7 @@ mod tests {
    fn used_in_lambda() {
        flakes(
            r"import fu;
lambda: fu
",
            &[],
        );
@ -2433,7 +2433,7 @@ mod tests {
    fn used_in_slice_obj() {
        flakes(
            r#"import fu;
"meow"[::fu]
"#,
            &[],
        );
@ -3040,16 +3040,6 @@ mod tests {
            &[],
        );
flakes(
r#"
from interior import decorate
@decorate('value", &[]);
def f():
return "hello"
"#,
&[],
);
        flakes(
            r#"
@decorate

View file

@ -16,6 +16,7 @@ use ruff_notebook::NotebookError;
use ruff_python_ast::PySourceType;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
+use ruff_python_parser::ParseError;
use ruff_python_trivia::textwrap::dedent;
use ruff_source_file::{Locator, SourceFileBuilder};
use ruff_text_size::Ranged;
@ -26,7 +27,6 @@ use crate::linter::{check_path, LinterResult};
use crate::message::{Emitter, EmitterContext, Message, TextEmitter};
use crate::packaging::detect_package_root;
use crate::registry::AsRule;
-use crate::rules::pycodestyle::rules::syntax_error;
use crate::settings::types::UnsafeFixes;
use crate::settings::{flags, LinterSettings};
use crate::source_kind::SourceKind;
@ -188,7 +188,7 @@ pub(crate) fn test_contents<'a>(
    let LinterResult {
        data: fixed_diagnostics,
-        error: fixed_error,
+        ..
    } = check_path(
        path,
        None,
@ -203,25 +203,21 @@ pub(crate) fn test_contents<'a>(
        &parsed,
    );

-    if let Some(fixed_error) = fixed_error {
-        if !source_has_errors {
-            // Previous fix introduced a syntax error, abort
-            let fixes = print_diagnostics(diagnostics, path, source_kind);
-
-            let mut syntax_diagnostics = Vec::new();
-            syntax_error(&mut syntax_diagnostics, &fixed_error, &locator);
-            let syntax_errors = print_diagnostics(syntax_diagnostics, path, &transformed);
-            panic!(
-                r#"Fixed source has a syntax error where the source document does not. This is a bug in one of the generated fixes:
+    if !parsed.is_valid() && !source_has_errors {
+        // Previous fix introduced a syntax error, abort
+        let fixes = print_diagnostics(diagnostics, path, source_kind);
+        let syntax_errors =
+            print_syntax_errors(parsed.errors(), path, &locator, &transformed);
+        panic!(
+            r#"Fixed source has a syntax error where the source document does not. This is a bug in one of the generated fixes:
{syntax_errors}
Last generated fixes:
{fixes}
Source with applied fixes:
{}"#,
-                transformed.source_code()
-            );
-        }
+            transformed.source_code()
+        );
    }

    diagnostics = fixed_diagnostics;
@ -260,11 +256,40 @@ Source with applied fixes:
            Message::from_diagnostic(diagnostic, source_code.clone(), noqa)
        })
+        .chain(
+            parsed
+                .errors()
+                .iter()
+                .map(|parse_error| {
+                    Message::from_parse_error(parse_error, &locator, source_code.clone())
+                })
+        )
        .sorted()
        .collect();

    (messages, transformed)
}

+fn print_syntax_errors(
+    errors: &[ParseError],
+    path: &Path,
+    locator: &Locator,
+    source: &SourceKind,
+) -> String {
+    let filename = path.file_name().unwrap().to_string_lossy();
+    let source_file = SourceFileBuilder::new(filename.as_ref(), source.source_code()).finish();
+
+    let messages: Vec<_> = errors
+        .iter()
+        .map(|parse_error| Message::from_parse_error(parse_error, locator, source_file.clone()))
+        .collect();
+
+    if let Some(notebook) = source.as_ipy_notebook() {
+        print_jupyter_messages(&messages, path, notebook)
+    } else {
+        print_messages(&messages)
+    }
+}
+
fn print_diagnostics(diagnostics: Vec<Diagnostic>, path: &Path, source: &SourceKind) -> String {
    let filename = path.file_name().unwrap().to_string_lossy();
    let source_file = SourceFileBuilder::new(filename.as_ref(), source.source_code()).finish();

View file

@ -1,5 +1,6 @@
//! Access to the Ruff linting API for the LSP

+use ruff_python_parser::ParseError;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
@ -153,7 +154,10 @@ pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> Diagno
        .zip(noqa_edits)
        .map(|(diagnostic, noqa_edit)| {
            to_lsp_diagnostic(diagnostic, &noqa_edit, &source_kind, &index, encoding)
-        });
+        })
+        .chain(parsed.errors().iter().map(|parse_error| {
+            parse_error_to_lsp_diagnostic(parse_error, &source_kind, &index, encoding)
+        }));

    if let Some(notebook) = query.as_notebook() {
        for (index, diagnostic) in lsp_diagnostics {
@ -287,6 +291,45 @@ fn to_lsp_diagnostic(
    )
}
fn parse_error_to_lsp_diagnostic(
parse_error: &ParseError,
source_kind: &SourceKind,
index: &LineIndex,
encoding: PositionEncoding,
) -> (usize, lsp_types::Diagnostic) {
let range: lsp_types::Range;
let cell: usize;
if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
NotebookRange { cell, range } = parse_error.location.to_notebook_range(
source_kind.source_code(),
index,
notebook_index,
encoding,
);
} else {
cell = usize::default();
range = parse_error
.location
.to_range(source_kind.source_code(), index, encoding);
}
(
cell,
lsp_types::Diagnostic {
range,
severity: Some(lsp_types::DiagnosticSeverity::ERROR),
tags: None,
code: None,
code_description: None,
source: Some(DIAGNOSTIC_NAME.into()),
message: format!("SyntaxError: {}", &parse_error.error),
related_information: None,
data: None,
},
)
}
fn diagnostic_edit_range(
    range: TextRange,
    source_kind: &SourceKind,

View file

@ -968,9 +968,13 @@ impl LintConfiguration {
    if preview.mode.is_disabled() {
        for selection in deprecated_selectors.iter().sorted() {
            let (prefix, code) = selection.prefix_and_code();
-            warn_user_once_by_message!(
-                "Rule `{prefix}{code}` is deprecated and will be removed in a future release.",
-            );
+            let rule = format!("{prefix}{code}");
+            let mut message =
+                format!("Rule `{rule}` is deprecated and will be removed in a future release.");
+            if matches!(rule.as_str(), "E999") {
+                message.push_str(" Syntax errors will always be shown regardless of whether this rule is selected or not.");
+            }
+            warn_user_once_by_message!("{message}");
        }
    } else {
        let deprecated_selectors = deprecated_selectors.iter().sorted().collect::<Vec<_>>();

View file

@ -43,8 +43,8 @@ fn do_fuzz(case: &[u8]) -> Corpus {
    let mut warnings = HashMap::new();
-    for msg in linter_results.data {
-        let count: &mut usize = warnings.entry(msg.kind.name).or_default();
+    for msg in &linter_results.data {
+        let count: &mut usize = warnings.entry(msg.name()).or_default();
        *count += 1;
    }
@ -67,8 +67,8 @@ fn do_fuzz(case: &[u8]) -> Corpus {
        "formatter introduced a parse error"
    );

-    for msg in linter_results.data {
-        if let Some(count) = warnings.get_mut(&msg.kind.name) {
+    for msg in &linter_results.data {
+        if let Some(count) = warnings.get_mut(msg.name()) {
            if let Some(decremented) = count.checked_sub(1) {
                *count = decremented;
            } else {