Unify Message variants (#18051)

## Summary

This PR unifies the ruff `Message` enum variants for syntax errors and
rule violations into a single `Message` struct consisting of a shared
`db::Diagnostic` and some additional, optional fields used for some rule
violations.

This version of `Message` is nearly a drop-in replacement for
`ruff_diagnostics::Diagnostic`; replacing `ruff_diagnostics::Diagnostic`
entirely is the next step I have in mind for the refactor.

I think this is also a useful checkpoint because we could possibly add
some of these optional fields to the new `Diagnostic` type. I think
we've previously discussed wanting support for `Fix`es, but the other
fields seem less relevant, so we may just need to preserve the `Message`
wrapper for a bit longer.

## Test plan

Existing tests

---------

Co-authored-by: Micha Reiser <micha@reiser.io>
This commit is contained in:
Author: Brent Westbrook <36778786+ntBre@users.noreply.github.com> — 2025-05-19 13:34:04 -04:00, committed by GitHub
Parent: 236633cd42
Commit: d6009eb942
Signature: no known key found for this signature in database (GPG key ID: B5690EEEBB952194)
Stats: 27 changed files with 384 additions and 463 deletions

View file

@ -13,19 +13,19 @@ use itertools::Itertools;
use log::{debug, error}; use log::{debug, error};
use rayon::iter::ParallelIterator; use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge}; use rayon::iter::{IntoParallelIterator, ParallelBridge};
use ruff_linter::{codes::Rule, registry::AsRule}; use ruff_linter::codes::Rule;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
use ruff_cache::{CacheKey, CacheKeyHasher}; use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::Fix; use ruff_diagnostics::Fix;
use ruff_linter::message::{DiagnosticMessage, Message}; use ruff_linter::message::Message;
use ruff_linter::package::PackageRoot; use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user}; use ruff_linter::{VERSION, warn_user};
use ruff_macros::CacheKey; use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex; use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFileBuilder; use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize}; use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_workspace::Settings; use ruff_workspace::Settings;
use ruff_workspace::resolver::Resolver; use ruff_workspace::resolver::Resolver;
@ -348,16 +348,16 @@ impl FileCache {
lint.messages lint.messages
.iter() .iter()
.map(|msg| { .map(|msg| {
Message::Diagnostic(DiagnosticMessage { Message::diagnostic(
name: msg.rule.into(), msg.rule.into(),
body: msg.body.clone(), msg.body.clone(),
suggestion: msg.suggestion.clone(), msg.suggestion.clone(),
range: msg.range, msg.range,
fix: msg.fix.clone(), msg.fix.clone(),
file: file.clone(), msg.parent,
noqa_offset: msg.noqa_offset, file.clone(),
parent: msg.parent, msg.noqa_offset,
}) )
}) })
.collect() .collect()
}; };
@ -439,22 +439,22 @@ impl LintCacheData {
let messages = messages let messages = messages
.iter() .iter()
.filter_map(|message| message.as_diagnostic_message()) .filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
.map(|msg| { .map(|(rule, msg)| {
// Make sure that all message use the same source file. // Make sure that all message use the same source file.
assert_eq!( assert_eq!(
msg.file, msg.source_file(),
messages.first().unwrap().source_file(), messages.first().unwrap().source_file(),
"message uses a different source file" "message uses a different source file"
); );
CacheMessage { CacheMessage {
rule: msg.rule(), rule,
body: msg.body.clone(), body: msg.body().to_string(),
suggestion: msg.suggestion.clone(), suggestion: msg.suggestion().map(ToString::to_string),
range: msg.range, range: msg.range(),
parent: msg.parent, parent: msg.parent,
fix: msg.fix.clone(), fix: msg.fix().cloned(),
noqa_offset: msg.noqa_offset, noqa_offset: msg.noqa_offset(),
} }
}) })
.collect(); .collect();
@ -485,7 +485,7 @@ pub(super) struct CacheMessage {
#[bincode(with_serde)] #[bincode(with_serde)]
fix: Option<Fix>, fix: Option<Fix>,
#[bincode(with_serde)] #[bincode(with_serde)]
noqa_offset: TextSize, noqa_offset: Option<TextSize>,
} }
pub(crate) trait PackageCaches { pub(crate) trait PackageCaches {

View file

@ -20,7 +20,7 @@ use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags}; use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::{IOError, fs, warn_user_once}; use ruff_linter::{IOError, fs, warn_user_once};
use ruff_source_file::SourceFileBuilder; use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize}; use ruff_text_size::TextRange;
use ruff_workspace::resolver::{ use ruff_workspace::resolver::{
PyprojectConfig, ResolvedFile, match_exclusion, python_files_in_path, PyprojectConfig, ResolvedFile, match_exclusion, python_files_in_path,
}; };
@ -133,7 +133,7 @@ pub(crate) fn check(
vec![Message::from_diagnostic( vec![Message::from_diagnostic(
Diagnostic::new(IOError { message }, TextRange::default()), Diagnostic::new(IOError { message }, TextRange::default()),
dummy, dummy,
TextSize::default(), None,
)], )],
FxHashMap::default(), FxHashMap::default(),
) )

View file

@ -25,7 +25,7 @@ use ruff_linter::{IOError, fs};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex}; use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType}; use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder; use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize}; use ruff_text_size::TextRange;
use ruff_workspace::Settings; use ruff_workspace::Settings;
use crate::cache::{Cache, FileCacheKey, LintCacheData}; use crate::cache::{Cache, FileCacheKey, LintCacheData};
@ -71,7 +71,7 @@ impl Diagnostics {
TextRange::default(), TextRange::default(),
), ),
source_file, source_file,
TextSize::default(), None,
)], )],
FxHashMap::default(), FxHashMap::default(),
) )

View file

@ -1,5 +1,4 @@
use std::cmp::Reverse; use std::cmp::Reverse;
use std::fmt::Display;
use std::hash::Hash; use std::hash::Hash;
use std::io::Write; use std::io::Write;
@ -7,17 +6,17 @@ use anyhow::Result;
use bitflags::bitflags; use bitflags::bitflags;
use colored::Colorize; use colored::Colorize;
use itertools::{Itertools, iterate}; use itertools::{Itertools, iterate};
use ruff_linter::codes::NoqaCode;
use serde::Serialize; use serde::Serialize;
use ruff_linter::fs::relativize_path; use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel; use ruff_linter::logging::LogLevel;
use ruff_linter::message::{ use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, Message, MessageKind, PylintEmitter, JsonEmitter, JsonLinesEmitter, JunitEmitter, Message, PylintEmitter, RdjsonEmitter,
RdjsonEmitter, SarifEmitter, TextEmitter, SarifEmitter, TextEmitter,
}; };
use ruff_linter::notify_user; use ruff_linter::notify_user;
use ruff_linter::registry::Rule;
use ruff_linter::settings::flags::{self}; use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{OutputFormat, UnsafeFixes}; use ruff_linter::settings::types::{OutputFormat, UnsafeFixes};
@ -37,59 +36,12 @@ bitflags! {
#[derive(Serialize)] #[derive(Serialize)]
struct ExpandedStatistics { struct ExpandedStatistics {
code: Option<SerializeRuleAsCode>, code: Option<NoqaCode>,
name: SerializeMessageKindAsTitle, name: &'static str,
count: usize, count: usize,
fixable: bool, fixable: bool,
} }
#[derive(Copy, Clone)]
struct SerializeRuleAsCode(Rule);
impl Serialize for SerializeRuleAsCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.0.noqa_code().to_string())
}
}
impl Display for SerializeRuleAsCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0.noqa_code())
}
}
impl From<Rule> for SerializeRuleAsCode {
fn from(rule: Rule) -> Self {
Self(rule)
}
}
struct SerializeMessageKindAsTitle(MessageKind);
impl Serialize for SerializeMessageKindAsTitle {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(self.0.as_str())
}
}
impl Display for SerializeMessageKindAsTitle {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.0.as_str())
}
}
impl From<MessageKind> for SerializeMessageKindAsTitle {
fn from(kind: MessageKind) -> Self {
Self(kind)
}
}
pub(crate) struct Printer { pub(crate) struct Printer {
format: OutputFormat, format: OutputFormat,
log_level: LogLevel, log_level: LogLevel,
@ -350,21 +302,25 @@ impl Printer {
let statistics: Vec<ExpandedStatistics> = diagnostics let statistics: Vec<ExpandedStatistics> = diagnostics
.messages .messages
.iter() .iter()
.sorted_by_key(|message| (message.rule(), message.fixable())) .map(|message| (message.to_noqa_code(), message))
.fold(vec![], |mut acc: Vec<(&Message, usize)>, message| { .sorted_by_key(|(code, message)| (*code, message.fixable()))
if let Some((prev_message, count)) = acc.last_mut() { .fold(
if prev_message.rule() == message.rule() { vec![],
|mut acc: Vec<((Option<NoqaCode>, &Message), usize)>, (code, message)| {
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
if *prev_code == code {
*count += 1; *count += 1;
return acc; return acc;
} }
} }
acc.push((message, 1)); acc.push(((code, message), 1));
acc acc
}) },
)
.iter() .iter()
.map(|&(message, count)| ExpandedStatistics { .map(|&((code, message), count)| ExpandedStatistics {
code: message.rule().map(std::convert::Into::into), code,
name: message.kind().into(), name: message.name(),
count, count,
fixable: if let Some(fix) = message.fix() { fixable: if let Some(fix) = message.fix() {
fix.applies(self.unsafe_fixes.required_applicability()) fix.applies(self.unsafe_fixes.required_applicability())

View file

@ -563,6 +563,11 @@ impl Annotation {
&self.span &self.span
} }
/// Sets the span on this annotation.
pub fn set_span(&mut self, span: Span) {
self.span = span;
}
/// Returns the tags associated with this annotation. /// Returns the tags associated with this annotation.
pub fn get_tags(&self) -> &[DiagnosticTag] { pub fn get_tags(&self) -> &[DiagnosticTag] {
&self.tags &self.tags
@ -686,7 +691,7 @@ impl DiagnosticId {
/// ///
/// Note that this doesn't include the lint's category. It /// Note that this doesn't include the lint's category. It
/// only includes the lint's name. /// only includes the lint's name.
pub fn as_str(&self) -> &str { pub fn as_str(&self) -> &'static str {
match self { match self {
DiagnosticId::Panic => "panic", DiagnosticId::Panic => "panic",
DiagnosticId::Io => "io", DiagnosticId::Io => "io",

View file

@ -47,10 +47,14 @@ pub(crate) fn check_noqa(
// Remove any ignored diagnostics. // Remove any ignored diagnostics.
'outer: for (index, diagnostic) in diagnostics.iter().enumerate() { 'outer: for (index, diagnostic) in diagnostics.iter().enumerate() {
if matches!(diagnostic.rule(), Rule::BlanketNOQA) { let rule = diagnostic.rule();
if matches!(rule, Rule::BlanketNOQA) {
continue; continue;
} }
let code = rule.noqa_code();
match &exemption { match &exemption {
FileExemption::All(_) => { FileExemption::All(_) => {
// If the file is exempted, ignore all diagnostics. // If the file is exempted, ignore all diagnostics.
@ -59,7 +63,7 @@ pub(crate) fn check_noqa(
} }
FileExemption::Codes(codes) => { FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, ignore it. // If the diagnostic is ignored by a global exemption, ignore it.
if codes.contains(&&diagnostic.rule().noqa_code()) { if codes.contains(&&code) {
ignored_diagnostics.push(index); ignored_diagnostics.push(index);
continue; continue;
} }
@ -78,13 +82,13 @@ pub(crate) fn check_noqa(
{ {
let suppressed = match &directive_line.directive { let suppressed = match &directive_line.directive {
Directive::All(_) => { Directive::All(_) => {
directive_line.matches.push(diagnostic.rule().noqa_code()); directive_line.matches.push(code);
ignored_diagnostics.push(index); ignored_diagnostics.push(index);
true true
} }
Directive::Codes(directive) => { Directive::Codes(directive) => {
if directive.includes(diagnostic.rule()) { if directive.includes(code) {
directive_line.matches.push(diagnostic.rule().noqa_code()); directive_line.matches.push(code);
ignored_diagnostics.push(index); ignored_diagnostics.push(index);
true true
} else { } else {

View file

@ -10,7 +10,7 @@ use crate::registry::{AsRule, Linter};
use crate::rule_selector::is_single_rule_selector; use crate::rule_selector::is_single_rule_selector;
use crate::rules; use crate::rules;
#[derive(PartialEq, Eq, PartialOrd, Ord)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct NoqaCode(&'static str, &'static str); pub struct NoqaCode(&'static str, &'static str);
impl NoqaCode { impl NoqaCode {
@ -46,6 +46,15 @@ impl PartialEq<&str> for NoqaCode {
} }
} }
impl serde::Serialize for NoqaCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub enum RuleGroup { pub enum RuleGroup {
/// The rule is stable. /// The rule is stable.

View file

@ -606,7 +606,7 @@ mod tests {
use crate::fix::edits::{ use crate::fix::edits::{
add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon, add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
}; };
use crate::message::DiagnosticMessage; use crate::message::Message;
/// Parse the given source using [`Mode::Module`] and return the first statement. /// Parse the given source using [`Mode::Module`] and return the first statement.
fn parse_first_stmt(source: &str) -> Result<Stmt> { fn parse_first_stmt(source: &str) -> Result<Stmt> {
@ -745,16 +745,16 @@ x = 1 \
iter.next().ok_or(anyhow!("expected edits nonempty"))?, iter.next().ok_or(anyhow!("expected edits nonempty"))?,
iter, iter,
)); ));
DiagnosticMessage { Message::diagnostic(
name: diag.name, diag.name,
body: diag.body, diag.body,
suggestion: diag.suggestion, diag.suggestion,
range: diag.range, diag.range,
fix: diag.fix, diag.fix,
parent: diag.parent, diag.parent,
file: SourceFileBuilder::new("<filename>", "<code>").finish(), SourceFileBuilder::new("<filename>", "<code>").finish(),
noqa_offset: TextSize::default(), None,
} )
}; };
assert_eq!(apply_fixes([diag].iter(), &locator).code, expect); assert_eq!(apply_fixes([diag].iter(), &locator).code, expect);
Ok(()) Ok(())

View file

@ -8,8 +8,8 @@ use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::Locator; use crate::Locator;
use crate::linter::FixTable; use crate::linter::FixTable;
use crate::message::{DiagnosticMessage, Message}; use crate::message::Message;
use crate::registry::{AsRule, Rule}; use crate::registry::Rule;
use crate::settings::types::UnsafeFixes; use crate::settings::types::UnsafeFixes;
pub(crate) mod codemods; pub(crate) mod codemods;
@ -35,11 +35,9 @@ pub(crate) fn fix_file(
let mut with_fixes = messages let mut with_fixes = messages
.iter() .iter()
.filter_map(Message::as_diagnostic_message)
.filter(|message| { .filter(|message| {
message message
.fix .fix()
.as_ref()
.is_some_and(|fix| fix.applies(required_applicability)) .is_some_and(|fix| fix.applies(required_applicability))
}) })
.peekable(); .peekable();
@ -53,7 +51,7 @@ pub(crate) fn fix_file(
/// Apply a series of fixes. /// Apply a series of fixes.
fn apply_fixes<'a>( fn apply_fixes<'a>(
diagnostics: impl Iterator<Item = &'a DiagnosticMessage>, diagnostics: impl Iterator<Item = &'a Message>,
locator: &'a Locator<'a>, locator: &'a Locator<'a>,
) -> FixResult { ) -> FixResult {
let mut output = String::with_capacity(locator.len()); let mut output = String::with_capacity(locator.len());
@ -64,7 +62,8 @@ fn apply_fixes<'a>(
let mut source_map = SourceMap::default(); let mut source_map = SourceMap::default();
for (rule, fix) in diagnostics for (rule, fix) in diagnostics
.filter_map(|diagnostic| diagnostic.fix.as_ref().map(|fix| (diagnostic.rule(), fix))) .filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
.filter_map(|(rule, diagnostic)| diagnostic.fix().map(|fix| (rule, fix)))
.sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2)) .sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2))
{ {
let mut edits = fix let mut edits = fix
@ -164,29 +163,23 @@ mod tests {
use crate::Locator; use crate::Locator;
use crate::fix::{FixResult, apply_fixes}; use crate::fix::{FixResult, apply_fixes};
use crate::message::DiagnosticMessage; use crate::message::Message;
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile; use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
fn create_diagnostics( fn create_diagnostics(
filename: &str, filename: &str,
source: &str, source: &str,
edit: impl IntoIterator<Item = Edit>, edit: impl IntoIterator<Item = Edit>,
) -> Vec<DiagnosticMessage> { ) -> Vec<Message> {
// The choice of rule here is arbitrary.
edit.into_iter() edit.into_iter()
.map(|edit| { .map(|edit| {
let range = edit.range(); // The choice of rule here is arbitrary.
let diagnostic = Diagnostic::new(MissingNewlineAtEndOfFile, range); let diagnostic = Diagnostic::new(MissingNewlineAtEndOfFile, edit.range());
DiagnosticMessage { Message::from_diagnostic(
name: diagnostic.name, diagnostic.with_fix(Fix::safe_edit(edit)),
body: diagnostic.body, SourceFileBuilder::new(filename, source).finish(),
suggestion: diagnostic.suggestion, None,
range, )
fix: Some(Fix::safe_edit(edit)),
parent: None,
file: SourceFileBuilder::new(filename, source).finish(),
noqa_offset: TextSize::default(),
}
}) })
.collect() .collect()
} }

View file

@ -535,7 +535,7 @@ fn diagnostics_to_messages(
) )
.chain(diagnostics.into_iter().map(|diagnostic| { .chain(diagnostics.into_iter().map(|diagnostic| {
let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start()); let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset) Message::from_diagnostic(diagnostic, file.deref().clone(), Some(noqa_offset))
})) }))
.collect() .collect()
} }
@ -682,7 +682,7 @@ fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {
#[expect(clippy::print_stderr)] #[expect(clippy::print_stderr)]
fn report_failed_to_converge_error(path: &Path, transformed: &str, messages: &[Message]) { fn report_failed_to_converge_error(path: &Path, transformed: &str, messages: &[Message]) {
let codes = collect_rule_codes(messages.iter().filter_map(Message::rule)); let codes = collect_rule_codes(messages.iter().filter_map(Message::to_rule));
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
eprintln!( eprintln!(
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---", "{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",

View file

@ -33,8 +33,8 @@ impl Emitter for AzureEmitter {
line = location.line, line = location.line,
col = location.column, col = location.column,
code = message code = message
.rule() .to_noqa_code()
.map_or_else(String::new, |rule| format!("code={};", rule.noqa_code())), .map_or_else(String::new, |code| format!("code={code};")),
body = message.body(), body = message.body(),
)?; )?;
} }

View file

@ -33,8 +33,8 @@ impl Emitter for GithubEmitter {
writer, writer,
"::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::", "::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
code = message code = message
.rule() .to_noqa_code()
.map_or_else(String::new, |rule| format!(" ({})", rule.noqa_code())), .map_or_else(String::new, |code| format!(" ({code})")),
file = message.filename(), file = message.filename(),
row = source_location.line, row = source_location.line,
column = source_location.column, column = source_location.column,
@ -50,8 +50,8 @@ impl Emitter for GithubEmitter {
column = location.column, column = location.column,
)?; )?;
if let Some(rule) = message.rule() { if let Some(code) = message.to_noqa_code() {
write!(writer, " {}", rule.noqa_code())?; write!(writer, " {code}")?;
} }
writeln!(writer, " {}", message.body())?; writeln!(writer, " {}", message.body())?;

View file

@ -90,8 +90,8 @@ impl Serialize for SerializedMessages<'_> {
} }
fingerprints.insert(message_fingerprint); fingerprints.insert(message_fingerprint);
let (description, check_name) = if let Some(rule) = message.rule() { let (description, check_name) = if let Some(code) = message.to_noqa_code() {
(message.body().to_string(), rule.noqa_code().to_string()) (message.body().to_string(), code.to_string())
} else { } else {
let description = message.body(); let description = message.body();
let description_without_prefix = description let description_without_prefix = description

View file

@ -81,8 +81,8 @@ pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext)
} }
json!({ json!({
"code": message.rule().map(|rule| rule.noqa_code().to_string()), "code": message.to_noqa_code().map(|code| code.to_string()),
"url": message.rule().and_then(|rule| rule.url()), "url": message.to_rule().and_then(|rule| rule.url()),
"message": message.body(), "message": message.body(),
"fix": fix, "fix": fix,
"cell": notebook_cell_index, "cell": notebook_cell_index,

View file

@ -59,8 +59,8 @@ impl Emitter for JunitEmitter {
body = message.body() body = message.body()
)); ));
let mut case = TestCase::new( let mut case = TestCase::new(
if let Some(rule) = message.rule() { if let Some(code) = message.to_noqa_code() {
format!("org.ruff.{}", rule.noqa_code()) format!("org.ruff.{code}")
} else { } else {
"org.ruff".to_string() "org.ruff".to_string()
}, },

View file

@ -1,10 +1,9 @@
use std::borrow::Cow;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::io::Write; use std::io::Write;
use std::ops::Deref; use std::ops::Deref;
use ruff_db::diagnostic::{self as db, Annotation, DiagnosticId, Severity, Span}; use ruff_db::diagnostic::{self as db, Annotation, DiagnosticId, LintName, Severity, Span};
use ruff_python_parser::semantic_errors::SemanticSyntaxError; use ruff_python_parser::semantic_errors::SemanticSyntaxError;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@ -26,8 +25,9 @@ pub use sarif::SarifEmitter;
pub use text::TextEmitter; pub use text::TextEmitter;
use crate::Locator; use crate::Locator;
use crate::codes::NoqaCode;
use crate::logging::DisplayParseErrorType; use crate::logging::DisplayParseErrorType;
use crate::registry::{AsRule, Rule}; use crate::registry::Rule;
mod azure; mod azure;
mod diff; mod diff;
@ -43,39 +43,23 @@ mod sarif;
mod text; mod text;
/// Message represents either a diagnostic message corresponding to a rule violation or a syntax /// Message represents either a diagnostic message corresponding to a rule violation or a syntax
/// error message raised by the parser. /// error message.
///
/// All of the information for syntax errors is captured in the underlying [`db::Diagnostic`], while
/// rule violations can have the additional optional fields like fixes, suggestions, and (parent)
/// `noqa` offsets.
///
/// For diagnostic messages, the [`db::Diagnostic`]'s primary message contains the
/// [`Diagnostic::body`], and the primary annotation optionally contains the suggestion accompanying
/// a fix. The `db::Diagnostic::id` field contains the kebab-case lint name derived from the `Rule`.
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub enum Message { pub struct Message {
Diagnostic(DiagnosticMessage), pub diagnostic: db::Diagnostic,
SyntaxError(db::Diagnostic),
}
/// A diagnostic message corresponding to a rule violation. // these fields are specific to rule violations
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DiagnosticMessage {
pub name: &'static str,
pub body: String,
pub suggestion: Option<String>,
pub range: TextRange,
pub fix: Option<Fix>, pub fix: Option<Fix>,
pub parent: Option<TextSize>, pub parent: Option<TextSize>,
pub file: SourceFile, pub(crate) noqa_offset: Option<TextSize>,
pub noqa_offset: TextSize,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum MessageKind {
Diagnostic(Rule),
SyntaxError,
}
impl MessageKind {
pub fn as_str(&self) -> &str {
match self {
MessageKind::Diagnostic(rule) => rule.as_ref(),
MessageKind::SyntaxError => "syntax-error",
}
}
} }
impl Message { impl Message {
@ -84,28 +68,72 @@ impl Message {
range: TextRange, range: TextRange,
file: SourceFile, file: SourceFile,
) -> Message { ) -> Message {
let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, ""); let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
let span = Span::from(file).with_range(range); let span = Span::from(file).with_range(range);
diag.annotate(Annotation::primary(span).message(message)); diag.annotate(Annotation::primary(span));
Self::SyntaxError(diag) Self {
diagnostic: diag,
fix: None,
parent: None,
noqa_offset: None,
}
}
#[expect(clippy::too_many_arguments)]
pub fn diagnostic(
name: &'static str,
body: String,
suggestion: Option<String>,
range: TextRange,
fix: Option<Fix>,
parent: Option<TextSize>,
file: SourceFile,
noqa_offset: Option<TextSize>,
) -> Message {
let mut diagnostic = db::Diagnostic::new(
DiagnosticId::Lint(LintName::of(name)),
Severity::Error,
body,
);
let span = Span::from(file).with_range(range);
let mut annotation = Annotation::primary(span);
if let Some(suggestion) = suggestion {
annotation = annotation.message(suggestion);
}
diagnostic.annotate(annotation);
Message {
diagnostic,
fix,
parent,
noqa_offset,
}
} }
/// Create a [`Message`] from the given [`Diagnostic`] corresponding to a rule violation. /// Create a [`Message`] from the given [`Diagnostic`] corresponding to a rule violation.
pub fn from_diagnostic( pub fn from_diagnostic(
diagnostic: Diagnostic, diagnostic: Diagnostic,
file: SourceFile, file: SourceFile,
noqa_offset: TextSize, noqa_offset: Option<TextSize>,
) -> Message { ) -> Message {
Message::Diagnostic(DiagnosticMessage { let Diagnostic {
range: diagnostic.range(), name,
name: diagnostic.name, body,
body: diagnostic.body, suggestion,
suggestion: diagnostic.suggestion, range,
fix: diagnostic.fix, fix,
parent: diagnostic.parent, parent,
} = diagnostic;
Self::diagnostic(
name,
body,
suggestion,
range,
fix,
parent,
file, file,
noqa_offset, noqa_offset,
}) )
} }
/// Create a [`Message`] from the given [`ParseError`]. /// Create a [`Message`] from the given [`ParseError`].
@ -157,83 +185,38 @@ impl Message {
) )
} }
pub const fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> {
match self {
Message::Diagnostic(m) => Some(m),
Message::SyntaxError(_) => None,
}
}
pub fn into_diagnostic_message(self) -> Option<DiagnosticMessage> {
match self {
Message::Diagnostic(m) => Some(m),
Message::SyntaxError(_) => None,
}
}
/// Returns `true` if `self` is a diagnostic message.
pub const fn is_diagnostic_message(&self) -> bool {
matches!(self, Message::Diagnostic(_))
}
/// Returns `true` if `self` is a syntax error message. /// Returns `true` if `self` is a syntax error message.
pub fn is_syntax_error(&self) -> bool { pub fn is_syntax_error(&self) -> bool {
match self { self.diagnostic.id().is_invalid_syntax()
Message::Diagnostic(_) => false,
Message::SyntaxError(diag) => diag.id().is_invalid_syntax(),
}
}
/// Returns a message kind.
pub fn kind(&self) -> MessageKind {
match self {
Message::Diagnostic(m) => MessageKind::Diagnostic(m.rule()),
Message::SyntaxError(_) => MessageKind::SyntaxError,
}
} }
/// Returns the name used to represent the diagnostic. /// Returns the name used to represent the diagnostic.
pub fn name(&self) -> &str { pub fn name(&self) -> &'static str {
match self { if self.is_syntax_error() {
Message::Diagnostic(m) => m.name, "syntax-error"
Message::SyntaxError(_) => "SyntaxError", } else {
self.diagnostic.id().as_str()
} }
} }
/// Returns the message body to display to the user. /// Returns the message body to display to the user.
pub fn body(&self) -> &str { pub fn body(&self) -> &str {
match self { self.diagnostic.primary_message()
Message::Diagnostic(m) => &m.body,
Message::SyntaxError(m) => m
.primary_annotation()
.expect("Expected a primary annotation for a ruff diagnostic")
.get_message()
.expect("Expected a message for a ruff diagnostic"),
}
} }
/// Returns the fix suggestion for the violation. /// Returns the fix suggestion for the violation.
pub fn suggestion(&self) -> Option<&str> { pub fn suggestion(&self) -> Option<&str> {
match self { self.diagnostic.primary_annotation()?.get_message()
Message::Diagnostic(m) => m.suggestion.as_deref(),
Message::SyntaxError(_) => None,
}
} }
/// Returns the offset at which the `noqa` comment will be placed if it's a diagnostic message. /// Returns the offset at which the `noqa` comment will be placed if it's a diagnostic message.
pub fn noqa_offset(&self) -> Option<TextSize> { pub fn noqa_offset(&self) -> Option<TextSize> {
match self { self.noqa_offset
Message::Diagnostic(m) => Some(m.noqa_offset),
Message::SyntaxError(_) => None,
}
} }
/// Returns the [`Fix`] for the message, if there is any. /// Returns the [`Fix`] for the message, if there is any.
pub fn fix(&self) -> Option<&Fix> { pub fn fix(&self) -> Option<&Fix> {
match self { self.fix.as_ref()
Message::Diagnostic(m) => m.fix.as_ref(),
Message::SyntaxError(_) => None,
}
} }
/// Returns `true` if the message contains a [`Fix`]. /// Returns `true` if the message contains a [`Fix`].
@ -242,56 +225,64 @@ impl Message {
} }
/// Returns the [`Rule`] corresponding to the diagnostic message. /// Returns the [`Rule`] corresponding to the diagnostic message.
pub fn rule(&self) -> Option<Rule> { pub fn to_rule(&self) -> Option<Rule> {
match self { if self.is_syntax_error() {
Message::Diagnostic(m) => Some(m.rule()), None
Message::SyntaxError(_) => None, } else {
Some(self.name().parse().expect("Expected a valid rule name"))
} }
} }
/// Returns the [`NoqaCode`] corresponding to the diagnostic message.
pub fn to_noqa_code(&self) -> Option<NoqaCode> {
self.to_rule().map(|rule| rule.noqa_code())
}
/// Returns the URL for the rule documentation, if it exists.
pub fn to_url(&self) -> Option<String> {
// TODO(brent) Rule::url calls Rule::explanation, which calls ViolationMetadata::explain,
// which when derived (seems always to be the case?) is always `Some`, so I think it's
// pretty safe to inline the Rule::url implementation here, using `self.name()`:
//
// format!("{}/rules/{}", env!("CARGO_PKG_HOMEPAGE"), self.name())
//
// at least in the case of diagnostics, I guess syntax errors will return None
self.to_rule().and_then(|rule| rule.url())
}
/// Returns the filename for the message. /// Returns the filename for the message.
pub fn filename(&self) -> Cow<'_, str> { pub fn filename(&self) -> String {
match self { self.diagnostic
Message::Diagnostic(m) => Cow::Borrowed(m.file.name()), .expect_primary_span()
Message::SyntaxError(diag) => Cow::Owned(
diag.expect_primary_span()
.expect_ruff_file() .expect_ruff_file()
.name() .name()
.to_string(), .to_string()
),
}
} }
/// Computes the start source location for the message. /// Computes the start source location for the message.
pub fn compute_start_location(&self) -> LineColumn { pub fn compute_start_location(&self) -> LineColumn {
match self { self.diagnostic
Message::Diagnostic(m) => m.file.to_source_code().line_column(m.range.start()),
Message::SyntaxError(diag) => diag
.expect_primary_span() .expect_primary_span()
.expect_ruff_file() .expect_ruff_file()
.to_source_code() .to_source_code()
.line_column(self.start()), .line_column(self.start())
}
} }
/// Computes the end source location for the message. /// Computes the end source location for the message.
pub fn compute_end_location(&self) -> LineColumn { pub fn compute_end_location(&self) -> LineColumn {
match self { self.diagnostic
Message::Diagnostic(m) => m.file.to_source_code().line_column(m.range.end()),
Message::SyntaxError(diag) => diag
.expect_primary_span() .expect_primary_span()
.expect_ruff_file() .expect_ruff_file()
.to_source_code() .to_source_code()
.line_column(self.end()), .line_column(self.end())
}
} }
/// Returns the [`SourceFile`] which the message belongs to. /// Returns the [`SourceFile`] which the message belongs to.
pub fn source_file(&self) -> SourceFile { pub fn source_file(&self) -> SourceFile {
match self { self.diagnostic
Message::Diagnostic(m) => m.file.clone(), .expect_primary_span()
Message::SyntaxError(m) => m.expect_primary_span().expect_ruff_file().clone(), .expect_ruff_file()
} .clone()
} }
} }
@ -309,13 +300,10 @@ impl PartialOrd for Message {
impl Ranged for Message { impl Ranged for Message {
fn range(&self) -> TextRange { fn range(&self) -> TextRange {
match self { self.diagnostic
Message::Diagnostic(m) => m.range,
Message::SyntaxError(m) => m
.expect_primary_span() .expect_primary_span()
.range() .range()
.expect("Expected range for ruff span"), .expect("Expected range for ruff span")
}
} }
} }
@ -390,7 +378,7 @@ mod tests {
use ruff_text_size::{TextRange, TextSize}; use ruff_text_size::{TextRange, TextSize};
use crate::Locator; use crate::Locator;
use crate::message::{DiagnosticMessage, Emitter, EmitterContext, Message}; use crate::message::{Emitter, EmitterContext, Message};
pub(super) fn create_syntax_error_messages() -> Vec<Message> { pub(super) fn create_syntax_error_messages() -> Vec<Message> {
let source = r"from os import let source = r"from os import
@ -428,54 +416,50 @@ def fibonacci(n):
let fib_source = SourceFileBuilder::new("fib.py", fib).finish(); let fib_source = SourceFileBuilder::new("fib.py", fib).finish();
let unused_import_start = TextSize::from(7); let unused_import_start = TextSize::from(7);
let unused_import = DiagnosticMessage { let unused_import = Message::diagnostic(
name: "unused-import", "unused-import",
body: "`os` imported but unused".to_string(), "`os` imported but unused".to_string(),
suggestion: Some("Remove unused import: `os`".to_string()), Some("Remove unused import: `os`".to_string()),
range: TextRange::new(unused_import_start, TextSize::from(9)), TextRange::new(unused_import_start, TextSize::from(9)),
fix: Some(Fix::unsafe_edit(Edit::range_deletion(TextRange::new( Some(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(0), TextSize::from(0),
TextSize::from(10), TextSize::from(10),
)))), )))),
parent: None, None,
noqa_offset: unused_import_start, fib_source.clone(),
file: fib_source.clone(), Some(unused_import_start),
}; );
let unused_variable_start = TextSize::from(94); let unused_variable_start = TextSize::from(94);
let unused_variable = DiagnosticMessage { let unused_variable = Message::diagnostic(
name: "unused-variable", "unused-variable",
body: "Local variable `x` is assigned to but never used".to_string(), "Local variable `x` is assigned to but never used".to_string(),
suggestion: Some("Remove assignment to unused variable `x`".to_string()), Some("Remove assignment to unused variable `x`".to_string()),
range: TextRange::new(unused_variable_start, TextSize::from(95)), TextRange::new(unused_variable_start, TextSize::from(95)),
fix: Some(Fix::unsafe_edit(Edit::deletion( Some(Fix::unsafe_edit(Edit::deletion(
TextSize::from(94), TextSize::from(94),
TextSize::from(99), TextSize::from(99),
))), ))),
parent: None, None,
noqa_offset: unused_variable_start, fib_source,
file: fib_source, Some(unused_variable_start),
}; );
let file_2 = r"if a == 1: pass"; let file_2 = r"if a == 1: pass";
let undefined_name_start = TextSize::from(3); let undefined_name_start = TextSize::from(3);
let undefined_name = DiagnosticMessage { let undefined_name = Message::diagnostic(
name: "undefined-name", "undefined-name",
body: "Undefined name `a`".to_string(), "Undefined name `a`".to_string(),
suggestion: None, None,
range: TextRange::new(undefined_name_start, TextSize::from(4)), TextRange::new(undefined_name_start, TextSize::from(4)),
fix: None, None,
parent: None, None,
noqa_offset: undefined_name_start, SourceFileBuilder::new("undef.py", file_2).finish(),
file: SourceFileBuilder::new("undef.py", file_2).finish(), Some(undefined_name_start),
}; );
vec![ vec![unused_import, unused_variable, undefined_name]
Message::Diagnostic(unused_import),
Message::Diagnostic(unused_variable),
Message::Diagnostic(undefined_name),
]
} }
pub(super) fn create_notebook_messages() -> (Vec<Message>, FxHashMap<String, NotebookIndex>) { pub(super) fn create_notebook_messages() -> (Vec<Message>, FxHashMap<String, NotebookIndex>) {
@ -494,49 +478,49 @@ def foo():
let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish(); let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();
let unused_import_os_start = TextSize::from(16); let unused_import_os_start = TextSize::from(16);
let unused_import_os = DiagnosticMessage { let unused_import_os = Message::diagnostic(
name: "unused-import", "unused-import",
body: "`os` imported but unused".to_string(), "`os` imported but unused".to_string(),
suggestion: Some("Remove unused import: `os`".to_string()), Some("Remove unused import: `os`".to_string()),
range: TextRange::new(unused_import_os_start, TextSize::from(18)), TextRange::new(unused_import_os_start, TextSize::from(18)),
fix: Some(Fix::safe_edit(Edit::range_deletion(TextRange::new( Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(9), TextSize::from(9),
TextSize::from(19), TextSize::from(19),
)))), )))),
parent: None, None,
file: notebook_source.clone(), notebook_source.clone(),
noqa_offset: unused_import_os_start, Some(unused_import_os_start),
}; );
let unused_import_math_start = TextSize::from(35); let unused_import_math_start = TextSize::from(35);
let unused_import_math = DiagnosticMessage { let unused_import_math = Message::diagnostic(
name: "unused-import", "unused-import",
body: "`math` imported but unused".to_string(), "`math` imported but unused".to_string(),
suggestion: Some("Remove unused import: `math`".to_string()), Some("Remove unused import: `math`".to_string()),
range: TextRange::new(unused_import_math_start, TextSize::from(39)), TextRange::new(unused_import_math_start, TextSize::from(39)),
fix: Some(Fix::safe_edit(Edit::range_deletion(TextRange::new( Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(28), TextSize::from(28),
TextSize::from(40), TextSize::from(40),
)))), )))),
parent: None, None,
file: notebook_source.clone(), notebook_source.clone(),
noqa_offset: unused_import_math_start, Some(unused_import_math_start),
}; );
let unused_variable_start = TextSize::from(98); let unused_variable_start = TextSize::from(98);
let unused_variable = DiagnosticMessage { let unused_variable = Message::diagnostic(
name: "unused-variable", "unused-variable",
body: "Local variable `x` is assigned to but never used".to_string(), "Local variable `x` is assigned to but never used".to_string(),
suggestion: Some("Remove assignment to unused variable `x`".to_string()), Some("Remove assignment to unused variable `x`".to_string()),
range: TextRange::new(unused_variable_start, TextSize::from(99)), TextRange::new(unused_variable_start, TextSize::from(99)),
fix: Some(Fix::unsafe_edit(Edit::deletion( Some(Fix::unsafe_edit(Edit::deletion(
TextSize::from(94), TextSize::from(94),
TextSize::from(104), TextSize::from(104),
))), ))),
parent: None, None,
file: notebook_source, notebook_source,
noqa_offset: unused_variable_start, Some(unused_variable_start),
}; );
let mut notebook_indexes = FxHashMap::default(); let mut notebook_indexes = FxHashMap::default();
notebook_indexes.insert( notebook_indexes.insert(
@ -570,11 +554,7 @@ def foo():
); );
( (
vec![ vec![unused_import_os, unused_import_math, unused_variable],
Message::Diagnostic(unused_import_os),
Message::Diagnostic(unused_import_math),
Message::Diagnostic(unused_variable),
],
notebook_indexes, notebook_indexes,
) )
} }

View file

@ -26,12 +26,8 @@ impl Emitter for PylintEmitter {
message.compute_start_location().line message.compute_start_location().line
}; };
let body = if let Some(rule) = message.rule() { let body = if let Some(code) = message.to_noqa_code() {
format!( format!("[{code}] {body}", body = message.body())
"[{code}] {body}",
code = rule.noqa_code(),
body = message.body()
)
} else { } else {
message.body().to_string() message.body().to_string()
}; };

View file

@ -71,8 +71,8 @@ fn message_to_rdjson_value(message: &Message) -> Value {
"range": rdjson_range(start_location, end_location), "range": rdjson_range(start_location, end_location),
}, },
"code": { "code": {
"value": message.rule().map(|rule| rule.noqa_code().to_string()), "value": message.to_noqa_code().map(|code| code.to_string()),
"url": message.rule().and_then(|rule| rule.url()), "url": message.to_url(),
}, },
"suggestions": rdjson_suggestions(fix.edits(), &source_code), "suggestions": rdjson_suggestions(fix.edits(), &source_code),
}) })
@ -84,8 +84,8 @@ fn message_to_rdjson_value(message: &Message) -> Value {
"range": rdjson_range(start_location, end_location), "range": rdjson_range(start_location, end_location),
}, },
"code": { "code": {
"value": message.rule().map(|rule| rule.noqa_code().to_string()), "value": message.to_noqa_code().map(|code| code.to_string()),
"url": message.rule().and_then(|rule| rule.url()), "url": message.to_url(),
}, },
}) })
} }

View file

@ -123,7 +123,7 @@ impl SarifResult {
let end_location = message.compute_end_location(); let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename()); let path = normalize_path(&*message.filename());
Ok(Self { Ok(Self {
rule: message.rule(), rule: message.to_rule(),
level: "error".to_string(), level: "error".to_string(),
message: message.body().to_string(), message: message.body().to_string(),
uri: url::Url::from_file_path(&path) uri: url::Url::from_file_path(&path)
@ -143,7 +143,7 @@ impl SarifResult {
let end_location = message.compute_end_location(); let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename()); let path = normalize_path(&*message.filename());
Ok(Self { Ok(Self {
rule: message.rule(), rule: message.to_rule(),
level: "error".to_string(), level: "error".to_string(),
message: message.body().to_string(), message: message.body().to_string(),
uri: path.display().to_string(), uri: path.display().to_string(),

View file

@ -151,8 +151,8 @@ impl Display for RuleCodeAndBody<'_> {
if let Some(fix) = self.message.fix() { if let Some(fix) = self.message.fix() {
// Do not display an indicator for inapplicable fixes // Do not display an indicator for inapplicable fixes
if fix.applies(self.unsafe_fixes.required_applicability()) { if fix.applies(self.unsafe_fixes.required_applicability()) {
if let Some(rule) = self.message.rule() { if let Some(code) = self.message.to_noqa_code() {
write!(f, "{} ", rule.noqa_code().to_string().red().bold())?; write!(f, "{} ", code.to_string().red().bold())?;
} }
return write!( return write!(
f, f,
@ -164,11 +164,11 @@ impl Display for RuleCodeAndBody<'_> {
} }
} }
if let Some(rule) = self.message.rule() { if let Some(code) = self.message.to_noqa_code() {
write!( write!(
f, f,
"{code} {body}", "{code} {body}",
code = rule.noqa_code().to_string().red().bold(), code = code.to_string().red().bold(),
body = self.message.body(), body = self.message.body(),
) )
} else { } else {
@ -254,8 +254,8 @@ impl Display for MessageCodeFrame<'_> {
let label = self let label = self
.message .message
.rule() .to_noqa_code()
.map_or_else(String::new, |rule| rule.noqa_code().to_string()); .map_or_else(String::new, |code| code.to_string());
let line_start = self.notebook_index.map_or_else( let line_start = self.notebook_index.map_or_else(
|| start_index.get(), || start_index.get(),

View file

@ -18,7 +18,7 @@ use crate::Locator;
use crate::codes::NoqaCode; use crate::codes::NoqaCode;
use crate::fs::relativize_path; use crate::fs::relativize_path;
use crate::message::Message; use crate::message::Message;
use crate::registry::{AsRule, Rule, RuleSet}; use crate::registry::{Rule, RuleSet};
use crate::rule_redirects::get_redirect_target; use crate::rule_redirects::get_redirect_target;
/// Generates an array of edits that matches the length of `messages`. /// Generates an array of edits that matches the length of `messages`.
@ -105,8 +105,7 @@ impl Codes<'_> {
/// Returns `true` if the string list of `codes` includes `code` (or an alias /// Returns `true` if the string list of `codes` includes `code` (or an alias
/// thereof). /// thereof).
pub(crate) fn includes(&self, needle: Rule) -> bool { pub(crate) fn includes(&self, needle: NoqaCode) -> bool {
let needle = needle.noqa_code();
self.iter() self.iter()
.any(|code| needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str())) .any(|code| needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str()))
} }
@ -140,7 +139,7 @@ pub(crate) fn rule_is_ignored(
Ok(Some(NoqaLexerOutput { Ok(Some(NoqaLexerOutput {
directive: Directive::Codes(codes), directive: Directive::Codes(codes),
.. ..
})) => codes.includes(code), })) => codes.includes(code.noqa_code()),
_ => false, _ => false,
} }
} }
@ -846,11 +845,13 @@ fn find_noqa_comments<'a>(
// Mark any non-ignored diagnostics. // Mark any non-ignored diagnostics.
for message in messages { for message in messages {
let Message::Diagnostic(diagnostic) = message else { let Some(rule) = message.to_rule() else {
comments_by_line.push(None); comments_by_line.push(None);
continue; continue;
}; };
let code = rule.noqa_code();
match &exemption { match &exemption {
FileExemption::All(_) => { FileExemption::All(_) => {
// If the file is exempted, don't add any noqa directives. // If the file is exempted, don't add any noqa directives.
@ -859,7 +860,7 @@ fn find_noqa_comments<'a>(
} }
FileExemption::Codes(codes) => { FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, don't add a noqa directive. // If the diagnostic is ignored by a global exemption, don't add a noqa directive.
if codes.contains(&&diagnostic.rule().noqa_code()) { if codes.contains(&&code) {
comments_by_line.push(None); comments_by_line.push(None);
continue; continue;
} }
@ -867,7 +868,7 @@ fn find_noqa_comments<'a>(
} }
// Is the violation ignored by a `noqa` directive on the parent line? // Is the violation ignored by a `noqa` directive on the parent line?
if let Some(parent) = diagnostic.parent { if let Some(parent) = message.parent {
if let Some(directive_line) = if let Some(directive_line) =
directives.find_line_with_directive(noqa_line_for.resolve(parent)) directives.find_line_with_directive(noqa_line_for.resolve(parent))
{ {
@ -877,7 +878,7 @@ fn find_noqa_comments<'a>(
continue; continue;
} }
Directive::Codes(codes) => { Directive::Codes(codes) => {
if codes.includes(diagnostic.rule()) { if codes.includes(code) {
comments_by_line.push(None); comments_by_line.push(None);
continue; continue;
} }
@ -886,9 +887,7 @@ fn find_noqa_comments<'a>(
} }
} }
let noqa_offset = noqa_line_for.resolve(diagnostic.range.start()); let noqa_offset = noqa_line_for.resolve(message.range().start());
let rule = diagnostic.rule();
// Or ignored by the directive itself? // Or ignored by the directive itself?
if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) { if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
@ -898,7 +897,7 @@ fn find_noqa_comments<'a>(
continue; continue;
} }
directive @ Directive::Codes(codes) => { directive @ Directive::Codes(codes) => {
if !codes.includes(rule) { if !codes.includes(code) {
comments_by_line.push(Some(NoqaComment { comments_by_line.push(Some(NoqaComment {
line: directive_line.start(), line: directive_line.start(),
rule, rule,
@ -1260,7 +1259,7 @@ mod tests {
) -> Message { ) -> Message {
let noqa_offset = diagnostic.start(); let noqa_offset = diagnostic.start();
let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish(); let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish();
Message::from_diagnostic(diagnostic, file, noqa_offset) Message::from_diagnostic(diagnostic, file, Some(noqa_offset))
} }
#[test] #[test]

View file

@ -30,11 +30,7 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -
); );
if settings.rules.enabled(Rule::IOError) { if settings.rules.enabled(Rule::IOError) {
let diagnostic = Diagnostic::new(IOError { message }, TextRange::default()); let diagnostic = Diagnostic::new(IOError { message }, TextRange::default());
messages.push(Message::from_diagnostic( messages.push(Message::from_diagnostic(diagnostic, source_file, None));
diagnostic,
source_file,
TextSize::default(),
));
} else { } else {
warn!( warn!(
"{}{}{} {message}", "{}{}{} {message}",
@ -56,11 +52,7 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -
if settings.rules.enabled(Rule::InvalidPyprojectToml) { if settings.rules.enabled(Rule::InvalidPyprojectToml) {
let toml_err = err.message().to_string(); let toml_err = err.message().to_string();
let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range); let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
messages.push(Message::from_diagnostic( messages.push(Message::from_diagnostic(diagnostic, source_file, None));
diagnostic,
source_file,
TextSize::default(),
));
} }
messages messages

View file

@ -24,7 +24,7 @@ mod tests {
use crate::Locator; use crate::Locator;
use crate::linter::check_path; use crate::linter::check_path;
use crate::message::Message; use crate::message::Message;
use crate::registry::{AsRule, Linter, Rule}; use crate::registry::{Linter, Rule};
use crate::rules::isort; use crate::rules::isort;
use crate::rules::pyflakes; use crate::rules::pyflakes;
use crate::settings::types::PreviewMode; use crate::settings::types::PreviewMode;
@ -776,8 +776,7 @@ mod tests {
messages.sort_by_key(Ranged::start); messages.sort_by_key(Ranged::start);
let actual = messages let actual = messages
.iter() .iter()
.filter_map(Message::as_diagnostic_message) .filter_map(Message::to_rule)
.map(AsRule::rule)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
assert_eq!(actual, expected); assert_eq!(actual, expected);
} }

View file

@ -7,6 +7,7 @@ use std::path::Path;
#[cfg(not(fuzzing))] #[cfg(not(fuzzing))]
use anyhow::Result; use anyhow::Result;
use itertools::Itertools; use itertools::Itertools;
use ruff_text_size::Ranged;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use ruff_diagnostics::{Applicability, FixAvailability}; use ruff_diagnostics::{Applicability, FixAvailability};
@ -25,7 +26,6 @@ use crate::linter::check_path;
use crate::message::{Emitter, EmitterContext, Message, TextEmitter}; use crate::message::{Emitter, EmitterContext, Message, TextEmitter};
use crate::package::PackageRoot; use crate::package::PackageRoot;
use crate::packaging::detect_package_root; use crate::packaging::detect_package_root;
use crate::registry::AsRule;
use crate::settings::types::UnsafeFixes; use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, flags}; use crate::settings::{LinterSettings, flags};
use crate::source_kind::SourceKind; use crate::source_kind::SourceKind;
@ -233,10 +233,9 @@ Source with applied fixes:
let messages = messages let messages = messages
.into_iter() .into_iter()
.filter_map(Message::into_diagnostic_message) .filter_map(|msg| Some((msg.to_rule()?, msg)))
.map(|mut diagnostic| { .map(|(rule, mut diagnostic)| {
let rule = diagnostic.rule(); let fixable = diagnostic.fix().is_some_and(|fix| {
let fixable = diagnostic.fix.as_ref().is_some_and(|fix| {
matches!( matches!(
fix.applicability(), fix.applicability(),
Applicability::Safe | Applicability::Unsafe Applicability::Safe | Applicability::Unsafe
@ -269,16 +268,22 @@ Either ensure you always emit a fix or change `Violation::FIX_AVAILABILITY` to e
} }
assert!( assert!(
!(fixable && diagnostic.suggestion.is_none()), !(fixable && diagnostic.suggestion().is_none()),
"Diagnostic emitted by {rule:?} is fixable but \ "Diagnostic emitted by {rule:?} is fixable but \
`Violation::fix_title` returns `None`" `Violation::fix_title` returns `None`"
); );
// Not strictly necessary but adds some coverage for this code path // Not strictly necessary but adds some coverage for this code path by overriding the
diagnostic.noqa_offset = directives.noqa_line_for.resolve(diagnostic.range.start()); // noqa offset and the source file
diagnostic.file = source_code.clone(); let range = diagnostic.range();
diagnostic.noqa_offset = Some(directives.noqa_line_for.resolve(range.start()));
if let Some(annotation) = diagnostic.diagnostic.primary_annotation_mut() {
annotation.set_span(
ruff_db::diagnostic::Span::from(source_code.clone()).with_range(range),
);
}
Message::Diagnostic(diagnostic) diagnostic
}) })
.chain(parsed.errors().iter().map(|parse_error| { .chain(parsed.errors().iter().map(|parse_error| {
Message::from_parse_error(parse_error, &locator, source_code.clone()) Message::from_parse_error(parse_error, &locator, source_code.clone())
@ -311,7 +316,7 @@ fn print_syntax_errors(
/// Print the [`Message::Diagnostic`]s in `messages`. /// Print the [`Message::Diagnostic`]s in `messages`.
fn print_diagnostics(mut messages: Vec<Message>, path: &Path, source: &SourceKind) -> String { fn print_diagnostics(mut messages: Vec<Message>, path: &Path, source: &SourceKind) -> String {
messages.retain(Message::is_diagnostic_message); messages.retain(|msg| !msg.is_syntax_error());
if let Some(notebook) = source.as_ipy_notebook() { if let Some(notebook) = source.as_ipy_notebook() {
print_jupyter_messages(&messages, path, notebook) print_jupyter_messages(&messages, path, notebook)

View file

@ -471,13 +471,6 @@ fn register_rules<'a>(input: impl Iterator<Item = &'a Rule>) -> TokenStream {
} }
} }
impl AsRule for crate::message::DiagnosticMessage {
fn rule(&self) -> Rule {
self.name
.parse()
.unwrap_or_else(|_| unreachable!("invalid rule name: {}", self.name))
}
}
} }
} }

View file

@ -12,13 +12,13 @@ use crate::{
use ruff_diagnostics::{Applicability, Edit, Fix}; use ruff_diagnostics::{Applicability, Edit, Fix};
use ruff_linter::{ use ruff_linter::{
Locator, Locator,
codes::Rule,
directives::{Flags, extract_directives}, directives::{Flags, extract_directives},
generate_noqa_edits, generate_noqa_edits,
linter::check_path, linter::check_path,
message::{DiagnosticMessage, Message}, message::Message,
package::PackageRoot, package::PackageRoot,
packaging::detect_package_root, packaging::detect_package_root,
registry::AsRule,
settings::flags, settings::flags,
source_kind::SourceKind, source_kind::SourceKind,
}; };
@ -32,6 +32,7 @@ use ruff_text_size::{Ranged, TextRange};
/// This is serialized on the diagnostic `data` field. /// This is serialized on the diagnostic `data` field.
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub(crate) struct AssociatedDiagnosticData { pub(crate) struct AssociatedDiagnosticData {
/// The message describing what the fix does, if it exists, or the diagnostic name otherwise.
pub(crate) title: String, pub(crate) title: String,
/// Edits to fix the diagnostic. If this is empty, a fix /// Edits to fix the diagnostic. If this is empty, a fix
/// does not exist. /// does not exist.
@ -165,15 +166,16 @@ pub(crate) fn check(
messages messages
.into_iter() .into_iter()
.zip(noqa_edits) .zip(noqa_edits)
.filter_map(|(message, noqa_edit)| match message { .filter_map(|(message, noqa_edit)| match message.to_rule() {
Message::Diagnostic(diagnostic_message) => Some(to_lsp_diagnostic( Some(rule) => Some(to_lsp_diagnostic(
diagnostic_message, rule,
&message,
noqa_edit, noqa_edit,
&source_kind, &source_kind,
locator.to_index(), locator.to_index(),
encoding, encoding,
)), )),
Message::SyntaxError(_) => { None => {
if show_syntax_errors { if show_syntax_errors {
Some(syntax_error_to_lsp_diagnostic( Some(syntax_error_to_lsp_diagnostic(
&message, &message,
@ -239,28 +241,24 @@ pub(crate) fn fixes_for_diagnostics(
/// Generates an LSP diagnostic with an associated cell index for the diagnostic to go in. /// Generates an LSP diagnostic with an associated cell index for the diagnostic to go in.
/// If the source kind is a text document, the cell index will always be `0`. /// If the source kind is a text document, the cell index will always be `0`.
fn to_lsp_diagnostic( fn to_lsp_diagnostic(
diagnostic: DiagnosticMessage, rule: Rule,
diagnostic: &Message,
noqa_edit: Option<Edit>, noqa_edit: Option<Edit>,
source_kind: &SourceKind, source_kind: &SourceKind,
index: &LineIndex, index: &LineIndex,
encoding: PositionEncoding, encoding: PositionEncoding,
) -> (usize, lsp_types::Diagnostic) { ) -> (usize, lsp_types::Diagnostic) {
let rule = diagnostic.rule(); let diagnostic_range = diagnostic.range();
let DiagnosticMessage { let name = diagnostic.name();
range: diagnostic_range, let body = diagnostic.body().to_string();
fix, let fix = diagnostic.fix();
name, let suggestion = diagnostic.suggestion();
body,
suggestion,
..
} = diagnostic;
let fix = fix.and_then(|fix| fix.applies(Applicability::Unsafe).then_some(fix)); let fix = fix.and_then(|fix| fix.applies(Applicability::Unsafe).then_some(fix));
let data = (fix.is_some() || noqa_edit.is_some()) let data = (fix.is_some() || noqa_edit.is_some())
.then(|| { .then(|| {
let edits = fix let edits = fix
.as_ref()
.into_iter() .into_iter()
.flat_map(Fix::edits) .flat_map(Fix::edits)
.map(|edit| lsp_types::TextEdit { .map(|edit| lsp_types::TextEdit {
@ -273,7 +271,7 @@ fn to_lsp_diagnostic(
new_text: noqa_edit.into_content().unwrap_or_default().into_string(), new_text: noqa_edit.into_content().unwrap_or_default().into_string(),
}); });
serde_json::to_value(AssociatedDiagnosticData { serde_json::to_value(AssociatedDiagnosticData {
title: suggestion.unwrap_or_else(|| name.to_string()), title: suggestion.unwrap_or(name).to_string(),
noqa_edit, noqa_edit,
edits, edits,
code: rule.noqa_code().to_string(), code: rule.noqa_code().to_string(),

View file

@ -1,7 +1,6 @@
use std::path::Path; use std::path::Path;
use js_sys::Error; use js_sys::Error;
use ruff_linter::message::{DiagnosticMessage, Message};
use ruff_linter::settings::types::PythonVersion; use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use wasm_bindgen::prelude::*; use wasm_bindgen::prelude::*;
@ -12,7 +11,6 @@ use ruff_linter::Locator;
use ruff_linter::directives; use ruff_linter::directives;
use ruff_linter::line_width::{IndentWidth, LineLength}; use ruff_linter::line_width::{IndentWidth, LineLength};
use ruff_linter::linter::check_path; use ruff_linter::linter::check_path;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::{DEFAULT_SELECTORS, DUMMY_VARIABLE_RGX, flags}; use ruff_linter::settings::{DEFAULT_SELECTORS, DUMMY_VARIABLE_RGX, flags};
use ruff_linter::source_kind::SourceKind; use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::{Mod, PySourceType}; use ruff_python_ast::{Mod, PySourceType};
@ -209,23 +207,17 @@ impl Workspace {
let messages: Vec<ExpandedMessage> = messages let messages: Vec<ExpandedMessage> = messages
.into_iter() .into_iter()
.map(|message| match message { .map(|msg| {
Message::Diagnostic(m) => { let message = msg.body().to_string();
let rule = m.rule(); let range = msg.range();
let DiagnosticMessage { match msg.to_noqa_code() {
body, Some(code) => ExpandedMessage {
suggestion, code: Some(code.to_string()),
range, message,
fix,
..
} = m;
ExpandedMessage {
code: Some(rule.noqa_code().to_string()),
message: body,
start_location: source_code.line_column(range.start()).into(), start_location: source_code.line_column(range.start()).into(),
end_location: source_code.line_column(range.end()).into(), end_location: source_code.line_column(range.end()).into(),
fix: fix.map(|fix| ExpandedFix { fix: msg.fix().map(|fix| ExpandedFix {
message: suggestion, message: msg.suggestion().map(ToString::to_string),
edits: fix edits: fix
.edits() .edits()
.iter() .iter()
@ -236,15 +228,15 @@ impl Workspace {
}) })
.collect(), .collect(),
}), }),
} },
} None => ExpandedMessage {
Message::SyntaxError(_) => ExpandedMessage {
code: None, code: None,
message: message.body().to_string(), message,
start_location: source_code.line_column(message.range().start()).into(), start_location: source_code.line_column(range.start()).into(),
end_location: source_code.line_column(message.range().end()).into(), end_location: source_code.line_column(range.end()).into(),
fix: None, fix: None,
}, },
}
}) })
.collect(); .collect();