Unify Message variants (#18051)

## Summary

This PR unifies the ruff `Message` enum variants for syntax errors and
rule violations into a single `Message` struct consisting of a shared
`db::Diagnostic` plus a few optional fields that only apply to rule
violations.
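
For orientation, the core shape change is roughly the following (a condensed sketch based on the `ruff_linter::message` changes in the diff below, not the verbatim definitions):

```rust
// Before: two variants, each carrying its own file/offset bookkeeping.
pub enum Message {
    Diagnostic(DiagnosticMessage),
    SyntaxError(db::Diagnostic),
}

// After: a single struct. Syntax errors carry only the shared diagnostic and
// leave the remaining fields as `None`.
pub struct Message {
    pub diagnostic: db::Diagnostic,
    // These fields are specific to rule violations.
    pub fix: Option<Fix>,
    pub parent: Option<TextSize>,
    pub(crate) noqa_offset: Option<TextSize>,
}
```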

This version of `Message` is nearly a drop-in replacement for
`ruff_diagnostics::Diagnostic`, which is the next step I have in mind
for the refactor.

I think this is also a useful checkpoint because we could eventually add
some of these optional fields to the new `Diagnostic` type. We've
previously discussed wanting support for `Fix`es, but the other fields
seem less relevant, so we may need to keep the `Message` wrapper around
a bit longer.
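
Call sites now go through constructors rather than building variants by hand. Below is a minimal sketch of the two main entry points, with signatures as they appear in this diff (`range`, `fix`, `file`, `diagnostic`, and `noqa_offset` are assumed to already be in scope):

```rust
// A rule violation: the old `DiagnosticMessage { .. }` literal becomes a call
// to `Message::diagnostic`, with the noqa offset now optional.
let message = Message::diagnostic(
    "unused-import",                                 // rule name (kebab-case)
    "`os` imported but unused".to_string(),          // body
    Some("Remove unused import: `os`".to_string()),  // suggestion
    range,                                           // TextRange of the violation
    fix,                                             // Option<Fix>
    None,                                            // parent: Option<TextSize>
    file,                                            // SourceFile
    Some(noqa_offset),                               // Option<TextSize>
);

// Wrapping an existing `ruff_diagnostics::Diagnostic` still works; the noqa
// offset is likewise optional now.
let message = Message::from_diagnostic(diagnostic, file, None);
```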

## Test plan

Existing tests

---------

Co-authored-by: Micha Reiser <micha@reiser.io>
Brent Westbrook · 2025-05-19 13:34:04 -04:00 · committed by GitHub
parent 236633cd42
commit d6009eb942
27 changed files with 384 additions and 463 deletions


@ -13,19 +13,19 @@ use itertools::Itertools;
use log::{debug, error};
use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use ruff_linter::{codes::Rule, registry::AsRule};
use ruff_linter::codes::Rule;
use rustc_hash::FxHashMap;
use tempfile::NamedTempFile;
use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::Fix;
use ruff_linter::message::{DiagnosticMessage, Message};
use ruff_linter::message::Message;
use ruff_linter::package::PackageRoot;
use ruff_linter::{VERSION, warn_user};
use ruff_macros::CacheKey;
use ruff_notebook::NotebookIndex;
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_text_size::{Ranged, TextRange, TextSize};
use ruff_workspace::Settings;
use ruff_workspace::resolver::Resolver;
@ -348,16 +348,16 @@ impl FileCache {
lint.messages
.iter()
.map(|msg| {
Message::Diagnostic(DiagnosticMessage {
name: msg.rule.into(),
body: msg.body.clone(),
suggestion: msg.suggestion.clone(),
range: msg.range,
fix: msg.fix.clone(),
file: file.clone(),
noqa_offset: msg.noqa_offset,
parent: msg.parent,
})
Message::diagnostic(
msg.rule.into(),
msg.body.clone(),
msg.suggestion.clone(),
msg.range,
msg.fix.clone(),
msg.parent,
file.clone(),
msg.noqa_offset,
)
})
.collect()
};
@ -439,22 +439,22 @@ impl LintCacheData {
let messages = messages
.iter()
.filter_map(|message| message.as_diagnostic_message())
.map(|msg| {
.filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
.map(|(rule, msg)| {
// Make sure that all message use the same source file.
assert_eq!(
msg.file,
msg.source_file(),
messages.first().unwrap().source_file(),
"message uses a different source file"
);
CacheMessage {
rule: msg.rule(),
body: msg.body.clone(),
suggestion: msg.suggestion.clone(),
range: msg.range,
rule,
body: msg.body().to_string(),
suggestion: msg.suggestion().map(ToString::to_string),
range: msg.range(),
parent: msg.parent,
fix: msg.fix.clone(),
noqa_offset: msg.noqa_offset,
fix: msg.fix().cloned(),
noqa_offset: msg.noqa_offset(),
}
})
.collect();
@ -485,7 +485,7 @@ pub(super) struct CacheMessage {
#[bincode(with_serde)]
fix: Option<Fix>,
#[bincode(with_serde)]
noqa_offset: TextSize,
noqa_offset: Option<TextSize>,
}
pub(crate) trait PackageCaches {


@ -20,7 +20,7 @@ use ruff_linter::settings::types::UnsafeFixes;
use ruff_linter::settings::{LinterSettings, flags};
use ruff_linter::{IOError, fs, warn_user_once};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_text_size::TextRange;
use ruff_workspace::resolver::{
PyprojectConfig, ResolvedFile, match_exclusion, python_files_in_path,
};
@ -133,7 +133,7 @@ pub(crate) fn check(
vec![Message::from_diagnostic(
Diagnostic::new(IOError { message }, TextRange::default()),
dummy,
TextSize::default(),
None,
)],
FxHashMap::default(),
)


@ -25,7 +25,7 @@ use ruff_linter::{IOError, fs};
use ruff_notebook::{Notebook, NotebookError, NotebookIndex};
use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
use ruff_source_file::SourceFileBuilder;
use ruff_text_size::{TextRange, TextSize};
use ruff_text_size::TextRange;
use ruff_workspace::Settings;
use crate::cache::{Cache, FileCacheKey, LintCacheData};
@ -71,7 +71,7 @@ impl Diagnostics {
TextRange::default(),
),
source_file,
TextSize::default(),
None,
)],
FxHashMap::default(),
)


@ -1,5 +1,4 @@
use std::cmp::Reverse;
use std::fmt::Display;
use std::hash::Hash;
use std::io::Write;
@ -7,17 +6,17 @@ use anyhow::Result;
use bitflags::bitflags;
use colored::Colorize;
use itertools::{Itertools, iterate};
use ruff_linter::codes::NoqaCode;
use serde::Serialize;
use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, Message, MessageKind, PylintEmitter,
RdjsonEmitter, SarifEmitter, TextEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, Message, PylintEmitter, RdjsonEmitter,
SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::registry::Rule;
use ruff_linter::settings::flags::{self};
use ruff_linter::settings::types::{OutputFormat, UnsafeFixes};
@ -37,59 +36,12 @@ bitflags! {
#[derive(Serialize)]
struct ExpandedStatistics {
code: Option<SerializeRuleAsCode>,
name: SerializeMessageKindAsTitle,
code: Option<NoqaCode>,
name: &'static str,
count: usize,
fixable: bool,
}
#[derive(Copy, Clone)]
struct SerializeRuleAsCode(Rule);
impl Serialize for SerializeRuleAsCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.0.noqa_code().to_string())
}
}
impl Display for SerializeRuleAsCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0.noqa_code())
}
}
impl From<Rule> for SerializeRuleAsCode {
fn from(rule: Rule) -> Self {
Self(rule)
}
}
struct SerializeMessageKindAsTitle(MessageKind);
impl Serialize for SerializeMessageKindAsTitle {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(self.0.as_str())
}
}
impl Display for SerializeMessageKindAsTitle {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.0.as_str())
}
}
impl From<MessageKind> for SerializeMessageKindAsTitle {
fn from(kind: MessageKind) -> Self {
Self(kind)
}
}
pub(crate) struct Printer {
format: OutputFormat,
log_level: LogLevel,
@ -350,21 +302,25 @@ impl Printer {
let statistics: Vec<ExpandedStatistics> = diagnostics
.messages
.iter()
.sorted_by_key(|message| (message.rule(), message.fixable()))
.fold(vec![], |mut acc: Vec<(&Message, usize)>, message| {
if let Some((prev_message, count)) = acc.last_mut() {
if prev_message.rule() == message.rule() {
*count += 1;
return acc;
.map(|message| (message.to_noqa_code(), message))
.sorted_by_key(|(code, message)| (*code, message.fixable()))
.fold(
vec![],
|mut acc: Vec<((Option<NoqaCode>, &Message), usize)>, (code, message)| {
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
if *prev_code == code {
*count += 1;
return acc;
}
}
}
acc.push((message, 1));
acc
})
acc.push(((code, message), 1));
acc
},
)
.iter()
.map(|&(message, count)| ExpandedStatistics {
code: message.rule().map(std::convert::Into::into),
name: message.kind().into(),
.map(|&((code, message), count)| ExpandedStatistics {
code,
name: message.name(),
count,
fixable: if let Some(fix) = message.fix() {
fix.applies(self.unsafe_fixes.required_applicability())


@ -563,6 +563,11 @@ impl Annotation {
&self.span
}
/// Sets the span on this annotation.
pub fn set_span(&mut self, span: Span) {
self.span = span;
}
/// Returns the tags associated with this annotation.
pub fn get_tags(&self) -> &[DiagnosticTag] {
&self.tags
@ -686,7 +691,7 @@ impl DiagnosticId {
///
/// Note that this doesn't include the lint's category. It
/// only includes the lint's name.
pub fn as_str(&self) -> &str {
pub fn as_str(&self) -> &'static str {
match self {
DiagnosticId::Panic => "panic",
DiagnosticId::Io => "io",


@ -47,10 +47,14 @@ pub(crate) fn check_noqa(
// Remove any ignored diagnostics.
'outer: for (index, diagnostic) in diagnostics.iter().enumerate() {
if matches!(diagnostic.rule(), Rule::BlanketNOQA) {
let rule = diagnostic.rule();
if matches!(rule, Rule::BlanketNOQA) {
continue;
}
let code = rule.noqa_code();
match &exemption {
FileExemption::All(_) => {
// If the file is exempted, ignore all diagnostics.
@ -59,7 +63,7 @@ pub(crate) fn check_noqa(
}
FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, ignore it.
if codes.contains(&&diagnostic.rule().noqa_code()) {
if codes.contains(&&code) {
ignored_diagnostics.push(index);
continue;
}
@ -78,13 +82,13 @@ pub(crate) fn check_noqa(
{
let suppressed = match &directive_line.directive {
Directive::All(_) => {
directive_line.matches.push(diagnostic.rule().noqa_code());
directive_line.matches.push(code);
ignored_diagnostics.push(index);
true
}
Directive::Codes(directive) => {
if directive.includes(diagnostic.rule()) {
directive_line.matches.push(diagnostic.rule().noqa_code());
if directive.includes(code) {
directive_line.matches.push(code);
ignored_diagnostics.push(index);
true
} else {


@ -10,7 +10,7 @@ use crate::registry::{AsRule, Linter};
use crate::rule_selector::is_single_rule_selector;
use crate::rules;
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct NoqaCode(&'static str, &'static str);
impl NoqaCode {
@ -46,6 +46,15 @@ impl PartialEq<&str> for NoqaCode {
}
}
impl serde::Serialize for NoqaCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
#[derive(Debug, Copy, Clone)]
pub enum RuleGroup {
/// The rule is stable.


@ -606,7 +606,7 @@ mod tests {
use crate::fix::edits::{
add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
};
use crate::message::DiagnosticMessage;
use crate::message::Message;
/// Parse the given source using [`Mode::Module`] and return the first statement.
fn parse_first_stmt(source: &str) -> Result<Stmt> {
@ -745,16 +745,16 @@ x = 1 \
iter.next().ok_or(anyhow!("expected edits nonempty"))?,
iter,
));
DiagnosticMessage {
name: diag.name,
body: diag.body,
suggestion: diag.suggestion,
range: diag.range,
fix: diag.fix,
parent: diag.parent,
file: SourceFileBuilder::new("<filename>", "<code>").finish(),
noqa_offset: TextSize::default(),
}
Message::diagnostic(
diag.name,
diag.body,
diag.suggestion,
diag.range,
diag.fix,
diag.parent,
SourceFileBuilder::new("<filename>", "<code>").finish(),
None,
)
};
assert_eq!(apply_fixes([diag].iter(), &locator).code, expect);
Ok(())


@ -8,8 +8,8 @@ use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use crate::Locator;
use crate::linter::FixTable;
use crate::message::{DiagnosticMessage, Message};
use crate::registry::{AsRule, Rule};
use crate::message::Message;
use crate::registry::Rule;
use crate::settings::types::UnsafeFixes;
pub(crate) mod codemods;
@ -35,11 +35,9 @@ pub(crate) fn fix_file(
let mut with_fixes = messages
.iter()
.filter_map(Message::as_diagnostic_message)
.filter(|message| {
message
.fix
.as_ref()
.fix()
.is_some_and(|fix| fix.applies(required_applicability))
})
.peekable();
@ -53,7 +51,7 @@ pub(crate) fn fix_file(
/// Apply a series of fixes.
fn apply_fixes<'a>(
diagnostics: impl Iterator<Item = &'a DiagnosticMessage>,
diagnostics: impl Iterator<Item = &'a Message>,
locator: &'a Locator<'a>,
) -> FixResult {
let mut output = String::with_capacity(locator.len());
@ -64,7 +62,8 @@ fn apply_fixes<'a>(
let mut source_map = SourceMap::default();
for (rule, fix) in diagnostics
.filter_map(|diagnostic| diagnostic.fix.as_ref().map(|fix| (diagnostic.rule(), fix)))
.filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
.filter_map(|(rule, diagnostic)| diagnostic.fix().map(|fix| (rule, fix)))
.sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2))
{
let mut edits = fix
@ -164,29 +163,23 @@ mod tests {
use crate::Locator;
use crate::fix::{FixResult, apply_fixes};
use crate::message::DiagnosticMessage;
use crate::message::Message;
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
fn create_diagnostics(
filename: &str,
source: &str,
edit: impl IntoIterator<Item = Edit>,
) -> Vec<DiagnosticMessage> {
// The choice of rule here is arbitrary.
) -> Vec<Message> {
edit.into_iter()
.map(|edit| {
let range = edit.range();
let diagnostic = Diagnostic::new(MissingNewlineAtEndOfFile, range);
DiagnosticMessage {
name: diagnostic.name,
body: diagnostic.body,
suggestion: diagnostic.suggestion,
range,
fix: Some(Fix::safe_edit(edit)),
parent: None,
file: SourceFileBuilder::new(filename, source).finish(),
noqa_offset: TextSize::default(),
}
// The choice of rule here is arbitrary.
let diagnostic = Diagnostic::new(MissingNewlineAtEndOfFile, edit.range());
Message::from_diagnostic(
diagnostic.with_fix(Fix::safe_edit(edit)),
SourceFileBuilder::new(filename, source).finish(),
None,
)
})
.collect()
}


@ -535,7 +535,7 @@ fn diagnostics_to_messages(
)
.chain(diagnostics.into_iter().map(|diagnostic| {
let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
Message::from_diagnostic(diagnostic, file.deref().clone(), noqa_offset)
Message::from_diagnostic(diagnostic, file.deref().clone(), Some(noqa_offset))
}))
.collect()
}
@ -682,7 +682,7 @@ fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {
#[expect(clippy::print_stderr)]
fn report_failed_to_converge_error(path: &Path, transformed: &str, messages: &[Message]) {
let codes = collect_rule_codes(messages.iter().filter_map(Message::rule));
let codes = collect_rule_codes(messages.iter().filter_map(Message::to_rule));
if cfg!(debug_assertions) {
eprintln!(
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",


@ -33,8 +33,8 @@ impl Emitter for AzureEmitter {
line = location.line,
col = location.column,
code = message
.rule()
.map_or_else(String::new, |rule| format!("code={};", rule.noqa_code())),
.to_noqa_code()
.map_or_else(String::new, |code| format!("code={code};")),
body = message.body(),
)?;
}


@ -33,8 +33,8 @@ impl Emitter for GithubEmitter {
writer,
"::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
code = message
.rule()
.map_or_else(String::new, |rule| format!(" ({})", rule.noqa_code())),
.to_noqa_code()
.map_or_else(String::new, |code| format!(" ({code})")),
file = message.filename(),
row = source_location.line,
column = source_location.column,
@ -50,8 +50,8 @@ impl Emitter for GithubEmitter {
column = location.column,
)?;
if let Some(rule) = message.rule() {
write!(writer, " {}", rule.noqa_code())?;
if let Some(code) = message.to_noqa_code() {
write!(writer, " {code}")?;
}
writeln!(writer, " {}", message.body())?;


@ -90,8 +90,8 @@ impl Serialize for SerializedMessages<'_> {
}
fingerprints.insert(message_fingerprint);
let (description, check_name) = if let Some(rule) = message.rule() {
(message.body().to_string(), rule.noqa_code().to_string())
let (description, check_name) = if let Some(code) = message.to_noqa_code() {
(message.body().to_string(), code.to_string())
} else {
let description = message.body();
let description_without_prefix = description


@ -81,8 +81,8 @@ pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext)
}
json!({
"code": message.rule().map(|rule| rule.noqa_code().to_string()),
"url": message.rule().and_then(|rule| rule.url()),
"code": message.to_noqa_code().map(|code| code.to_string()),
"url": message.to_rule().and_then(|rule| rule.url()),
"message": message.body(),
"fix": fix,
"cell": notebook_cell_index,


@ -59,8 +59,8 @@ impl Emitter for JunitEmitter {
body = message.body()
));
let mut case = TestCase::new(
if let Some(rule) = message.rule() {
format!("org.ruff.{}", rule.noqa_code())
if let Some(code) = message.to_noqa_code() {
format!("org.ruff.{code}")
} else {
"org.ruff".to_string()
},


@ -1,10 +1,9 @@
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use std::io::Write;
use std::ops::Deref;
use ruff_db::diagnostic::{self as db, Annotation, DiagnosticId, Severity, Span};
use ruff_db::diagnostic::{self as db, Annotation, DiagnosticId, LintName, Severity, Span};
use ruff_python_parser::semantic_errors::SemanticSyntaxError;
use rustc_hash::FxHashMap;
@ -26,8 +25,9 @@ pub use sarif::SarifEmitter;
pub use text::TextEmitter;
use crate::Locator;
use crate::codes::NoqaCode;
use crate::logging::DisplayParseErrorType;
use crate::registry::{AsRule, Rule};
use crate::registry::Rule;
mod azure;
mod diff;
@ -43,39 +43,23 @@ mod sarif;
mod text;
/// Message represents either a diagnostic message corresponding to a rule violation or a syntax
/// error message raised by the parser.
/// error message.
///
/// All of the information for syntax errors is captured in the underlying [`db::Diagnostic`], while
/// rule violations can have the additional optional fields like fixes, suggestions, and (parent)
/// `noqa` offsets.
///
/// For diagnostic messages, the [`db::Diagnostic`]'s primary message contains the
/// [`Diagnostic::body`], and the primary annotation optionally contains the suggestion accompanying
/// a fix. The `db::Diagnostic::id` field contains the kebab-case lint name derived from the `Rule`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Message {
Diagnostic(DiagnosticMessage),
SyntaxError(db::Diagnostic),
}
pub struct Message {
pub diagnostic: db::Diagnostic,
/// A diagnostic message corresponding to a rule violation.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DiagnosticMessage {
pub name: &'static str,
pub body: String,
pub suggestion: Option<String>,
pub range: TextRange,
// these fields are specific to rule violations
pub fix: Option<Fix>,
pub parent: Option<TextSize>,
pub file: SourceFile,
pub noqa_offset: TextSize,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum MessageKind {
Diagnostic(Rule),
SyntaxError,
}
impl MessageKind {
pub fn as_str(&self) -> &str {
match self {
MessageKind::Diagnostic(rule) => rule.as_ref(),
MessageKind::SyntaxError => "syntax-error",
}
}
pub(crate) noqa_offset: Option<TextSize>,
}
impl Message {
@ -84,28 +68,72 @@ impl Message {
range: TextRange,
file: SourceFile,
) -> Message {
let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
let span = Span::from(file).with_range(range);
diag.annotate(Annotation::primary(span).message(message));
Self::SyntaxError(diag)
diag.annotate(Annotation::primary(span));
Self {
diagnostic: diag,
fix: None,
parent: None,
noqa_offset: None,
}
}
#[expect(clippy::too_many_arguments)]
pub fn diagnostic(
name: &'static str,
body: String,
suggestion: Option<String>,
range: TextRange,
fix: Option<Fix>,
parent: Option<TextSize>,
file: SourceFile,
noqa_offset: Option<TextSize>,
) -> Message {
let mut diagnostic = db::Diagnostic::new(
DiagnosticId::Lint(LintName::of(name)),
Severity::Error,
body,
);
let span = Span::from(file).with_range(range);
let mut annotation = Annotation::primary(span);
if let Some(suggestion) = suggestion {
annotation = annotation.message(suggestion);
}
diagnostic.annotate(annotation);
Message {
diagnostic,
fix,
parent,
noqa_offset,
}
}
/// Create a [`Message`] from the given [`Diagnostic`] corresponding to a rule violation.
pub fn from_diagnostic(
diagnostic: Diagnostic,
file: SourceFile,
noqa_offset: TextSize,
noqa_offset: Option<TextSize>,
) -> Message {
Message::Diagnostic(DiagnosticMessage {
range: diagnostic.range(),
name: diagnostic.name,
body: diagnostic.body,
suggestion: diagnostic.suggestion,
fix: diagnostic.fix,
parent: diagnostic.parent,
let Diagnostic {
name,
body,
suggestion,
range,
fix,
parent,
} = diagnostic;
Self::diagnostic(
name,
body,
suggestion,
range,
fix,
parent,
file,
noqa_offset,
})
)
}
/// Create a [`Message`] from the given [`ParseError`].
@ -157,83 +185,38 @@ impl Message {
)
}
pub const fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> {
match self {
Message::Diagnostic(m) => Some(m),
Message::SyntaxError(_) => None,
}
}
pub fn into_diagnostic_message(self) -> Option<DiagnosticMessage> {
match self {
Message::Diagnostic(m) => Some(m),
Message::SyntaxError(_) => None,
}
}
/// Returns `true` if `self` is a diagnostic message.
pub const fn is_diagnostic_message(&self) -> bool {
matches!(self, Message::Diagnostic(_))
}
/// Returns `true` if `self` is a syntax error message.
pub fn is_syntax_error(&self) -> bool {
match self {
Message::Diagnostic(_) => false,
Message::SyntaxError(diag) => diag.id().is_invalid_syntax(),
}
}
/// Returns a message kind.
pub fn kind(&self) -> MessageKind {
match self {
Message::Diagnostic(m) => MessageKind::Diagnostic(m.rule()),
Message::SyntaxError(_) => MessageKind::SyntaxError,
}
self.diagnostic.id().is_invalid_syntax()
}
/// Returns the name used to represent the diagnostic.
pub fn name(&self) -> &str {
match self {
Message::Diagnostic(m) => m.name,
Message::SyntaxError(_) => "SyntaxError",
pub fn name(&self) -> &'static str {
if self.is_syntax_error() {
"syntax-error"
} else {
self.diagnostic.id().as_str()
}
}
/// Returns the message body to display to the user.
pub fn body(&self) -> &str {
match self {
Message::Diagnostic(m) => &m.body,
Message::SyntaxError(m) => m
.primary_annotation()
.expect("Expected a primary annotation for a ruff diagnostic")
.get_message()
.expect("Expected a message for a ruff diagnostic"),
}
self.diagnostic.primary_message()
}
/// Returns the fix suggestion for the violation.
pub fn suggestion(&self) -> Option<&str> {
match self {
Message::Diagnostic(m) => m.suggestion.as_deref(),
Message::SyntaxError(_) => None,
}
self.diagnostic.primary_annotation()?.get_message()
}
/// Returns the offset at which the `noqa` comment will be placed if it's a diagnostic message.
pub fn noqa_offset(&self) -> Option<TextSize> {
match self {
Message::Diagnostic(m) => Some(m.noqa_offset),
Message::SyntaxError(_) => None,
}
self.noqa_offset
}
/// Returns the [`Fix`] for the message, if there is any.
pub fn fix(&self) -> Option<&Fix> {
match self {
Message::Diagnostic(m) => m.fix.as_ref(),
Message::SyntaxError(_) => None,
}
self.fix.as_ref()
}
/// Returns `true` if the message contains a [`Fix`].
@ -242,56 +225,64 @@ impl Message {
}
/// Returns the [`Rule`] corresponding to the diagnostic message.
pub fn rule(&self) -> Option<Rule> {
match self {
Message::Diagnostic(m) => Some(m.rule()),
Message::SyntaxError(_) => None,
pub fn to_rule(&self) -> Option<Rule> {
if self.is_syntax_error() {
None
} else {
Some(self.name().parse().expect("Expected a valid rule name"))
}
}
/// Returns the [`NoqaCode`] corresponding to the diagnostic message.
pub fn to_noqa_code(&self) -> Option<NoqaCode> {
self.to_rule().map(|rule| rule.noqa_code())
}
/// Returns the URL for the rule documentation, if it exists.
pub fn to_url(&self) -> Option<String> {
// TODO(brent) Rule::url calls Rule::explanation, which calls ViolationMetadata::explain,
// which when derived (seems always to be the case?) is always `Some`, so I think it's
// pretty safe to inline the Rule::url implementation here, using `self.name()`:
//
// format!("{}/rules/{}", env!("CARGO_PKG_HOMEPAGE"), self.name())
//
// at least in the case of diagnostics, I guess syntax errors will return None
self.to_rule().and_then(|rule| rule.url())
}
/// Returns the filename for the message.
pub fn filename(&self) -> Cow<'_, str> {
match self {
Message::Diagnostic(m) => Cow::Borrowed(m.file.name()),
Message::SyntaxError(diag) => Cow::Owned(
diag.expect_primary_span()
.expect_ruff_file()
.name()
.to_string(),
),
}
pub fn filename(&self) -> String {
self.diagnostic
.expect_primary_span()
.expect_ruff_file()
.name()
.to_string()
}
/// Computes the start source location for the message.
pub fn compute_start_location(&self) -> LineColumn {
match self {
Message::Diagnostic(m) => m.file.to_source_code().line_column(m.range.start()),
Message::SyntaxError(diag) => diag
.expect_primary_span()
.expect_ruff_file()
.to_source_code()
.line_column(self.start()),
}
self.diagnostic
.expect_primary_span()
.expect_ruff_file()
.to_source_code()
.line_column(self.start())
}
/// Computes the end source location for the message.
pub fn compute_end_location(&self) -> LineColumn {
match self {
Message::Diagnostic(m) => m.file.to_source_code().line_column(m.range.end()),
Message::SyntaxError(diag) => diag
.expect_primary_span()
.expect_ruff_file()
.to_source_code()
.line_column(self.end()),
}
self.diagnostic
.expect_primary_span()
.expect_ruff_file()
.to_source_code()
.line_column(self.end())
}
/// Returns the [`SourceFile`] which the message belongs to.
pub fn source_file(&self) -> SourceFile {
match self {
Message::Diagnostic(m) => m.file.clone(),
Message::SyntaxError(m) => m.expect_primary_span().expect_ruff_file().clone(),
}
self.diagnostic
.expect_primary_span()
.expect_ruff_file()
.clone()
}
}
@ -309,13 +300,10 @@ impl PartialOrd for Message {
impl Ranged for Message {
fn range(&self) -> TextRange {
match self {
Message::Diagnostic(m) => m.range,
Message::SyntaxError(m) => m
.expect_primary_span()
.range()
.expect("Expected range for ruff span"),
}
self.diagnostic
.expect_primary_span()
.range()
.expect("Expected range for ruff span")
}
}
@ -390,7 +378,7 @@ mod tests {
use ruff_text_size::{TextRange, TextSize};
use crate::Locator;
use crate::message::{DiagnosticMessage, Emitter, EmitterContext, Message};
use crate::message::{Emitter, EmitterContext, Message};
pub(super) fn create_syntax_error_messages() -> Vec<Message> {
let source = r"from os import
@ -428,54 +416,50 @@ def fibonacci(n):
let fib_source = SourceFileBuilder::new("fib.py", fib).finish();
let unused_import_start = TextSize::from(7);
let unused_import = DiagnosticMessage {
name: "unused-import",
body: "`os` imported but unused".to_string(),
suggestion: Some("Remove unused import: `os`".to_string()),
range: TextRange::new(unused_import_start, TextSize::from(9)),
fix: Some(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
let unused_import = Message::diagnostic(
"unused-import",
"`os` imported but unused".to_string(),
Some("Remove unused import: `os`".to_string()),
TextRange::new(unused_import_start, TextSize::from(9)),
Some(Fix::unsafe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(0),
TextSize::from(10),
)))),
parent: None,
noqa_offset: unused_import_start,
file: fib_source.clone(),
};
None,
fib_source.clone(),
Some(unused_import_start),
);
let unused_variable_start = TextSize::from(94);
let unused_variable = DiagnosticMessage {
name: "unused-variable",
body: "Local variable `x` is assigned to but never used".to_string(),
suggestion: Some("Remove assignment to unused variable `x`".to_string()),
range: TextRange::new(unused_variable_start, TextSize::from(95)),
fix: Some(Fix::unsafe_edit(Edit::deletion(
let unused_variable = Message::diagnostic(
"unused-variable",
"Local variable `x` is assigned to but never used".to_string(),
Some("Remove assignment to unused variable `x`".to_string()),
TextRange::new(unused_variable_start, TextSize::from(95)),
Some(Fix::unsafe_edit(Edit::deletion(
TextSize::from(94),
TextSize::from(99),
))),
parent: None,
noqa_offset: unused_variable_start,
file: fib_source,
};
None,
fib_source,
Some(unused_variable_start),
);
let file_2 = r"if a == 1: pass";
let undefined_name_start = TextSize::from(3);
let undefined_name = DiagnosticMessage {
name: "undefined-name",
body: "Undefined name `a`".to_string(),
suggestion: None,
range: TextRange::new(undefined_name_start, TextSize::from(4)),
fix: None,
parent: None,
noqa_offset: undefined_name_start,
file: SourceFileBuilder::new("undef.py", file_2).finish(),
};
let undefined_name = Message::diagnostic(
"undefined-name",
"Undefined name `a`".to_string(),
None,
TextRange::new(undefined_name_start, TextSize::from(4)),
None,
None,
SourceFileBuilder::new("undef.py", file_2).finish(),
Some(undefined_name_start),
);
vec![
Message::Diagnostic(unused_import),
Message::Diagnostic(unused_variable),
Message::Diagnostic(undefined_name),
]
vec![unused_import, unused_variable, undefined_name]
}
pub(super) fn create_notebook_messages() -> (Vec<Message>, FxHashMap<String, NotebookIndex>) {
@ -494,49 +478,49 @@ def foo():
let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();
let unused_import_os_start = TextSize::from(16);
let unused_import_os = DiagnosticMessage {
name: "unused-import",
body: "`os` imported but unused".to_string(),
suggestion: Some("Remove unused import: `os`".to_string()),
range: TextRange::new(unused_import_os_start, TextSize::from(18)),
fix: Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
let unused_import_os = Message::diagnostic(
"unused-import",
"`os` imported but unused".to_string(),
Some("Remove unused import: `os`".to_string()),
TextRange::new(unused_import_os_start, TextSize::from(18)),
Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(9),
TextSize::from(19),
)))),
parent: None,
file: notebook_source.clone(),
noqa_offset: unused_import_os_start,
};
None,
notebook_source.clone(),
Some(unused_import_os_start),
);
let unused_import_math_start = TextSize::from(35);
let unused_import_math = DiagnosticMessage {
name: "unused-import",
body: "`math` imported but unused".to_string(),
suggestion: Some("Remove unused import: `math`".to_string()),
range: TextRange::new(unused_import_math_start, TextSize::from(39)),
fix: Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
let unused_import_math = Message::diagnostic(
"unused-import",
"`math` imported but unused".to_string(),
Some("Remove unused import: `math`".to_string()),
TextRange::new(unused_import_math_start, TextSize::from(39)),
Some(Fix::safe_edit(Edit::range_deletion(TextRange::new(
TextSize::from(28),
TextSize::from(40),
)))),
parent: None,
file: notebook_source.clone(),
noqa_offset: unused_import_math_start,
};
None,
notebook_source.clone(),
Some(unused_import_math_start),
);
let unused_variable_start = TextSize::from(98);
let unused_variable = DiagnosticMessage {
name: "unused-variable",
body: "Local variable `x` is assigned to but never used".to_string(),
suggestion: Some("Remove assignment to unused variable `x`".to_string()),
range: TextRange::new(unused_variable_start, TextSize::from(99)),
fix: Some(Fix::unsafe_edit(Edit::deletion(
let unused_variable = Message::diagnostic(
"unused-variable",
"Local variable `x` is assigned to but never used".to_string(),
Some("Remove assignment to unused variable `x`".to_string()),
TextRange::new(unused_variable_start, TextSize::from(99)),
Some(Fix::unsafe_edit(Edit::deletion(
TextSize::from(94),
TextSize::from(104),
))),
parent: None,
file: notebook_source,
noqa_offset: unused_variable_start,
};
None,
notebook_source,
Some(unused_variable_start),
);
let mut notebook_indexes = FxHashMap::default();
notebook_indexes.insert(
@ -570,11 +554,7 @@ def foo():
);
(
vec![
Message::Diagnostic(unused_import_os),
Message::Diagnostic(unused_import_math),
Message::Diagnostic(unused_variable),
],
vec![unused_import_os, unused_import_math, unused_variable],
notebook_indexes,
)
}


@ -26,12 +26,8 @@ impl Emitter for PylintEmitter {
message.compute_start_location().line
};
let body = if let Some(rule) = message.rule() {
format!(
"[{code}] {body}",
code = rule.noqa_code(),
body = message.body()
)
let body = if let Some(code) = message.to_noqa_code() {
format!("[{code}] {body}", body = message.body())
} else {
message.body().to_string()
};


@ -71,8 +71,8 @@ fn message_to_rdjson_value(message: &Message) -> Value {
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.rule().map(|rule| rule.noqa_code().to_string()),
"url": message.rule().and_then(|rule| rule.url()),
"value": message.to_noqa_code().map(|code| code.to_string()),
"url": message.to_url(),
},
"suggestions": rdjson_suggestions(fix.edits(), &source_code),
})
@ -84,8 +84,8 @@ fn message_to_rdjson_value(message: &Message) -> Value {
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.rule().map(|rule| rule.noqa_code().to_string()),
"url": message.rule().and_then(|rule| rule.url()),
"value": message.to_noqa_code().map(|code| code.to_string()),
"url": message.to_url(),
},
})
}


@ -123,7 +123,7 @@ impl SarifResult {
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
rule: message.rule(),
rule: message.to_rule(),
level: "error".to_string(),
message: message.body().to_string(),
uri: url::Url::from_file_path(&path)
@ -143,7 +143,7 @@ impl SarifResult {
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
rule: message.rule(),
rule: message.to_rule(),
level: "error".to_string(),
message: message.body().to_string(),
uri: path.display().to_string(),


@ -151,8 +151,8 @@ impl Display for RuleCodeAndBody<'_> {
if let Some(fix) = self.message.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.unsafe_fixes.required_applicability()) {
if let Some(rule) = self.message.rule() {
write!(f, "{} ", rule.noqa_code().to_string().red().bold())?;
if let Some(code) = self.message.to_noqa_code() {
write!(f, "{} ", code.to_string().red().bold())?;
}
return write!(
f,
@ -164,11 +164,11 @@ impl Display for RuleCodeAndBody<'_> {
}
}
if let Some(rule) = self.message.rule() {
if let Some(code) = self.message.to_noqa_code() {
write!(
f,
"{code} {body}",
code = rule.noqa_code().to_string().red().bold(),
code = code.to_string().red().bold(),
body = self.message.body(),
)
} else {
@ -254,8 +254,8 @@ impl Display for MessageCodeFrame<'_> {
let label = self
.message
.rule()
.map_or_else(String::new, |rule| rule.noqa_code().to_string());
.to_noqa_code()
.map_or_else(String::new, |code| code.to_string());
let line_start = self.notebook_index.map_or_else(
|| start_index.get(),


@ -18,7 +18,7 @@ use crate::Locator;
use crate::codes::NoqaCode;
use crate::fs::relativize_path;
use crate::message::Message;
use crate::registry::{AsRule, Rule, RuleSet};
use crate::registry::{Rule, RuleSet};
use crate::rule_redirects::get_redirect_target;
/// Generates an array of edits that matches the length of `messages`.
@ -105,8 +105,7 @@ impl Codes<'_> {
/// Returns `true` if the string list of `codes` includes `code` (or an alias
/// thereof).
pub(crate) fn includes(&self, needle: Rule) -> bool {
let needle = needle.noqa_code();
pub(crate) fn includes(&self, needle: NoqaCode) -> bool {
self.iter()
.any(|code| needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str()))
}
@ -140,7 +139,7 @@ pub(crate) fn rule_is_ignored(
Ok(Some(NoqaLexerOutput {
directive: Directive::Codes(codes),
..
})) => codes.includes(code),
})) => codes.includes(code.noqa_code()),
_ => false,
}
}
@ -846,11 +845,13 @@ fn find_noqa_comments<'a>(
// Mark any non-ignored diagnostics.
for message in messages {
let Message::Diagnostic(diagnostic) = message else {
let Some(rule) = message.to_rule() else {
comments_by_line.push(None);
continue;
};
let code = rule.noqa_code();
match &exemption {
FileExemption::All(_) => {
// If the file is exempted, don't add any noqa directives.
@ -859,7 +860,7 @@ fn find_noqa_comments<'a>(
}
FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, don't add a noqa directive.
if codes.contains(&&diagnostic.rule().noqa_code()) {
if codes.contains(&&code) {
comments_by_line.push(None);
continue;
}
@ -867,7 +868,7 @@ fn find_noqa_comments<'a>(
}
// Is the violation ignored by a `noqa` directive on the parent line?
if let Some(parent) = diagnostic.parent {
if let Some(parent) = message.parent {
if let Some(directive_line) =
directives.find_line_with_directive(noqa_line_for.resolve(parent))
{
@ -877,7 +878,7 @@ fn find_noqa_comments<'a>(
continue;
}
Directive::Codes(codes) => {
if codes.includes(diagnostic.rule()) {
if codes.includes(code) {
comments_by_line.push(None);
continue;
}
@ -886,9 +887,7 @@ fn find_noqa_comments<'a>(
}
}
let noqa_offset = noqa_line_for.resolve(diagnostic.range.start());
let rule = diagnostic.rule();
let noqa_offset = noqa_line_for.resolve(message.range().start());
// Or ignored by the directive itself?
if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
@ -898,7 +897,7 @@ fn find_noqa_comments<'a>(
continue;
}
directive @ Directive::Codes(codes) => {
if !codes.includes(rule) {
if !codes.includes(code) {
comments_by_line.push(Some(NoqaComment {
line: directive_line.start(),
rule,
@ -1260,7 +1259,7 @@ mod tests {
) -> Message {
let noqa_offset = diagnostic.start();
let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish();
Message::from_diagnostic(diagnostic, file, noqa_offset)
Message::from_diagnostic(diagnostic, file, Some(noqa_offset))
}
#[test]


@ -30,11 +30,7 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -
);
if settings.rules.enabled(Rule::IOError) {
let diagnostic = Diagnostic::new(IOError { message }, TextRange::default());
messages.push(Message::from_diagnostic(
diagnostic,
source_file,
TextSize::default(),
));
messages.push(Message::from_diagnostic(diagnostic, source_file, None));
} else {
warn!(
"{}{}{} {message}",
@ -56,11 +52,7 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -
if settings.rules.enabled(Rule::InvalidPyprojectToml) {
let toml_err = err.message().to_string();
let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
messages.push(Message::from_diagnostic(
diagnostic,
source_file,
TextSize::default(),
));
messages.push(Message::from_diagnostic(diagnostic, source_file, None));
}
messages


@ -24,7 +24,7 @@ mod tests {
use crate::Locator;
use crate::linter::check_path;
use crate::message::Message;
use crate::registry::{AsRule, Linter, Rule};
use crate::registry::{Linter, Rule};
use crate::rules::isort;
use crate::rules::pyflakes;
use crate::settings::types::PreviewMode;
@ -776,8 +776,7 @@ mod tests {
messages.sort_by_key(Ranged::start);
let actual = messages
.iter()
.filter_map(Message::as_diagnostic_message)
.map(AsRule::rule)
.filter_map(Message::to_rule)
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}


@ -7,6 +7,7 @@ use std::path::Path;
#[cfg(not(fuzzing))]
use anyhow::Result;
use itertools::Itertools;
use ruff_text_size::Ranged;
use rustc_hash::FxHashMap;
use ruff_diagnostics::{Applicability, FixAvailability};
@ -25,7 +26,6 @@ use crate::linter::check_path;
use crate::message::{Emitter, EmitterContext, Message, TextEmitter};
use crate::package::PackageRoot;
use crate::packaging::detect_package_root;
use crate::registry::AsRule;
use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, flags};
use crate::source_kind::SourceKind;
@ -233,10 +233,9 @@ Source with applied fixes:
let messages = messages
.into_iter()
.filter_map(Message::into_diagnostic_message)
.map(|mut diagnostic| {
let rule = diagnostic.rule();
let fixable = diagnostic.fix.as_ref().is_some_and(|fix| {
.filter_map(|msg| Some((msg.to_rule()?, msg)))
.map(|(rule, mut diagnostic)| {
let fixable = diagnostic.fix().is_some_and(|fix| {
matches!(
fix.applicability(),
Applicability::Safe | Applicability::Unsafe
@ -269,16 +268,22 @@ Either ensure you always emit a fix or change `Violation::FIX_AVAILABILITY` to e
}
assert!(
!(fixable && diagnostic.suggestion.is_none()),
!(fixable && diagnostic.suggestion().is_none()),
"Diagnostic emitted by {rule:?} is fixable but \
`Violation::fix_title` returns `None`"
);
// Not strictly necessary but adds some coverage for this code path
diagnostic.noqa_offset = directives.noqa_line_for.resolve(diagnostic.range.start());
diagnostic.file = source_code.clone();
// Not strictly necessary but adds some coverage for this code path by overriding the
// noqa offset and the source file
let range = diagnostic.range();
diagnostic.noqa_offset = Some(directives.noqa_line_for.resolve(range.start()));
if let Some(annotation) = diagnostic.diagnostic.primary_annotation_mut() {
annotation.set_span(
ruff_db::diagnostic::Span::from(source_code.clone()).with_range(range),
);
}
Message::Diagnostic(diagnostic)
diagnostic
})
.chain(parsed.errors().iter().map(|parse_error| {
Message::from_parse_error(parse_error, &locator, source_code.clone())
@ -311,7 +316,7 @@ fn print_syntax_errors(
/// Print the [`Message::Diagnostic`]s in `messages`.
fn print_diagnostics(mut messages: Vec<Message>, path: &Path, source: &SourceKind) -> String {
messages.retain(Message::is_diagnostic_message);
messages.retain(|msg| !msg.is_syntax_error());
if let Some(notebook) = source.as_ipy_notebook() {
print_jupyter_messages(&messages, path, notebook)


@ -471,13 +471,6 @@ fn register_rules<'a>(input: impl Iterator<Item = &'a Rule>) -> TokenStream {
}
}
impl AsRule for crate::message::DiagnosticMessage {
fn rule(&self) -> Rule {
self.name
.parse()
.unwrap_or_else(|_| unreachable!("invalid rule name: {}", self.name))
}
}
}
}


@ -12,13 +12,13 @@ use crate::{
use ruff_diagnostics::{Applicability, Edit, Fix};
use ruff_linter::{
Locator,
codes::Rule,
directives::{Flags, extract_directives},
generate_noqa_edits,
linter::check_path,
message::{DiagnosticMessage, Message},
message::Message,
package::PackageRoot,
packaging::detect_package_root,
registry::AsRule,
settings::flags,
source_kind::SourceKind,
};
@ -32,6 +32,7 @@ use ruff_text_size::{Ranged, TextRange};
/// This is serialized on the diagnostic `data` field.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub(crate) struct AssociatedDiagnosticData {
/// The message describing what the fix does, if it exists, or the diagnostic name otherwise.
pub(crate) title: String,
/// Edits to fix the diagnostic. If this is empty, a fix
/// does not exist.
@ -165,15 +166,16 @@ pub(crate) fn check(
messages
.into_iter()
.zip(noqa_edits)
.filter_map(|(message, noqa_edit)| match message {
Message::Diagnostic(diagnostic_message) => Some(to_lsp_diagnostic(
diagnostic_message,
.filter_map(|(message, noqa_edit)| match message.to_rule() {
Some(rule) => Some(to_lsp_diagnostic(
rule,
&message,
noqa_edit,
&source_kind,
locator.to_index(),
encoding,
)),
Message::SyntaxError(_) => {
None => {
if show_syntax_errors {
Some(syntax_error_to_lsp_diagnostic(
&message,
@ -239,28 +241,24 @@ pub(crate) fn fixes_for_diagnostics(
/// Generates an LSP diagnostic with an associated cell index for the diagnostic to go in.
/// If the source kind is a text document, the cell index will always be `0`.
fn to_lsp_diagnostic(
diagnostic: DiagnosticMessage,
rule: Rule,
diagnostic: &Message,
noqa_edit: Option<Edit>,
source_kind: &SourceKind,
index: &LineIndex,
encoding: PositionEncoding,
) -> (usize, lsp_types::Diagnostic) {
let rule = diagnostic.rule();
let DiagnosticMessage {
range: diagnostic_range,
fix,
name,
body,
suggestion,
..
} = diagnostic;
let diagnostic_range = diagnostic.range();
let name = diagnostic.name();
let body = diagnostic.body().to_string();
let fix = diagnostic.fix();
let suggestion = diagnostic.suggestion();
let fix = fix.and_then(|fix| fix.applies(Applicability::Unsafe).then_some(fix));
let data = (fix.is_some() || noqa_edit.is_some())
.then(|| {
let edits = fix
.as_ref()
.into_iter()
.flat_map(Fix::edits)
.map(|edit| lsp_types::TextEdit {
@ -273,7 +271,7 @@ fn to_lsp_diagnostic(
new_text: noqa_edit.into_content().unwrap_or_default().into_string(),
});
serde_json::to_value(AssociatedDiagnosticData {
title: suggestion.unwrap_or_else(|| name.to_string()),
title: suggestion.unwrap_or(name).to_string(),
noqa_edit,
edits,
code: rule.noqa_code().to_string(),


@ -1,7 +1,6 @@
use std::path::Path;
use js_sys::Error;
use ruff_linter::message::{DiagnosticMessage, Message};
use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};
use wasm_bindgen::prelude::*;
@ -12,7 +11,6 @@ use ruff_linter::Locator;
use ruff_linter::directives;
use ruff_linter::line_width::{IndentWidth, LineLength};
use ruff_linter::linter::check_path;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::{DEFAULT_SELECTORS, DUMMY_VARIABLE_RGX, flags};
use ruff_linter::source_kind::SourceKind;
use ruff_python_ast::{Mod, PySourceType};
@ -209,23 +207,17 @@ impl Workspace {
let messages: Vec<ExpandedMessage> = messages
.into_iter()
.map(|message| match message {
Message::Diagnostic(m) => {
let rule = m.rule();
let DiagnosticMessage {
body,
suggestion,
range,
fix,
..
} = m;
ExpandedMessage {
code: Some(rule.noqa_code().to_string()),
message: body,
.map(|msg| {
let message = msg.body().to_string();
let range = msg.range();
match msg.to_noqa_code() {
Some(code) => ExpandedMessage {
code: Some(code.to_string()),
message,
start_location: source_code.line_column(range.start()).into(),
end_location: source_code.line_column(range.end()).into(),
fix: fix.map(|fix| ExpandedFix {
message: suggestion,
fix: msg.fix().map(|fix| ExpandedFix {
message: msg.suggestion().map(ToString::to_string),
edits: fix
.edits()
.iter()
@ -236,15 +228,15 @@ impl Workspace {
})
.collect(),
}),
}
},
None => ExpandedMessage {
code: None,
message,
start_location: source_code.line_column(range.start()).into(),
end_location: source_code.line_column(range.end()).into(),
fix: None,
},
}
Message::SyntaxError(_) => ExpandedMessage {
code: None,
message: message.body().to_string(),
start_location: source_code.line_column(message.range().start()).into(),
end_location: source_code.line_column(message.range().end()).into(),
fix: None,
},
})
.collect();