Convert OldDiagnostic::noqa_code to an Option<String> (#18946)

## Summary

I think this should be the last step before combining `OldDiagnostic`
and `ruff_db::Diagnostic`. We can't store a `NoqaCode` on
`ruff_db::Diagnostic`, so I converted the `noqa_code` field to an
`Option<String>` and then propagated this change to all of the callers.

I tried to use `&str` everywhere it was possible, so I think the
remaining `to_string` calls are necessary. I spent some time trying to
convert _everything_ to `&str` but ran into lifetime issues, especially
in the `FixTable`. Maybe we can take another look at that if it causes a
performance regression, but hopefully these paths aren't too hot. We
also avoid some `to_string` calls, so it might even out a bit too.

## Test Plan

Existing tests

---------

Co-authored-by: Micha Reiser <micha@reiser.io>
Authored by Brent Westbrook on 2025-06-27 11:36:55 -04:00; committed by GitHub.
parent efcb63fe3a
commit 96f3c8d1ab
Signed with GPG key ID B5690EEEBB952194 (no known key found for this signature in the database).
22 changed files with 217 additions and 148 deletions

1
Cargo.lock generated
View file

@ -2946,6 +2946,7 @@ dependencies = [
"fern", "fern",
"glob", "glob",
"globset", "globset",
"hashbrown 0.15.4",
"imperative", "imperative",
"insta", "insta",
"is-macro", "is-macro",

View file

@ -6,7 +6,6 @@ use anyhow::Result;
use bitflags::bitflags; use bitflags::bitflags;
use colored::Colorize; use colored::Colorize;
use itertools::{Itertools, iterate}; use itertools::{Itertools, iterate};
use ruff_linter::codes::NoqaCode;
use ruff_linter::linter::FixTable; use ruff_linter::linter::FixTable;
use serde::Serialize; use serde::Serialize;
@ -15,7 +14,7 @@ use ruff_linter::logging::LogLevel;
use ruff_linter::message::{ use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter, JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
SarifEmitter, TextEmitter, SarifEmitter, SecondaryCode, TextEmitter,
}; };
use ruff_linter::notify_user; use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self}; use ruff_linter::settings::flags::{self};
@ -36,8 +35,8 @@ bitflags! {
} }
#[derive(Serialize)] #[derive(Serialize)]
struct ExpandedStatistics { struct ExpandedStatistics<'a> {
code: Option<NoqaCode>, code: Option<&'a SecondaryCode>,
name: &'static str, name: &'static str,
count: usize, count: usize,
fixable: bool, fixable: bool,
@ -303,11 +302,12 @@ impl Printer {
let statistics: Vec<ExpandedStatistics> = diagnostics let statistics: Vec<ExpandedStatistics> = diagnostics
.inner .inner
.iter() .iter()
.map(|message| (message.noqa_code(), message)) .map(|message| (message.secondary_code(), message))
.sorted_by_key(|(code, message)| (*code, message.fixable())) .sorted_by_key(|(code, message)| (*code, message.fixable()))
.fold( .fold(
vec![], vec![],
|mut acc: Vec<((Option<NoqaCode>, &OldDiagnostic), usize)>, (code, message)| { |mut acc: Vec<((Option<&SecondaryCode>, &OldDiagnostic), usize)>,
(code, message)| {
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() { if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
if *prev_code == code { if *prev_code == code {
*count += 1; *count += 1;
@ -349,12 +349,7 @@ impl Printer {
); );
let code_width = statistics let code_width = statistics
.iter() .iter()
.map(|statistic| { .map(|statistic| statistic.code.map_or(0, |s| s.len()))
statistic
.code
.map_or_else(String::new, |rule| rule.to_string())
.len()
})
.max() .max()
.unwrap(); .unwrap();
let any_fixable = statistics.iter().any(|statistic| statistic.fixable); let any_fixable = statistics.iter().any(|statistic| statistic.fixable);
@ -370,7 +365,8 @@ impl Printer {
statistic.count.to_string().bold(), statistic.count.to_string().bold(),
statistic statistic
.code .code
.map_or_else(String::new, |rule| rule.to_string()) .map(SecondaryCode::as_str)
.unwrap_or_default()
.red() .red()
.bold(), .bold(),
if any_fixable { if any_fixable {

View file

@ -38,6 +38,7 @@ colored = { workspace = true }
fern = { workspace = true } fern = { workspace = true }
glob = { workspace = true } glob = { workspace = true }
globset = { workspace = true } globset = { workspace = true }
hashbrown = { workspace = true }
imperative = { workspace = true } imperative = { workspace = true }
is-macro = { workspace = true } is-macro = { workspace = true }
is-wsl = { workspace = true } is-wsl = { workspace = true }

View file

@ -35,40 +35,35 @@ pub(crate) fn check_noqa(
// Identify any codes that are globally exempted (within the current file). // Identify any codes that are globally exempted (within the current file).
let file_noqa_directives = let file_noqa_directives =
FileNoqaDirectives::extract(locator, comment_ranges, &settings.external, path); FileNoqaDirectives::extract(locator, comment_ranges, &settings.external, path);
let exemption = FileExemption::from(&file_noqa_directives);
// Extract all `noqa` directives. // Extract all `noqa` directives.
let mut noqa_directives = let mut noqa_directives =
NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator); NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator);
if file_noqa_directives.is_empty() && noqa_directives.is_empty() {
return Vec::new();
}
let exemption = FileExemption::from(&file_noqa_directives);
// Indices of diagnostics that were ignored by a `noqa` directive. // Indices of diagnostics that were ignored by a `noqa` directive.
let mut ignored_diagnostics = vec![]; let mut ignored_diagnostics = vec![];
// Remove any ignored diagnostics. // Remove any ignored diagnostics.
'outer: for (index, diagnostic) in context.iter().enumerate() { 'outer: for (index, diagnostic) in context.iter().enumerate() {
// Can't ignore syntax errors. // Can't ignore syntax errors.
let Some(code) = diagnostic.noqa_code() else { let Some(code) = diagnostic.secondary_code() else {
continue; continue;
}; };
if code == Rule::BlanketNOQA.noqa_code() { if *code == Rule::BlanketNOQA.noqa_code() {
continue; continue;
} }
match &exemption { if exemption.contains_secondary_code(code) {
FileExemption::All(_) => {
// If the file is exempted, ignore all diagnostics.
ignored_diagnostics.push(index); ignored_diagnostics.push(index);
continue; continue;
} }
FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, ignore it.
if codes.contains(&&code) {
ignored_diagnostics.push(index);
continue;
}
}
}
let noqa_offsets = diagnostic let noqa_offsets = diagnostic
.parent .parent
@ -82,13 +77,21 @@ pub(crate) fn check_noqa(
{ {
let suppressed = match &directive_line.directive { let suppressed = match &directive_line.directive {
Directive::All(_) => { Directive::All(_) => {
directive_line.matches.push(code); let Ok(rule) = Rule::from_code(code) else {
debug_assert!(false, "Invalid secondary code `{code}`");
continue;
};
directive_line.matches.push(rule);
ignored_diagnostics.push(index); ignored_diagnostics.push(index);
true true
} }
Directive::Codes(directive) => { Directive::Codes(directive) => {
if directive.includes(code) { if directive.includes(code) {
directive_line.matches.push(code); let Ok(rule) = Rule::from_code(code) else {
debug_assert!(false, "Invalid secondary code `{code}`");
continue;
};
directive_line.matches.push(rule);
ignored_diagnostics.push(index); ignored_diagnostics.push(index);
true true
} else { } else {
@ -147,11 +150,11 @@ pub(crate) fn check_noqa(
if seen_codes.insert(original_code) { if seen_codes.insert(original_code) {
let is_code_used = if is_file_level { let is_code_used = if is_file_level {
context context.iter().any(|diag| {
.iter() diag.secondary_code().is_some_and(|noqa| *noqa == code)
.any(|diag| diag.noqa_code().is_some_and(|noqa| noqa == code)) })
} else { } else {
matches.iter().any(|match_| *match_ == code) matches.iter().any(|match_| match_.noqa_code() == code)
} || settings } || settings
.external .external
.iter() .iter()

View file

@ -46,6 +46,12 @@ impl PartialEq<&str> for NoqaCode {
} }
} }
impl PartialEq<NoqaCode> for &str {
fn eq(&self, other: &NoqaCode) -> bool {
other.eq(self)
}
}
impl serde::Serialize for NoqaCode { impl serde::Serialize for NoqaCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where where

View file

@ -63,7 +63,7 @@ fn apply_fixes<'a>(
let mut source_map = SourceMap::default(); let mut source_map = SourceMap::default();
for (code, name, fix) in diagnostics for (code, name, fix) in diagnostics
.filter_map(|msg| msg.noqa_code().map(|code| (code, msg.name(), msg))) .filter_map(|msg| msg.secondary_code().map(|code| (code, msg.name(), msg)))
.filter_map(|(code, name, diagnostic)| diagnostic.fix().map(|fix| (code, name, fix))) .filter_map(|(code, name, diagnostic)| diagnostic.fix().map(|fix| (code, name, fix)))
.sorted_by(|(_, name1, fix1), (_, name2, fix2)| cmp_fix(name1, name2, fix1, fix2)) .sorted_by(|(_, name1, fix1), (_, name2, fix2)| cmp_fix(name1, name2, fix1, fix2))
{ {

View file

@ -1,12 +1,11 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::path::Path; use std::path::Path;
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
use colored::Colorize; use colored::Colorize;
use itertools::Itertools; use itertools::Itertools;
use ruff_python_parser::semantic_errors::SemanticSyntaxError; use ruff_python_parser::semantic_errors::SemanticSyntaxError;
use rustc_hash::FxHashMap; use rustc_hash::FxBuildHasher;
use ruff_notebook::Notebook; use ruff_notebook::Notebook;
use ruff_python_ast::{ModModule, PySourceType, PythonVersion}; use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
@ -23,10 +22,10 @@ use crate::checkers::imports::check_imports;
use crate::checkers::noqa::check_noqa; use crate::checkers::noqa::check_noqa;
use crate::checkers::physical_lines::check_physical_lines; use crate::checkers::physical_lines::check_physical_lines;
use crate::checkers::tokens::check_tokens; use crate::checkers::tokens::check_tokens;
use crate::codes::NoqaCode;
use crate::directives::Directives; use crate::directives::Directives;
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens}; use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
use crate::fix::{FixResult, fix_file}; use crate::fix::{FixResult, fix_file};
use crate::message::SecondaryCode;
use crate::noqa::add_noqa; use crate::noqa::add_noqa;
use crate::package::PackageRoot; use crate::package::PackageRoot;
use crate::preview::is_py314_support_enabled; use crate::preview::is_py314_support_enabled;
@ -95,25 +94,25 @@ struct FixCount {
/// A mapping from a noqa code to the corresponding lint name and a count of applied fixes. /// A mapping from a noqa code to the corresponding lint name and a count of applied fixes.
#[derive(Debug, Default, PartialEq)] #[derive(Debug, Default, PartialEq)]
pub struct FixTable(FxHashMap<NoqaCode, FixCount>); pub struct FixTable(hashbrown::HashMap<SecondaryCode, FixCount, rustc_hash::FxBuildHasher>);
impl FixTable { impl FixTable {
pub fn counts(&self) -> impl Iterator<Item = usize> { pub fn counts(&self) -> impl Iterator<Item = usize> {
self.0.values().map(|fc| fc.count) self.0.values().map(|fc| fc.count)
} }
pub fn entry(&mut self, code: NoqaCode) -> FixTableEntry { pub fn entry<'a>(&'a mut self, code: &'a SecondaryCode) -> FixTableEntry<'a> {
FixTableEntry(self.0.entry(code)) FixTableEntry(self.0.entry_ref(code))
} }
pub fn iter(&self) -> impl Iterator<Item = (NoqaCode, &'static str, usize)> { pub fn iter(&self) -> impl Iterator<Item = (&SecondaryCode, &'static str, usize)> {
self.0 self.0
.iter() .iter()
.map(|(code, FixCount { rule_name, count })| (*code, *rule_name, *count)) .map(|(code, FixCount { rule_name, count })| (code, *rule_name, *count))
} }
pub fn keys(&self) -> impl Iterator<Item = NoqaCode> { pub fn keys(&self) -> impl Iterator<Item = &SecondaryCode> {
self.0.keys().copied() self.0.keys()
} }
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
@ -121,7 +120,9 @@ impl FixTable {
} }
} }
pub struct FixTableEntry<'a>(Entry<'a, NoqaCode, FixCount>); pub struct FixTableEntry<'a>(
hashbrown::hash_map::EntryRef<'a, 'a, SecondaryCode, SecondaryCode, FixCount, FxBuildHasher>,
);
impl<'a> FixTableEntry<'a> { impl<'a> FixTableEntry<'a> {
pub fn or_default(self, rule_name: &'static str) -> &'a mut usize { pub fn or_default(self, rule_name: &'static str) -> &'a mut usize {
@ -678,18 +679,16 @@ pub fn lint_fix<'a>(
} }
} }
fn collect_rule_codes(rules: impl IntoIterator<Item = NoqaCode>) -> String { fn collect_rule_codes<T>(rules: impl IntoIterator<Item = T>) -> String
rules where
.into_iter() T: Ord + PartialEq + std::fmt::Display,
.map(|rule| rule.to_string()) {
.sorted_unstable() rules.into_iter().sorted_unstable().dedup().join(", ")
.dedup()
.join(", ")
} }
#[expect(clippy::print_stderr)] #[expect(clippy::print_stderr)]
fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[OldDiagnostic]) { fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[OldDiagnostic]) {
let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::noqa_code)); let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::secondary_code));
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
eprintln!( eprintln!(
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---", "{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
@ -721,11 +720,11 @@ This indicates a bug in Ruff. If you could open an issue at:
} }
#[expect(clippy::print_stderr)] #[expect(clippy::print_stderr)]
fn report_fix_syntax_error( fn report_fix_syntax_error<'a>(
path: &Path, path: &Path,
transformed: &str, transformed: &str,
error: &ParseError, error: &ParseError,
rules: impl IntoIterator<Item = NoqaCode>, rules: impl IntoIterator<Item = &'a SecondaryCode>,
) { ) {
let codes = collect_rule_codes(rules); let codes = collect_rule_codes(rules);
if cfg!(debug_assertions) { if cfg!(debug_assertions) {

View file

@ -33,7 +33,7 @@ impl Emitter for AzureEmitter {
line = location.line, line = location.line,
col = location.column, col = location.column,
code = diagnostic code = diagnostic
.noqa_code() .secondary_code()
.map_or_else(String::new, |code| format!("code={code};")), .map_or_else(String::new, |code| format!("code={code};")),
body = diagnostic.body(), body = diagnostic.body(),
)?; )?;

View file

@ -33,7 +33,7 @@ impl Emitter for GithubEmitter {
writer, writer,
"::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::", "::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
code = diagnostic code = diagnostic
.noqa_code() .secondary_code()
.map_or_else(String::new, |code| format!(" ({code})")), .map_or_else(String::new, |code| format!(" ({code})")),
file = diagnostic.filename(), file = diagnostic.filename(),
row = source_location.line, row = source_location.line,
@ -50,7 +50,7 @@ impl Emitter for GithubEmitter {
column = location.column, column = location.column,
)?; )?;
if let Some(code) = diagnostic.noqa_code() { if let Some(code) = diagnostic.secondary_code() {
write!(writer, " {code}")?; write!(writer, " {code}")?;
} }

View file

@ -90,18 +90,15 @@ impl Serialize for SerializedMessages<'_> {
} }
fingerprints.insert(message_fingerprint); fingerprints.insert(message_fingerprint);
let (description, check_name) = if let Some(code) = diagnostic.noqa_code() { let (description, check_name) = if let Some(code) = diagnostic.secondary_code() {
(diagnostic.body().to_string(), code.to_string()) (diagnostic.body().to_string(), code.as_str())
} else { } else {
let description = diagnostic.body(); let description = diagnostic.body();
let description_without_prefix = description let description_without_prefix = description
.strip_prefix("SyntaxError: ") .strip_prefix("SyntaxError: ")
.unwrap_or(description); .unwrap_or(description);
( (description_without_prefix.to_string(), "syntax-error")
description_without_prefix.to_string(),
"syntax-error".to_string(),
)
}; };
let value = json!({ let value = json!({

View file

@ -87,7 +87,7 @@ pub(crate) fn message_to_json_value(message: &OldDiagnostic, context: &EmitterCo
} }
json!({ json!({
"code": message.noqa_code().map(|code| code.to_string()), "code": message.secondary_code(),
"url": message.to_url(), "url": message.to_url(),
"message": message.body(), "message": message.body(),
"fix": fix, "fix": fix,

View file

@ -59,7 +59,7 @@ impl Emitter for JunitEmitter {
body = message.body() body = message.body()
)); ));
let mut case = TestCase::new( let mut case = TestCase::new(
if let Some(code) = message.noqa_code() { if let Some(code) = message.secondary_code() {
format!("org.ruff.{code}") format!("org.ruff.{code}")
} else { } else {
"org.ruff".to_string() "org.ruff".to_string()

View file

@ -62,7 +62,7 @@ pub struct OldDiagnostic {
pub fix: Option<Fix>, pub fix: Option<Fix>,
pub parent: Option<TextSize>, pub parent: Option<TextSize>,
pub(crate) noqa_offset: Option<TextSize>, pub(crate) noqa_offset: Option<TextSize>,
pub(crate) noqa_code: Option<NoqaCode>, pub(crate) secondary_code: Option<SecondaryCode>,
} }
impl OldDiagnostic { impl OldDiagnostic {
@ -79,7 +79,7 @@ impl OldDiagnostic {
fix: None, fix: None,
parent: None, parent: None,
noqa_offset: None, noqa_offset: None,
noqa_code: None, secondary_code: None,
} }
} }
@ -115,7 +115,7 @@ impl OldDiagnostic {
fix, fix,
parent, parent,
noqa_offset, noqa_offset,
noqa_code: Some(rule.noqa_code()), secondary_code: Some(SecondaryCode(rule.noqa_code().to_string())),
} }
} }
@ -247,9 +247,9 @@ impl OldDiagnostic {
self.fix().is_some() self.fix().is_some()
} }
/// Returns the [`NoqaCode`] corresponding to the diagnostic message. /// Returns the noqa code for the diagnostic message as a string.
pub fn noqa_code(&self) -> Option<NoqaCode> { pub fn secondary_code(&self) -> Option<&SecondaryCode> {
self.noqa_code self.secondary_code.as_ref()
} }
/// Returns the URL for the rule documentation, if it exists. /// Returns the URL for the rule documentation, if it exists.
@ -384,6 +384,68 @@ impl<'a> EmitterContext<'a> {
} }
} }
/// A secondary identifier for a lint diagnostic.
///
/// For Ruff rules this means the noqa code.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, serde::Serialize)]
#[serde(transparent)]
pub struct SecondaryCode(String);
impl SecondaryCode {
pub fn new(code: String) -> Self {
Self(code)
}
pub fn as_str(&self) -> &str {
&self.0
}
}
impl Display for SecondaryCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
impl std::ops::Deref for SecondaryCode {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl PartialEq<&str> for SecondaryCode {
fn eq(&self, other: &&str) -> bool {
self.0 == *other
}
}
impl PartialEq<SecondaryCode> for &str {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}
impl PartialEq<NoqaCode> for SecondaryCode {
fn eq(&self, other: &NoqaCode) -> bool {
&self.as_str() == other
}
}
impl PartialEq<SecondaryCode> for NoqaCode {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}
// for `hashbrown::EntryRef`
impl From<&SecondaryCode> for SecondaryCode {
fn from(value: &SecondaryCode) -> Self {
value.clone()
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;

View file

@ -26,7 +26,7 @@ impl Emitter for PylintEmitter {
diagnostic.compute_start_location().line diagnostic.compute_start_location().line
}; };
let body = if let Some(code) = diagnostic.noqa_code() { let body = if let Some(code) = diagnostic.secondary_code() {
format!("[{code}] {body}", body = diagnostic.body()) format!("[{code}] {body}", body = diagnostic.body())
} else { } else {
diagnostic.body().to_string() diagnostic.body().to_string()

View file

@ -71,7 +71,7 @@ fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
"range": rdjson_range(start_location, end_location), "range": rdjson_range(start_location, end_location),
}, },
"code": { "code": {
"value": message.noqa_code().map(|code| code.to_string()), "value": message.secondary_code(),
"url": message.to_url(), "url": message.to_url(),
}, },
"suggestions": rdjson_suggestions(fix.edits(), &source_code), "suggestions": rdjson_suggestions(fix.edits(), &source_code),
@ -84,7 +84,7 @@ fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
"range": rdjson_range(start_location, end_location), "range": rdjson_range(start_location, end_location),
}, },
"code": { "code": {
"value": message.noqa_code().map(|code| code.to_string()), "value": message.secondary_code(),
"url": message.to_url(), "url": message.to_url(),
}, },
}) })

View file

@ -8,9 +8,8 @@ use serde_json::json;
use ruff_source_file::OneIndexed; use ruff_source_file::OneIndexed;
use crate::VERSION; use crate::VERSION;
use crate::codes::NoqaCode;
use crate::fs::normalize_path; use crate::fs::normalize_path;
use crate::message::{Emitter, EmitterContext, OldDiagnostic}; use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
use crate::registry::{Linter, RuleNamespace}; use crate::registry::{Linter, RuleNamespace};
pub struct SarifEmitter; pub struct SarifEmitter;
@ -29,7 +28,7 @@ impl Emitter for SarifEmitter {
let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.code).collect(); let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.code).collect();
let mut rules: Vec<SarifRule> = unique_rules.into_iter().map(SarifRule::from).collect(); let mut rules: Vec<SarifRule> = unique_rules.into_iter().map(SarifRule::from).collect();
rules.sort_by(|a, b| a.code.cmp(&b.code)); rules.sort_by(|a, b| a.code.cmp(b.code));
let output = json!({ let output = json!({
"$schema": "https://json.schemastore.org/sarif-2.1.0.json", "$schema": "https://json.schemastore.org/sarif-2.1.0.json",
@ -54,26 +53,25 @@ impl Emitter for SarifEmitter {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct SarifRule<'a> { struct SarifRule<'a> {
name: &'a str, name: &'a str,
code: String, code: &'a SecondaryCode,
linter: &'a str, linter: &'a str,
summary: &'a str, summary: &'a str,
explanation: Option<&'a str>, explanation: Option<&'a str>,
url: Option<String>, url: Option<String>,
} }
impl From<NoqaCode> for SarifRule<'_> { impl<'a> From<&'a SecondaryCode> for SarifRule<'a> {
fn from(code: NoqaCode) -> Self { fn from(code: &'a SecondaryCode) -> Self {
let code_str = code.to_string();
// This is a manual re-implementation of Rule::from_code, but we also want the Linter. This // This is a manual re-implementation of Rule::from_code, but we also want the Linter. This
// avoids calling Linter::parse_code twice. // avoids calling Linter::parse_code twice.
let (linter, suffix) = Linter::parse_code(&code_str).unwrap(); let (linter, suffix) = Linter::parse_code(code).unwrap();
let rule = linter let rule = linter
.all_rules() .all_rules()
.find(|rule| rule.noqa_code().suffix() == suffix) .find(|rule| rule.noqa_code().suffix() == suffix)
.expect("Expected a valid noqa code corresponding to a rule"); .expect("Expected a valid noqa code corresponding to a rule");
Self { Self {
name: rule.into(), name: rule.into(),
code: code_str, code,
linter: linter.name(), linter: linter.name(),
summary: rule.message_formats()[0], summary: rule.message_formats()[0],
explanation: rule.explanation(), explanation: rule.explanation(),
@ -111,8 +109,8 @@ impl Serialize for SarifRule<'_> {
} }
#[derive(Debug)] #[derive(Debug)]
struct SarifResult { struct SarifResult<'a> {
code: Option<NoqaCode>, code: Option<&'a SecondaryCode>,
level: String, level: String,
message: String, message: String,
uri: String, uri: String,
@ -122,14 +120,14 @@ struct SarifResult {
end_column: OneIndexed, end_column: OneIndexed,
} }
impl SarifResult { impl<'a> SarifResult<'a> {
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
fn from_message(message: &OldDiagnostic) -> Result<Self> { fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
let start_location = message.compute_start_location(); let start_location = message.compute_start_location();
let end_location = message.compute_end_location(); let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename()); let path = normalize_path(&*message.filename());
Ok(Self { Ok(Self {
code: message.noqa_code(), code: message.secondary_code(),
level: "error".to_string(), level: "error".to_string(),
message: message.body().to_string(), message: message.body().to_string(),
uri: url::Url::from_file_path(&path) uri: url::Url::from_file_path(&path)
@ -144,12 +142,12 @@ impl SarifResult {
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
#[expect(clippy::unnecessary_wraps)] #[expect(clippy::unnecessary_wraps)]
fn from_message(message: &OldDiagnostic) -> Result<Self> { fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
let start_location = message.compute_start_location(); let start_location = message.compute_start_location();
let end_location = message.compute_end_location(); let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename()); let path = normalize_path(&*message.filename());
Ok(Self { Ok(Self {
code: message.noqa_code(), code: message.secondary_code(),
level: "error".to_string(), level: "error".to_string(),
message: message.body().to_string(), message: message.body().to_string(),
uri: path.display().to_string(), uri: path.display().to_string(),
@ -161,7 +159,7 @@ impl SarifResult {
} }
} }
impl Serialize for SarifResult { impl Serialize for SarifResult<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
@ -184,7 +182,7 @@ impl Serialize for SarifResult {
} }
} }
}], }],
"ruleId": self.code.map(|code| code.to_string()), "ruleId": self.code,
}) })
.serialize(serializer) .serialize(serializer)
} }

View file

@ -14,7 +14,7 @@ use crate::Locator;
use crate::fs::relativize_path; use crate::fs::relativize_path;
use crate::line_width::{IndentWidth, LineWidthBuilder}; use crate::line_width::{IndentWidth, LineWidthBuilder};
use crate::message::diff::Diff; use crate::message::diff::Diff;
use crate::message::{Emitter, EmitterContext, OldDiagnostic}; use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
use crate::settings::types::UnsafeFixes; use crate::settings::types::UnsafeFixes;
bitflags! { bitflags! {
@ -151,8 +151,8 @@ impl Display for RuleCodeAndBody<'_> {
if let Some(fix) = self.message.fix() { if let Some(fix) = self.message.fix() {
// Do not display an indicator for inapplicable fixes // Do not display an indicator for inapplicable fixes
if fix.applies(self.unsafe_fixes.required_applicability()) { if fix.applies(self.unsafe_fixes.required_applicability()) {
if let Some(code) = self.message.noqa_code() { if let Some(code) = self.message.secondary_code() {
write!(f, "{} ", code.to_string().red().bold())?; write!(f, "{} ", code.red().bold())?;
} }
return write!( return write!(
f, f,
@ -164,11 +164,11 @@ impl Display for RuleCodeAndBody<'_> {
} }
} }
if let Some(code) = self.message.noqa_code() { if let Some(code) = self.message.secondary_code() {
write!( write!(
f, f,
"{code} {body}", "{code} {body}",
code = code.to_string().red().bold(), code = code.red().bold(),
body = self.message.body(), body = self.message.body(),
) )
} else { } else {
@ -254,8 +254,9 @@ impl Display for MessageCodeFrame<'_> {
let label = self let label = self
.message .message
.noqa_code() .secondary_code()
.map_or_else(String::new, |code| code.to_string()); .map(SecondaryCode::as_str)
.unwrap_or_default();
let line_start = self.notebook_index.map_or_else( let line_start = self.notebook_index.map_or_else(
|| start_index.get(), || start_index.get(),
@ -269,7 +270,7 @@ impl Display for MessageCodeFrame<'_> {
let span = usize::from(source.annotation_range.start()) let span = usize::from(source.annotation_range.start())
..usize::from(source.annotation_range.end()); ..usize::from(source.annotation_range.end());
let annotation = Level::Error.span(span).label(&label); let annotation = Level::Error.span(span).label(label);
let snippet = Snippet::source(&source.text) let snippet = Snippet::source(&source.text)
.line_start(line_start) .line_start(line_start)
.annotation(annotation) .annotation(annotation)

View file

@ -16,9 +16,8 @@ use rustc_hash::FxHashSet;
use crate::Edit; use crate::Edit;
use crate::Locator; use crate::Locator;
use crate::codes::NoqaCode;
use crate::fs::relativize_path; use crate::fs::relativize_path;
use crate::message::OldDiagnostic; use crate::message::{OldDiagnostic, SecondaryCode};
use crate::registry::Rule; use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target; use crate::rule_redirects::get_redirect_target;
@ -106,9 +105,9 @@ impl Codes<'_> {
/// Returns `true` if the string list of `codes` includes `code` (or an alias /// Returns `true` if the string list of `codes` includes `code` (or an alias
/// thereof). /// thereof).
pub(crate) fn includes(&self, needle: NoqaCode) -> bool { pub(crate) fn includes<T: for<'a> PartialEq<&'a str>>(&self, needle: &T) -> bool {
self.iter() self.iter()
.any(|code| needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str())) .any(|code| *needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str()))
} }
} }
@ -140,48 +139,55 @@ pub(crate) fn rule_is_ignored(
Ok(Some(NoqaLexerOutput { Ok(Some(NoqaLexerOutput {
directive: Directive::Codes(codes), directive: Directive::Codes(codes),
.. ..
})) => codes.includes(code.noqa_code()), })) => codes.includes(&code.noqa_code()),
_ => false, _ => false,
} }
} }
/// A summary of the file-level exemption as extracted from [`FileNoqaDirectives`]. /// A summary of the file-level exemption as extracted from [`FileNoqaDirectives`].
#[derive(Debug)] #[derive(Debug)]
pub(crate) enum FileExemption<'a> { pub(crate) enum FileExemption {
/// The file is exempt from all rules. /// The file is exempt from all rules.
All(Vec<&'a NoqaCode>), All(Vec<Rule>),
/// The file is exempt from the given rules. /// The file is exempt from the given rules.
Codes(Vec<&'a NoqaCode>), Codes(Vec<Rule>),
} }
impl FileExemption<'_> { impl FileExemption {
/// Returns `true` if the file is exempt from the given rule. /// Returns `true` if the file is exempt from the given rule, as identified by its noqa code.
pub(crate) fn includes(&self, needle: Rule) -> bool { pub(crate) fn contains_secondary_code(&self, needle: &SecondaryCode) -> bool {
let needle = needle.noqa_code();
match self { match self {
FileExemption::All(_) => true, FileExemption::All(_) => true,
FileExemption::Codes(codes) => codes.iter().any(|code| needle == **code), FileExemption::Codes(codes) => codes.iter().any(|code| *needle == code.noqa_code()),
}
}
/// Returns `true` if the file is exempt from the given rule.
pub(crate) fn includes(&self, needle: Rule) -> bool {
match self {
FileExemption::All(_) => true,
FileExemption::Codes(codes) => codes.contains(&needle),
} }
} }
/// Returns `true` if the file exemption lists the rule directly, rather than via a blanket /// Returns `true` if the file exemption lists the rule directly, rather than via a blanket
/// exemption. /// exemption.
pub(crate) fn enumerates(&self, needle: Rule) -> bool { pub(crate) fn enumerates(&self, needle: Rule) -> bool {
let needle = needle.noqa_code();
let codes = match self { let codes = match self {
FileExemption::All(codes) => codes, FileExemption::All(codes) => codes,
FileExemption::Codes(codes) => codes, FileExemption::Codes(codes) => codes,
}; };
codes.iter().any(|code| needle == **code) codes.contains(&needle)
} }
} }
impl<'a> From<&'a FileNoqaDirectives<'a>> for FileExemption<'a> { impl<'a> From<&'a FileNoqaDirectives<'a>> for FileExemption {
fn from(directives: &'a FileNoqaDirectives) -> Self { fn from(directives: &'a FileNoqaDirectives) -> Self {
let codes = directives let codes = directives
.lines() .lines()
.iter() .iter()
.flat_map(|line| &line.matches) .flat_map(|line| &line.matches)
.copied()
.collect(); .collect();
if directives if directives
.lines() .lines()
@ -203,7 +209,7 @@ pub(crate) struct FileNoqaDirectiveLine<'a> {
/// The blanket noqa directive. /// The blanket noqa directive.
pub(crate) parsed_file_exemption: Directive<'a>, pub(crate) parsed_file_exemption: Directive<'a>,
/// The codes that are ignored by the parsed exemptions. /// The codes that are ignored by the parsed exemptions.
pub(crate) matches: Vec<NoqaCode>, pub(crate) matches: Vec<Rule>,
} }
impl Ranged for FileNoqaDirectiveLine<'_> { impl Ranged for FileNoqaDirectiveLine<'_> {
@ -270,7 +276,7 @@ impl<'a> FileNoqaDirectives<'a> {
if let Ok(rule) = Rule::from_code(get_redirect_target(code).unwrap_or(code)) if let Ok(rule) = Rule::from_code(get_redirect_target(code).unwrap_or(code))
{ {
Some(rule.noqa_code()) Some(rule)
} else { } else {
#[expect(deprecated)] #[expect(deprecated)]
let line = locator.compute_line_index(range.start()); let line = locator.compute_line_index(range.start());
@ -303,6 +309,10 @@ impl<'a> FileNoqaDirectives<'a> {
pub(crate) fn lines(&self) -> &[FileNoqaDirectiveLine] { pub(crate) fn lines(&self) -> &[FileNoqaDirectiveLine] {
&self.0 &self.0
} }
pub(crate) fn is_empty(&self) -> bool {
self.0.is_empty()
}
} }
/// Output of lexing a `noqa` directive. /// Output of lexing a `noqa` directive.
@ -830,7 +840,7 @@ fn build_noqa_edits_by_line<'a>(
struct NoqaComment<'a> { struct NoqaComment<'a> {
line: TextSize, line: TextSize,
code: NoqaCode, code: &'a SecondaryCode,
directive: Option<&'a Directive<'a>>, directive: Option<&'a Directive<'a>>,
} }
@ -846,25 +856,15 @@ fn find_noqa_comments<'a>(
// Mark any non-ignored diagnostics. // Mark any non-ignored diagnostics.
for message in diagnostics { for message in diagnostics {
let Some(code) = message.noqa_code() else { let Some(code) = message.secondary_code() else {
comments_by_line.push(None); comments_by_line.push(None);
continue; continue;
}; };
match &exemption { if exemption.contains_secondary_code(code) {
FileExemption::All(_) => {
// If the file is exempted, don't add any noqa directives.
comments_by_line.push(None); comments_by_line.push(None);
continue; continue;
} }
FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, don't add a noqa directive.
if codes.contains(&&code) {
comments_by_line.push(None);
continue;
}
}
}
// Is the violation ignored by a `noqa` directive on the parent line? // Is the violation ignored by a `noqa` directive on the parent line?
if let Some(parent) = message.parent { if let Some(parent) = message.parent {
@ -921,7 +921,7 @@ fn find_noqa_comments<'a>(
struct NoqaEdit<'a> { struct NoqaEdit<'a> {
edit_range: TextRange, edit_range: TextRange,
noqa_codes: FxHashSet<NoqaCode>, noqa_codes: FxHashSet<&'a SecondaryCode>,
codes: Option<&'a Codes<'a>>, codes: Option<&'a Codes<'a>>,
line_ending: LineEnding, line_ending: LineEnding,
} }
@ -942,13 +942,13 @@ impl NoqaEdit<'_> {
writer, writer,
self.noqa_codes self.noqa_codes
.iter() .iter()
.map(ToString::to_string) .map(|code| code.as_str())
.chain(codes.iter().map(ToString::to_string)) .chain(codes.iter().map(Code::as_str))
.sorted_unstable(), .sorted_unstable(),
); );
} }
None => { None => {
push_codes(writer, self.noqa_codes.iter().map(ToString::to_string)); push_codes(writer, self.noqa_codes.iter().sorted_unstable());
} }
} }
write!(writer, "{}", self.line_ending.as_str()).unwrap(); write!(writer, "{}", self.line_ending.as_str()).unwrap();
@ -964,7 +964,7 @@ impl Ranged for NoqaEdit<'_> {
fn generate_noqa_edit<'a>( fn generate_noqa_edit<'a>(
directive: Option<&'a Directive>, directive: Option<&'a Directive>,
offset: TextSize, offset: TextSize,
noqa_codes: FxHashSet<NoqaCode>, noqa_codes: FxHashSet<&'a SecondaryCode>,
locator: &Locator, locator: &Locator,
line_ending: LineEnding, line_ending: LineEnding,
) -> Option<NoqaEdit<'a>> { ) -> Option<NoqaEdit<'a>> {
@ -1017,7 +1017,7 @@ pub(crate) struct NoqaDirectiveLine<'a> {
/// The noqa directive. /// The noqa directive.
pub(crate) directive: Directive<'a>, pub(crate) directive: Directive<'a>,
/// The codes that are ignored by the directive. /// The codes that are ignored by the directive.
pub(crate) matches: Vec<NoqaCode>, pub(crate) matches: Vec<Rule>,
/// Whether the directive applies to `range.end`. /// Whether the directive applies to `range.end`.
pub(crate) includes_end: bool, pub(crate) includes_end: bool,
} }
@ -1142,6 +1142,10 @@ impl<'a> NoqaDirectives<'a> {
pub(crate) fn lines(&self) -> &[NoqaDirectiveLine] { pub(crate) fn lines(&self) -> &[NoqaDirectiveLine] {
&self.inner &self.inner
} }
pub(crate) fn is_empty(&self) -> bool {
self.inner.is_empty()
}
} }
/// Remaps offsets falling into one of the ranges to instead check for a noqa comment on the /// Remaps offsets falling into one of the ranges to instead check for a noqa comment on the

View file

@ -774,9 +774,10 @@ mod tests {
messages.sort_by_key(Ranged::start); messages.sort_by_key(Ranged::start);
let actual = messages let actual = messages
.iter() .iter()
.filter_map(OldDiagnostic::noqa_code) .filter(|msg| !msg.is_syntax_error())
.map(OldDiagnostic::name)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let expected: Vec<_> = expected.iter().map(Rule::noqa_code).collect(); let expected: Vec<_> = expected.iter().map(|rule| rule.name().as_str()).collect();
assert_eq!(actual, expected); assert_eq!(actual, expected);
} }

View file

@ -237,9 +237,9 @@ Source with applied fixes:
let messages = messages let messages = messages
.into_iter() .into_iter()
.filter_map(|msg| Some((msg.noqa_code()?, msg))) .filter_map(|msg| Some((msg.secondary_code()?.to_string(), msg)))
.map(|(code, mut diagnostic)| { .map(|(code, mut diagnostic)| {
let rule = Rule::from_code(&code.to_string()).unwrap(); let rule = Rule::from_code(&code).unwrap();
let fixable = diagnostic.fix().is_some_and(|fix| { let fixable = diagnostic.fix().is_some_and(|fix| {
matches!( matches!(
fix.applicability(), fix.applicability(),

View file

@ -239,7 +239,7 @@ fn to_lsp_diagnostic(
let body = diagnostic.body().to_string(); let body = diagnostic.body().to_string();
let fix = diagnostic.fix(); let fix = diagnostic.fix();
let suggestion = diagnostic.suggestion(); let suggestion = diagnostic.suggestion();
let code = diagnostic.noqa_code(); let code = diagnostic.secondary_code();
let fix = fix.and_then(|fix| fix.applies(Applicability::Unsafe).then_some(fix)); let fix = fix.and_then(|fix| fix.applies(Applicability::Unsafe).then_some(fix));

View file

@ -208,7 +208,7 @@ impl Workspace {
let messages: Vec<ExpandedMessage> = diagnostics let messages: Vec<ExpandedMessage> = diagnostics
.into_iter() .into_iter()
.map(|msg| ExpandedMessage { .map(|msg| ExpandedMessage {
code: msg.noqa_code().map(|code| code.to_string()), code: msg.secondary_code().map(ToString::to_string),
message: msg.body().to_string(), message: msg.body().to_string(),
start_location: source_code.line_column(msg.start()).into(), start_location: source_code.line_column(msg.start()).into(),
end_location: source_code.line_column(msg.end()).into(), end_location: source_code.line_column(msg.end()).into(),