Mirror of https://github.com/astral-sh/ruff.git, synced 2025-11-18 19:31:31 +00:00
Convert OldDiagnostic::noqa_code to an Option<String> (#18946)
## Summary

I think this should be the last step before combining `OldDiagnostic` and `ruff_db::Diagnostic`. We can't store a `NoqaCode` on `ruff_db::Diagnostic`, so I converted the `noqa_code` field to an `Option<String>` and then propagated this change to all of the callers.

I tried to use `&str` everywhere it was possible, so I think the remaining `to_string` calls are necessary. I spent some time trying to convert _everything_ to `&str` but ran into lifetime issues, especially in the `FixTable`. Maybe we can take another look at that if it causes a performance regression, but hopefully these paths aren't too hot. We also avoid some `to_string` calls, so it might even out a bit too.

## Test Plan

Existing tests

---------

Co-authored-by: Micha Reiser <micha@reiser.io>
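For illustration only, here is a minimal standalone sketch of the shape of the change (simplified types; the real `OldDiagnostic` and `SecondaryCode` live in `ruff_linter::message` and carry more fields and impls): the diagnostic now stores an owned string newtype, and callers move from `noqa_code()` returning a `Copy` `NoqaCode` to `secondary_code()` returning a borrowed `&SecondaryCode`.

```rust
use std::fmt;

/// String-backed replacement for the old `NoqaCode` field (sketch of `SecondaryCode`).
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct SecondaryCode(String);

impl SecondaryCode {
    pub fn new(code: String) -> Self {
        Self(code)
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl fmt::Display for SecondaryCode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}

/// Stand-in for the real `OldDiagnostic` struct, which has many more fields.
pub struct OldDiagnostic {
    secondary_code: Option<SecondaryCode>, // was `noqa_code: Option<NoqaCode>`
}

impl OldDiagnostic {
    /// Replaces the old `noqa_code()` accessor; returns a borrow, not a copy.
    pub fn secondary_code(&self) -> Option<&SecondaryCode> {
        self.secondary_code.as_ref()
    }
}

fn main() {
    let diagnostic = OldDiagnostic {
        secondary_code: Some(SecondaryCode::new("E501".to_string())),
    };
    // Callers that used to format a `NoqaCode` now borrow the string directly.
    let label = diagnostic
        .secondary_code()
        .map(SecondaryCode::as_str)
        .unwrap_or_default();
    println!("{label}");
}
```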
parent efcb63fe3a
commit 96f3c8d1ab
22 changed files with 217 additions and 148 deletions
Cargo.lock (generated)

@@ -2946,6 +2946,7 @@ dependencies = [
  "fern",
  "glob",
  "globset",
+ "hashbrown 0.15.4",
  "imperative",
  "insta",
  "is-macro",
@@ -6,7 +6,6 @@ use anyhow::Result;
 use bitflags::bitflags;
 use colored::Colorize;
 use itertools::{Itertools, iterate};
-use ruff_linter::codes::NoqaCode;
 use ruff_linter::linter::FixTable;
 use serde::Serialize;

@@ -15,7 +14,7 @@ use ruff_linter::logging::LogLevel;
 use ruff_linter::message::{
     AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
     JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
-    SarifEmitter, TextEmitter,
+    SarifEmitter, SecondaryCode, TextEmitter,
 };
 use ruff_linter::notify_user;
 use ruff_linter::settings::flags::{self};

@@ -36,8 +35,8 @@ bitflags! {
 }

 #[derive(Serialize)]
-struct ExpandedStatistics {
-    code: Option<NoqaCode>,
+struct ExpandedStatistics<'a> {
+    code: Option<&'a SecondaryCode>,
     name: &'static str,
     count: usize,
     fixable: bool,

@@ -303,11 +302,12 @@ impl Printer {
         let statistics: Vec<ExpandedStatistics> = diagnostics
             .inner
             .iter()
-            .map(|message| (message.noqa_code(), message))
+            .map(|message| (message.secondary_code(), message))
             .sorted_by_key(|(code, message)| (*code, message.fixable()))
             .fold(
                 vec![],
-                |mut acc: Vec<((Option<NoqaCode>, &OldDiagnostic), usize)>, (code, message)| {
+                |mut acc: Vec<((Option<&SecondaryCode>, &OldDiagnostic), usize)>,
+                 (code, message)| {
                     if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
                         if *prev_code == code {
                             *count += 1;

@@ -349,12 +349,7 @@ impl Printer {
         );
         let code_width = statistics
             .iter()
-            .map(|statistic| {
-                statistic
-                    .code
-                    .map_or_else(String::new, |rule| rule.to_string())
-                    .len()
-            })
+            .map(|statistic| statistic.code.map_or(0, |s| s.len()))
             .max()
             .unwrap();
         let any_fixable = statistics.iter().any(|statistic| statistic.fixable);

@@ -370,7 +365,8 @@ impl Printer {
                 statistic.count.to_string().bold(),
                 statistic
                     .code
-                    .map_or_else(String::new, |rule| rule.to_string())
+                    .map(SecondaryCode::as_str)
+                    .unwrap_or_default()
                     .red()
                     .bold(),
                 if any_fixable {
@@ -38,6 +38,7 @@ colored = { workspace = true }
 fern = { workspace = true }
 glob = { workspace = true }
 globset = { workspace = true }
+hashbrown = { workspace = true }
 imperative = { workspace = true }
 is-macro = { workspace = true }
 is-wsl = { workspace = true }
@@ -35,40 +35,35 @@ pub(crate) fn check_noqa(
     // Identify any codes that are globally exempted (within the current file).
     let file_noqa_directives =
         FileNoqaDirectives::extract(locator, comment_ranges, &settings.external, path);
-    let exemption = FileExemption::from(&file_noqa_directives);

     // Extract all `noqa` directives.
     let mut noqa_directives =
         NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator);

+    if file_noqa_directives.is_empty() && noqa_directives.is_empty() {
+        return Vec::new();
+    }
+
+    let exemption = FileExemption::from(&file_noqa_directives);
+
     // Indices of diagnostics that were ignored by a `noqa` directive.
     let mut ignored_diagnostics = vec![];

     // Remove any ignored diagnostics.
     'outer: for (index, diagnostic) in context.iter().enumerate() {
         // Can't ignore syntax errors.
-        let Some(code) = diagnostic.noqa_code() else {
+        let Some(code) = diagnostic.secondary_code() else {
             continue;
         };

-        if code == Rule::BlanketNOQA.noqa_code() {
+        if *code == Rule::BlanketNOQA.noqa_code() {
             continue;
         }

-        match &exemption {
-            FileExemption::All(_) => {
-                // If the file is exempted, ignore all diagnostics.
-                ignored_diagnostics.push(index);
-                continue;
-            }
-            FileExemption::Codes(codes) => {
-                // If the diagnostic is ignored by a global exemption, ignore it.
-                if codes.contains(&&code) {
-                    ignored_diagnostics.push(index);
-                    continue;
-                }
-            }
-        }
+        if exemption.contains_secondary_code(code) {
+            ignored_diagnostics.push(index);
+            continue;
+        }

         let noqa_offsets = diagnostic
             .parent

@@ -82,13 +77,21 @@ pub(crate) fn check_noqa(
             {
                 let suppressed = match &directive_line.directive {
                     Directive::All(_) => {
-                        directive_line.matches.push(code);
+                        let Ok(rule) = Rule::from_code(code) else {
+                            debug_assert!(false, "Invalid secondary code `{code}`");
+                            continue;
+                        };
+                        directive_line.matches.push(rule);
                         ignored_diagnostics.push(index);
                         true
                     }
                     Directive::Codes(directive) => {
                         if directive.includes(code) {
-                            directive_line.matches.push(code);
+                            let Ok(rule) = Rule::from_code(code) else {
+                                debug_assert!(false, "Invalid secondary code `{code}`");
+                                continue;
+                            };
+                            directive_line.matches.push(rule);
                             ignored_diagnostics.push(index);
                             true
                         } else {

@@ -147,11 +150,11 @@ pub(crate) fn check_noqa(
             if seen_codes.insert(original_code) {
                 let is_code_used = if is_file_level {
-                    context
-                        .iter()
-                        .any(|diag| diag.noqa_code().is_some_and(|noqa| noqa == code))
+                    context.iter().any(|diag| {
+                        diag.secondary_code().is_some_and(|noqa| *noqa == code)
+                    })
                 } else {
-                    matches.iter().any(|match_| *match_ == code)
+                    matches.iter().any(|match_| match_.noqa_code() == code)
                 } || settings
                     .external
                     .iter()
@@ -46,6 +46,12 @@ impl PartialEq<&str> for NoqaCode {
     }
 }

+impl PartialEq<NoqaCode> for &str {
+    fn eq(&self, other: &NoqaCode) -> bool {
+        other.eq(self)
+    }
+}
+
 impl serde::Serialize for NoqaCode {
     fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
     where
@@ -63,7 +63,7 @@ fn apply_fixes<'a>(
     let mut source_map = SourceMap::default();

     for (code, name, fix) in diagnostics
-        .filter_map(|msg| msg.noqa_code().map(|code| (code, msg.name(), msg)))
+        .filter_map(|msg| msg.secondary_code().map(|code| (code, msg.name(), msg)))
         .filter_map(|(code, name, diagnostic)| diagnostic.fix().map(|fix| (code, name, fix)))
         .sorted_by(|(_, name1, fix1), (_, name2, fix2)| cmp_fix(name1, name2, fix1, fix2))
     {
@@ -1,12 +1,11 @@
 use std::borrow::Cow;
-use std::collections::hash_map::Entry;
 use std::path::Path;

 use anyhow::{Result, anyhow};
 use colored::Colorize;
 use itertools::Itertools;
 use ruff_python_parser::semantic_errors::SemanticSyntaxError;
-use rustc_hash::FxHashMap;
+use rustc_hash::FxBuildHasher;

 use ruff_notebook::Notebook;
 use ruff_python_ast::{ModModule, PySourceType, PythonVersion};

@@ -23,10 +22,10 @@ use crate::checkers::imports::check_imports;
 use crate::checkers::noqa::check_noqa;
 use crate::checkers::physical_lines::check_physical_lines;
 use crate::checkers::tokens::check_tokens;
-use crate::codes::NoqaCode;
 use crate::directives::Directives;
 use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
 use crate::fix::{FixResult, fix_file};
+use crate::message::SecondaryCode;
 use crate::noqa::add_noqa;
 use crate::package::PackageRoot;
 use crate::preview::is_py314_support_enabled;

@@ -95,25 +94,25 @@ struct FixCount {
 /// A mapping from a noqa code to the corresponding lint name and a count of applied fixes.
 #[derive(Debug, Default, PartialEq)]
-pub struct FixTable(FxHashMap<NoqaCode, FixCount>);
+pub struct FixTable(hashbrown::HashMap<SecondaryCode, FixCount, rustc_hash::FxBuildHasher>);

 impl FixTable {
     pub fn counts(&self) -> impl Iterator<Item = usize> {
         self.0.values().map(|fc| fc.count)
     }

-    pub fn entry(&mut self, code: NoqaCode) -> FixTableEntry {
-        FixTableEntry(self.0.entry(code))
+    pub fn entry<'a>(&'a mut self, code: &'a SecondaryCode) -> FixTableEntry<'a> {
+        FixTableEntry(self.0.entry_ref(code))
     }

-    pub fn iter(&self) -> impl Iterator<Item = (NoqaCode, &'static str, usize)> {
+    pub fn iter(&self) -> impl Iterator<Item = (&SecondaryCode, &'static str, usize)> {
         self.0
             .iter()
-            .map(|(code, FixCount { rule_name, count })| (*code, *rule_name, *count))
+            .map(|(code, FixCount { rule_name, count })| (code, *rule_name, *count))
     }

-    pub fn keys(&self) -> impl Iterator<Item = NoqaCode> {
-        self.0.keys().copied()
+    pub fn keys(&self) -> impl Iterator<Item = &SecondaryCode> {
+        self.0.keys()
     }

     pub fn is_empty(&self) -> bool {

@@ -121,7 +120,9 @@ impl FixTable {
     }
 }

-pub struct FixTableEntry<'a>(Entry<'a, NoqaCode, FixCount>);
+pub struct FixTableEntry<'a>(
+    hashbrown::hash_map::EntryRef<'a, 'a, SecondaryCode, SecondaryCode, FixCount, FxBuildHasher>,
+);

 impl<'a> FixTableEntry<'a> {
     pub fn or_default(self, rule_name: &'static str) -> &'a mut usize {
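The `FixTable` hunks above key the map by an owned `SecondaryCode` while letting callers pass only a `&SecondaryCode`; hashbrown's `entry_ref` defers the clone (via the `From<&SecondaryCode> for SecondaryCode` impl added in `message/mod.rs`) until the entry is actually vacant. A minimal sketch of that pattern with plain `String`/`&str` keys, not the real ruff types (crate versions in the comment are assumptions):

```rust
// Cargo.toml (sketch): hashbrown = "0.15", rustc-hash = "2"
use hashbrown::HashMap;
use rustc_hash::FxBuildHasher;

fn main() {
    // Keyed by an owned String, but looked up by &str: `entry_ref` only needs to
    // allocate a String (K: From<&Q>) when the entry turns out to be vacant.
    let mut counts: HashMap<String, usize, FxBuildHasher> = HashMap::default();

    for code in ["E501", "F401", "E501"] {
        *counts.entry_ref(code).or_insert(0) += 1;
    }

    assert_eq!(counts["E501"], 2);
    assert_eq!(counts["F401"], 1);
}
```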
@@ -678,18 +679,16 @@ pub fn lint_fix<'a>(
     }
 }

-fn collect_rule_codes(rules: impl IntoIterator<Item = NoqaCode>) -> String {
-    rules
-        .into_iter()
-        .map(|rule| rule.to_string())
-        .sorted_unstable()
-        .dedup()
-        .join(", ")
+fn collect_rule_codes<T>(rules: impl IntoIterator<Item = T>) -> String
+where
+    T: Ord + PartialEq + std::fmt::Display,
+{
+    rules.into_iter().sorted_unstable().dedup().join(", ")
 }

 #[expect(clippy::print_stderr)]
 fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[OldDiagnostic]) {
-    let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::noqa_code));
+    let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::secondary_code));
     if cfg!(debug_assertions) {
         eprintln!(
             "{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",

@@ -721,11 +720,11 @@ This indicates a bug in Ruff. If you could open an issue at:
 }

 #[expect(clippy::print_stderr)]
-fn report_fix_syntax_error(
+fn report_fix_syntax_error<'a>(
     path: &Path,
     transformed: &str,
     error: &ParseError,
-    rules: impl IntoIterator<Item = NoqaCode>,
+    rules: impl IntoIterator<Item = &'a SecondaryCode>,
 ) {
     let codes = collect_rule_codes(rules);
     if cfg!(debug_assertions) {
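The now-generic `collect_rule_codes` accepts any code type that is `Ord + PartialEq + Display`, so the same helper serves both `&SecondaryCode` and other code-like values without an intermediate `to_string` pass. The function lifted into a self-contained example (the itertools version is an assumption):

```rust
// itertools = "0.14" (version is an assumption)
use itertools::Itertools;

/// Sorts, dedups, and joins any displayable, orderable code type into a single
/// comma-separated string, mirroring the generic helper from the diff.
fn collect_rule_codes<T>(rules: impl IntoIterator<Item = T>) -> String
where
    T: Ord + PartialEq + std::fmt::Display,
{
    rules.into_iter().sorted_unstable().dedup().join(", ")
}

fn main() {
    // Works for borrowed string-like codes (the `&SecondaryCode` case) ...
    assert_eq!(collect_rule_codes(["F401", "E501", "F401"]), "E501, F401");
    // ... and for anything else that is Ord + Display.
    assert_eq!(collect_rule_codes([3u32, 1, 2, 3]), "1, 2, 3");
}
```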
@@ -33,7 +33,7 @@ impl Emitter for AzureEmitter {
                 line = location.line,
                 col = location.column,
                 code = diagnostic
-                    .noqa_code()
+                    .secondary_code()
                     .map_or_else(String::new, |code| format!("code={code};")),
                 body = diagnostic.body(),
             )?;
@@ -33,7 +33,7 @@ impl Emitter for GithubEmitter {
             writer,
             "::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
             code = diagnostic
-                .noqa_code()
+                .secondary_code()
                 .map_or_else(String::new, |code| format!(" ({code})")),
             file = diagnostic.filename(),
             row = source_location.line,

@@ -50,7 +50,7 @@ impl Emitter for GithubEmitter {
             column = location.column,
         )?;

-        if let Some(code) = diagnostic.noqa_code() {
+        if let Some(code) = diagnostic.secondary_code() {
             write!(writer, " {code}")?;
         }
@@ -90,18 +90,15 @@ impl Serialize for SerializedMessages<'_> {
             }
             fingerprints.insert(message_fingerprint);

-            let (description, check_name) = if let Some(code) = diagnostic.noqa_code() {
-                (diagnostic.body().to_string(), code.to_string())
+            let (description, check_name) = if let Some(code) = diagnostic.secondary_code() {
+                (diagnostic.body().to_string(), code.as_str())
             } else {
                 let description = diagnostic.body();
                 let description_without_prefix = description
                     .strip_prefix("SyntaxError: ")
                     .unwrap_or(description);

-                (
-                    description_without_prefix.to_string(),
-                    "syntax-error".to_string(),
-                )
+                (description_without_prefix.to_string(), "syntax-error")
             };

             let value = json!({
@@ -87,7 +87,7 @@ pub(crate) fn message_to_json_value(message: &OldDiagnostic, context: &EmitterCo
     }

     json!({
-        "code": message.noqa_code().map(|code| code.to_string()),
+        "code": message.secondary_code(),
         "url": message.to_url(),
         "message": message.body(),
         "fix": fix,
@@ -59,7 +59,7 @@ impl Emitter for JunitEmitter {
                 body = message.body()
             ));
             let mut case = TestCase::new(
-                if let Some(code) = message.noqa_code() {
+                if let Some(code) = message.secondary_code() {
                     format!("org.ruff.{code}")
                 } else {
                     "org.ruff".to_string()
@@ -62,7 +62,7 @@ pub struct OldDiagnostic {
     pub fix: Option<Fix>,
     pub parent: Option<TextSize>,
     pub(crate) noqa_offset: Option<TextSize>,
-    pub(crate) noqa_code: Option<NoqaCode>,
+    pub(crate) secondary_code: Option<SecondaryCode>,
 }

 impl OldDiagnostic {

@@ -79,7 +79,7 @@ impl OldDiagnostic {
             fix: None,
             parent: None,
             noqa_offset: None,
-            noqa_code: None,
+            secondary_code: None,
         }
     }

@@ -115,7 +115,7 @@ impl OldDiagnostic {
             fix,
             parent,
             noqa_offset,
-            noqa_code: Some(rule.noqa_code()),
+            secondary_code: Some(SecondaryCode(rule.noqa_code().to_string())),
         }
     }

@@ -247,9 +247,9 @@ impl OldDiagnostic {
         self.fix().is_some()
     }

-    /// Returns the [`NoqaCode`] corresponding to the diagnostic message.
-    pub fn noqa_code(&self) -> Option<NoqaCode> {
-        self.noqa_code
+    /// Returns the noqa code for the diagnostic message as a string.
+    pub fn secondary_code(&self) -> Option<&SecondaryCode> {
+        self.secondary_code.as_ref()
     }

     /// Returns the URL for the rule documentation, if it exists.

@@ -384,6 +384,68 @@ impl<'a> EmitterContext<'a> {
     }
 }

+/// A secondary identifier for a lint diagnostic.
+///
+/// For Ruff rules this means the noqa code.
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, serde::Serialize)]
+#[serde(transparent)]
+pub struct SecondaryCode(String);
+
+impl SecondaryCode {
+    pub fn new(code: String) -> Self {
+        Self(code)
+    }
+
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
+}
+
+impl Display for SecondaryCode {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_str(&self.0)
+    }
+}
+
+impl std::ops::Deref for SecondaryCode {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl PartialEq<&str> for SecondaryCode {
+    fn eq(&self, other: &&str) -> bool {
+        self.0 == *other
+    }
+}
+
+impl PartialEq<SecondaryCode> for &str {
+    fn eq(&self, other: &SecondaryCode) -> bool {
+        other.eq(self)
+    }
+}
+
+impl PartialEq<NoqaCode> for SecondaryCode {
+    fn eq(&self, other: &NoqaCode) -> bool {
+        &self.as_str() == other
+    }
+}
+
+impl PartialEq<SecondaryCode> for NoqaCode {
+    fn eq(&self, other: &SecondaryCode) -> bool {
+        other.eq(self)
+    }
+}
+
+// for `hashbrown::EntryRef`
+impl From<&SecondaryCode> for SecondaryCode {
+    fn from(value: &SecondaryCode) -> Self {
+        value.clone()
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use rustc_hash::FxHashMap;
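A small usage sketch of the symmetric `PartialEq` impls introduced above, trimmed down to the `&str` comparisons (the real `SecondaryCode` also implements `Deref`, `Display`, `Hash`, serde serialization, and comparisons against `NoqaCode`). The point is that emitters and checkers can compare a borrowed code against string literals in either direction without allocating:

```rust
/// Minimal re-declaration of the `SecondaryCode` newtype from the hunk above.
#[derive(Debug, PartialEq, Eq)]
struct SecondaryCode(String);

impl SecondaryCode {
    fn as_str(&self) -> &str {
        &self.0
    }
}

impl PartialEq<&str> for SecondaryCode {
    fn eq(&self, other: &&str) -> bool {
        self.0 == *other
    }
}

impl PartialEq<SecondaryCode> for &str {
    fn eq(&self, other: &SecondaryCode) -> bool {
        other.eq(self)
    }
}

fn main() {
    let code = SecondaryCode("E501".to_string());
    // Both orderings work, so call sites never need `.to_string()` just to
    // compare against a literal.
    assert!(code == "E501");
    assert!("E501" == code);
    assert_eq!(code.as_str(), "E501");
}
```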
@@ -26,7 +26,7 @@ impl Emitter for PylintEmitter {
             diagnostic.compute_start_location().line
         };

-        let body = if let Some(code) = diagnostic.noqa_code() {
+        let body = if let Some(code) = diagnostic.secondary_code() {
             format!("[{code}] {body}", body = diagnostic.body())
         } else {
             diagnostic.body().to_string()
@@ -71,7 +71,7 @@ fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
             "range": rdjson_range(start_location, end_location),
         },
         "code": {
-            "value": message.noqa_code().map(|code| code.to_string()),
+            "value": message.secondary_code(),
             "url": message.to_url(),
         },
         "suggestions": rdjson_suggestions(fix.edits(), &source_code),

@@ -84,7 +84,7 @@ fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
             "range": rdjson_range(start_location, end_location),
         },
         "code": {
-            "value": message.noqa_code().map(|code| code.to_string()),
+            "value": message.secondary_code(),
             "url": message.to_url(),
         },
     })
@@ -8,9 +8,8 @@ use serde_json::json;
 use ruff_source_file::OneIndexed;

 use crate::VERSION;
-use crate::codes::NoqaCode;
 use crate::fs::normalize_path;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
 use crate::registry::{Linter, RuleNamespace};

 pub struct SarifEmitter;

@@ -29,7 +28,7 @@ impl Emitter for SarifEmitter {
         let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.code).collect();
         let mut rules: Vec<SarifRule> = unique_rules.into_iter().map(SarifRule::from).collect();
-        rules.sort_by(|a, b| a.code.cmp(&b.code));
+        rules.sort_by(|a, b| a.code.cmp(b.code));

         let output = json!({
             "$schema": "https://json.schemastore.org/sarif-2.1.0.json",

@@ -54,26 +53,25 @@ impl Emitter for SarifEmitter {
 #[derive(Debug, Clone)]
 struct SarifRule<'a> {
     name: &'a str,
-    code: String,
+    code: &'a SecondaryCode,
     linter: &'a str,
     summary: &'a str,
     explanation: Option<&'a str>,
     url: Option<String>,
 }

-impl From<NoqaCode> for SarifRule<'_> {
-    fn from(code: NoqaCode) -> Self {
-        let code_str = code.to_string();
+impl<'a> From<&'a SecondaryCode> for SarifRule<'a> {
+    fn from(code: &'a SecondaryCode) -> Self {
         // This is a manual re-implementation of Rule::from_code, but we also want the Linter. This
         // avoids calling Linter::parse_code twice.
-        let (linter, suffix) = Linter::parse_code(&code_str).unwrap();
+        let (linter, suffix) = Linter::parse_code(code).unwrap();
         let rule = linter
             .all_rules()
             .find(|rule| rule.noqa_code().suffix() == suffix)
             .expect("Expected a valid noqa code corresponding to a rule");
         Self {
             name: rule.into(),
-            code: code_str,
+            code,
             linter: linter.name(),
             summary: rule.message_formats()[0],
             explanation: rule.explanation(),

@@ -111,8 +109,8 @@ impl Serialize for SarifRule<'_> {
 }

 #[derive(Debug)]
-struct SarifResult {
-    code: Option<NoqaCode>,
+struct SarifResult<'a> {
+    code: Option<&'a SecondaryCode>,
     level: String,
     message: String,
     uri: String,

@@ -122,14 +120,14 @@ struct SarifResult {
     end_column: OneIndexed,
 }

-impl SarifResult {
+impl<'a> SarifResult<'a> {
     #[cfg(not(target_arch = "wasm32"))]
-    fn from_message(message: &OldDiagnostic) -> Result<Self> {
+    fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
         let start_location = message.compute_start_location();
         let end_location = message.compute_end_location();
         let path = normalize_path(&*message.filename());
         Ok(Self {
-            code: message.noqa_code(),
+            code: message.secondary_code(),
             level: "error".to_string(),
             message: message.body().to_string(),
             uri: url::Url::from_file_path(&path)

@@ -144,12 +142,12 @@ impl SarifResult {
     #[cfg(target_arch = "wasm32")]
     #[expect(clippy::unnecessary_wraps)]
-    fn from_message(message: &OldDiagnostic) -> Result<Self> {
+    fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
         let start_location = message.compute_start_location();
         let end_location = message.compute_end_location();
         let path = normalize_path(&*message.filename());
         Ok(Self {
-            code: message.noqa_code(),
+            code: message.secondary_code(),
             level: "error".to_string(),
             message: message.body().to_string(),
             uri: path.display().to_string(),

@@ -161,7 +159,7 @@ impl SarifResult {
     }
 }

-impl Serialize for SarifResult {
+impl Serialize for SarifResult<'_> {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,

@@ -184,7 +182,7 @@ impl Serialize for SarifResult {
                 }
             }],
-            "ruleId": self.code.map(|code| code.to_string()),
+            "ruleId": self.code,
         })
         .serialize(serializer)
     }
@@ -14,7 +14,7 @@ use crate::Locator;
 use crate::fs::relativize_path;
 use crate::line_width::{IndentWidth, LineWidthBuilder};
 use crate::message::diff::Diff;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
 use crate::settings::types::UnsafeFixes;

 bitflags! {

@@ -151,8 +151,8 @@ impl Display for RuleCodeAndBody<'_> {
         if let Some(fix) = self.message.fix() {
             // Do not display an indicator for inapplicable fixes
             if fix.applies(self.unsafe_fixes.required_applicability()) {
-                if let Some(code) = self.message.noqa_code() {
-                    write!(f, "{} ", code.to_string().red().bold())?;
+                if let Some(code) = self.message.secondary_code() {
+                    write!(f, "{} ", code.red().bold())?;
                 }
                 return write!(
                     f,

@@ -164,11 +164,11 @@ impl Display for RuleCodeAndBody<'_> {
             }
         }

-        if let Some(code) = self.message.noqa_code() {
+        if let Some(code) = self.message.secondary_code() {
             write!(
                 f,
                 "{code} {body}",
-                code = code.to_string().red().bold(),
+                code = code.red().bold(),
                 body = self.message.body(),
             )
         } else {

@@ -254,8 +254,9 @@ impl Display for MessageCodeFrame<'_> {
         let label = self
             .message
-            .noqa_code()
-            .map_or_else(String::new, |code| code.to_string());
+            .secondary_code()
+            .map(SecondaryCode::as_str)
+            .unwrap_or_default();

         let line_start = self.notebook_index.map_or_else(
             || start_index.get(),

@@ -269,7 +270,7 @@ impl Display for MessageCodeFrame<'_> {
         let span = usize::from(source.annotation_range.start())
             ..usize::from(source.annotation_range.end());
-        let annotation = Level::Error.span(span).label(&label);
+        let annotation = Level::Error.span(span).label(label);
         let snippet = Snippet::source(&source.text)
             .line_start(line_start)
             .annotation(annotation)
@@ -16,9 +16,8 @@ use rustc_hash::FxHashSet;
 use crate::Edit;
 use crate::Locator;
-use crate::codes::NoqaCode;
 use crate::fs::relativize_path;
-use crate::message::OldDiagnostic;
+use crate::message::{OldDiagnostic, SecondaryCode};
 use crate::registry::Rule;
 use crate::rule_redirects::get_redirect_target;

@@ -106,9 +105,9 @@ impl Codes<'_> {
     /// Returns `true` if the string list of `codes` includes `code` (or an alias
     /// thereof).
-    pub(crate) fn includes(&self, needle: NoqaCode) -> bool {
+    pub(crate) fn includes<T: for<'a> PartialEq<&'a str>>(&self, needle: &T) -> bool {
         self.iter()
-            .any(|code| needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str()))
+            .any(|code| *needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str()))
     }
 }

@@ -140,48 +139,55 @@ pub(crate) fn rule_is_ignored(
         Ok(Some(NoqaLexerOutput {
             directive: Directive::Codes(codes),
             ..
-        })) => codes.includes(code.noqa_code()),
+        })) => codes.includes(&code.noqa_code()),
         _ => false,
     }
 }

 /// A summary of the file-level exemption as extracted from [`FileNoqaDirectives`].
 #[derive(Debug)]
-pub(crate) enum FileExemption<'a> {
+pub(crate) enum FileExemption {
     /// The file is exempt from all rules.
-    All(Vec<&'a NoqaCode>),
+    All(Vec<Rule>),
     /// The file is exempt from the given rules.
-    Codes(Vec<&'a NoqaCode>),
+    Codes(Vec<Rule>),
 }

-impl FileExemption<'_> {
-    /// Returns `true` if the file is exempt from the given rule.
-    pub(crate) fn includes(&self, needle: Rule) -> bool {
-        let needle = needle.noqa_code();
+impl FileExemption {
+    /// Returns `true` if the file is exempt from the given rule, as identified by its noqa code.
+    pub(crate) fn contains_secondary_code(&self, needle: &SecondaryCode) -> bool {
         match self {
             FileExemption::All(_) => true,
-            FileExemption::Codes(codes) => codes.iter().any(|code| needle == **code),
+            FileExemption::Codes(codes) => codes.iter().any(|code| *needle == code.noqa_code()),
         }
     }

+    /// Returns `true` if the file is exempt from the given rule.
+    pub(crate) fn includes(&self, needle: Rule) -> bool {
+        match self {
+            FileExemption::All(_) => true,
+            FileExemption::Codes(codes) => codes.contains(&needle),
+        }
+    }
+
     /// Returns `true` if the file exemption lists the rule directly, rather than via a blanket
     /// exemption.
     pub(crate) fn enumerates(&self, needle: Rule) -> bool {
-        let needle = needle.noqa_code();
         let codes = match self {
             FileExemption::All(codes) => codes,
             FileExemption::Codes(codes) => codes,
         };
-        codes.iter().any(|code| needle == **code)
+        codes.contains(&needle)
     }
 }

-impl<'a> From<&'a FileNoqaDirectives<'a>> for FileExemption<'a> {
+impl<'a> From<&'a FileNoqaDirectives<'a>> for FileExemption {
     fn from(directives: &'a FileNoqaDirectives) -> Self {
         let codes = directives
             .lines()
             .iter()
             .flat_map(|line| &line.matches)
             .copied()
             .collect();
         if directives
             .lines()

@@ -203,7 +209,7 @@ pub(crate) struct FileNoqaDirectiveLine<'a> {
     /// The blanket noqa directive.
     pub(crate) parsed_file_exemption: Directive<'a>,
     /// The codes that are ignored by the parsed exemptions.
-    pub(crate) matches: Vec<NoqaCode>,
+    pub(crate) matches: Vec<Rule>,
 }

 impl Ranged for FileNoqaDirectiveLine<'_> {

@@ -270,7 +276,7 @@ impl<'a> FileNoqaDirectives<'a> {
                         if let Ok(rule) = Rule::from_code(get_redirect_target(code).unwrap_or(code))
                         {
-                            Some(rule.noqa_code())
+                            Some(rule)
                         } else {
                             #[expect(deprecated)]
                             let line = locator.compute_line_index(range.start());

@@ -303,6 +309,10 @@ impl<'a> FileNoqaDirectives<'a> {
     pub(crate) fn lines(&self) -> &[FileNoqaDirectiveLine] {
         &self.0
     }
+
+    pub(crate) fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
 }

 /// Output of lexing a `noqa` directive.

@@ -830,7 +840,7 @@ fn build_noqa_edits_by_line<'a>(
 struct NoqaComment<'a> {
     line: TextSize,
-    code: NoqaCode,
+    code: &'a SecondaryCode,
     directive: Option<&'a Directive<'a>>,
 }

@@ -846,25 +856,15 @@ fn find_noqa_comments<'a>(
     // Mark any non-ignored diagnostics.
     for message in diagnostics {
-        let Some(code) = message.noqa_code() else {
+        let Some(code) = message.secondary_code() else {
             comments_by_line.push(None);
             continue;
         };

-        match &exemption {
-            FileExemption::All(_) => {
-                // If the file is exempted, don't add any noqa directives.
-                comments_by_line.push(None);
-                continue;
-            }
-            FileExemption::Codes(codes) => {
-                // If the diagnostic is ignored by a global exemption, don't add a noqa directive.
-                if codes.contains(&&code) {
-                    comments_by_line.push(None);
-                    continue;
-                }
-            }
-        }
+        if exemption.contains_secondary_code(code) {
+            comments_by_line.push(None);
+            continue;
+        }

         // Is the violation ignored by a `noqa` directive on the parent line?
         if let Some(parent) = message.parent {

@@ -921,7 +921,7 @@ fn find_noqa_comments<'a>(
 struct NoqaEdit<'a> {
     edit_range: TextRange,
-    noqa_codes: FxHashSet<NoqaCode>,
+    noqa_codes: FxHashSet<&'a SecondaryCode>,
     codes: Option<&'a Codes<'a>>,
     line_ending: LineEnding,
 }

@@ -942,13 +942,13 @@ impl NoqaEdit<'_> {
                     writer,
                     self.noqa_codes
                         .iter()
-                        .map(ToString::to_string)
-                        .chain(codes.iter().map(ToString::to_string))
+                        .map(|code| code.as_str())
+                        .chain(codes.iter().map(Code::as_str))
                         .sorted_unstable(),
                 );
             }
             None => {
-                push_codes(writer, self.noqa_codes.iter().map(ToString::to_string));
+                push_codes(writer, self.noqa_codes.iter().sorted_unstable());
             }
         }
         write!(writer, "{}", self.line_ending.as_str()).unwrap();

@@ -964,7 +964,7 @@ impl Ranged for NoqaEdit<'_> {
 fn generate_noqa_edit<'a>(
     directive: Option<&'a Directive>,
     offset: TextSize,
-    noqa_codes: FxHashSet<NoqaCode>,
+    noqa_codes: FxHashSet<&'a SecondaryCode>,
     locator: &Locator,
     line_ending: LineEnding,
 ) -> Option<NoqaEdit<'a>> {

@@ -1017,7 +1017,7 @@ pub(crate) struct NoqaDirectiveLine<'a> {
     /// The noqa directive.
     pub(crate) directive: Directive<'a>,
     /// The codes that are ignored by the directive.
-    pub(crate) matches: Vec<NoqaCode>,
+    pub(crate) matches: Vec<Rule>,
     /// Whether the directive applies to `range.end`.
     pub(crate) includes_end: bool,
 }

@@ -1142,6 +1142,10 @@ impl<'a> NoqaDirectives<'a> {
     pub(crate) fn lines(&self) -> &[NoqaDirectiveLine] {
         &self.inner
     }
+
+    pub(crate) fn is_empty(&self) -> bool {
+        self.inner.is_empty()
+    }
 }

 /// Remaps offsets falling into one of the ranges to instead check for a noqa comment on the
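The relaxed bound on `Codes::includes` above takes a higher-ranked `for<'a> PartialEq<&'a str>` needle, so anything comparable against the raw directive strings (a `SecondaryCode`, a `NoqaCode`) can be matched without first converting it. A standalone sketch of the same idea, with a made-up `Code` stand-in rather than the real ruff types:

```rust
/// Any needle that can be compared against `&str` works, mirroring the generic
/// bound on the real `Codes::includes`.
fn includes<T: for<'a> PartialEq<&'a str>>(haystack: &[&str], needle: &T) -> bool {
    haystack.iter().any(|code| *needle == *code)
}

/// Hypothetical string-backed code type standing in for `SecondaryCode`.
struct Code(String);

impl PartialEq<&str> for Code {
    fn eq(&self, other: &&str) -> bool {
        self.0 == *other
    }
}

fn main() {
    let directive_codes = ["E501", "F401"];
    assert!(includes(&directive_codes, &Code("F401".to_string())));
    assert!(!includes(&directive_codes, &Code("F841".to_string())));
}
```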
@@ -774,9 +774,10 @@ mod tests {
         messages.sort_by_key(Ranged::start);
         let actual = messages
             .iter()
-            .filter_map(OldDiagnostic::noqa_code)
+            .filter(|msg| !msg.is_syntax_error())
+            .map(OldDiagnostic::name)
             .collect::<Vec<_>>();
-        let expected: Vec<_> = expected.iter().map(Rule::noqa_code).collect();
+        let expected: Vec<_> = expected.iter().map(|rule| rule.name().as_str()).collect();
         assert_eq!(actual, expected);
     }
@@ -237,9 +237,9 @@ Source with applied fixes:
         let messages = messages
             .into_iter()
-            .filter_map(|msg| Some((msg.noqa_code()?, msg)))
+            .filter_map(|msg| Some((msg.secondary_code()?.to_string(), msg)))
             .map(|(code, mut diagnostic)| {
-                let rule = Rule::from_code(&code.to_string()).unwrap();
+                let rule = Rule::from_code(&code).unwrap();
                 let fixable = diagnostic.fix().is_some_and(|fix| {
                     matches!(
                         fix.applicability(),
@@ -239,7 +239,7 @@ fn to_lsp_diagnostic(
     let body = diagnostic.body().to_string();
     let fix = diagnostic.fix();
     let suggestion = diagnostic.suggestion();
-    let code = diagnostic.noqa_code();
+    let code = diagnostic.secondary_code();

     let fix = fix.and_then(|fix| fix.applies(Applicability::Unsafe).then_some(fix));
@@ -208,7 +208,7 @@ impl Workspace {
         let messages: Vec<ExpandedMessage> = diagnostics
             .into_iter()
             .map(|msg| ExpandedMessage {
-                code: msg.noqa_code().map(|code| code.to_string()),
+                code: msg.secondary_code().map(ToString::to_string),
                 message: msg.body().to_string(),
                 start_location: source_code.line_column(msg.start()).into(),
                 end_location: source_code.line_column(msg.end()).into(),