Mirror of https://github.com/astral-sh/ruff.git (synced 2025-08-18 17:41:12 +00:00)
[internal] Return `Message`s from `check_path` (#16837)
Summary
--

This PR updates `check_path` in the `ruff_linter` crate to return a `Vec<Message>` instead of a `Vec<Diagnostic>`. The main motivation for this is to make it easier to convert semantic syntax errors directly into `Message`s rather than `Diagnostic`s in #16106. However, this also has the benefit of keeping the preview check on unsupported syntax errors in `check_path`, as suggested in https://github.com/astral-sh/ruff/pull/16429#discussion_r1974748024.

All of the interesting changes are in the first commit. The second commit just renames variables like `diagnostics` to `messages`, and the third commit is a tiny import fix. I also updated the `ExpandedMessage::location` field name, which caused a few extra commits tidying up the playground code. I thought it was nicely symmetric with `end_location`, but I'm happy to revert that too.

Test Plan
--

Existing tests. I also tested the playground and server manually.
This commit is contained in:
parent f2a9960fb3
commit 22de00de16

13 changed files with 280 additions and 294 deletions
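
Before the diff, a minimal sketch of the shape this PR relies on: `check_path` now returns `Message`s, and a `Message` is either a `DiagnosticMessage` (a rule violation) or a `SyntaxErrorMessage` (a parse error). The variant and field names below mirror the types in the diff; the surrounding scaffolding (simplified stand-in field types and the `main` function) is illustrative only and is not the actual `ruff_linter` definition.

```rust
// Simplified, self-contained sketch of the Message/DiagnosticMessage split.
// Field types are stand-ins (String, u32 tuples) for DiagnosticKind,
// TextRange, SourceFile, and TextSize; only the overall shape matches the diff.

#[derive(Clone, Debug, PartialEq, Eq)]
struct DiagnosticMessage {
    kind: String,      // stand-in for DiagnosticKind
    range: (u32, u32), // stand-in for TextRange
    file: String,      // stand-in for SourceFile
    noqa_offset: u32,  // stand-in for TextSize
}

#[derive(Clone, Debug, PartialEq, Eq)]
struct SyntaxErrorMessage {
    message: String,
    range: (u32, u32),
}

#[derive(Clone, Debug, PartialEq, Eq)]
enum Message {
    Diagnostic(DiagnosticMessage),
    SyntaxError(SyntaxErrorMessage),
}

impl Message {
    /// Borrow the inner rule violation, skipping syntax errors. This is the
    /// filter that `fix_file` and the noqa machinery apply in the diff below.
    fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> {
        match self {
            Message::Diagnostic(m) => Some(m),
            Message::SyntaxError(_) => None,
        }
    }
}

/// Stand-in for the new `check_path` return type: every result is a `Message`,
/// whether it came from a lint rule or from the parser.
fn check_path_sketch() -> Vec<Message> {
    vec![
        Message::Diagnostic(DiagnosticMessage {
            kind: "F841".to_string(),
            range: (0, 5),
            file: "<filename>".to_string(),
            noqa_offset: 0,
        }),
        Message::SyntaxError(SyntaxErrorMessage {
            message: "unexpected indentation".to_string(),
            range: (10, 11),
        }),
    ]
}

fn main() {
    let messages = check_path_sketch();
    // Only rule violations survive this filter, mirroring how downstream code
    // (fix application, `# noqa` insertion) ignores syntax errors.
    let diagnostics: Vec<_> = messages
        .iter()
        .filter_map(Message::as_diagnostic_message)
        .collect();
    assert_eq!(diagnostics.len(), 1);
}
```

The `filter_map(Message::as_diagnostic_message)` step is the same pattern the diff applies in `fix_file` and `find_noqa_comments`, so syntax errors can travel in the same `Vec<Message>` without affecting fix application or `noqa` insertion.
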
@@ -592,6 +592,7 @@ fn all_lines_fit(
 #[cfg(test)]
 mod tests {
     use anyhow::{anyhow, Result};
+    use ruff_source_file::SourceFileBuilder;
     use test_case::test_case;
 
     use ruff_diagnostics::{Diagnostic, Edit, Fix};
@@ -604,6 +605,7 @@ mod tests {
     use crate::fix::edits::{
         add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
     };
+    use crate::message::DiagnosticMessage;
     use crate::Locator;
 
     /// Parse the given source using [`Mode::Module`] and return the first statement.
@@ -735,14 +737,22 @@ x = 1 \
         let diag = {
             use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
             let mut iter = edits.into_iter();
-            Diagnostic::new(
+            let diag = Diagnostic::new(
                 MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary.
                 TextRange::default(),
             )
             .with_fix(Fix::safe_edits(
                 iter.next().ok_or(anyhow!("expected edits nonempty"))?,
                 iter,
-            ))
+            ));
+            DiagnosticMessage {
+                kind: diag.kind,
+                range: diag.range,
+                fix: diag.fix,
+                parent: diag.parent,
+                file: SourceFileBuilder::new("<filename>", "<code>").finish(),
+                noqa_offset: TextSize::default(),
+            }
         };
         assert_eq!(apply_fixes([diag].iter(), &locator).code, expect);
         Ok(())
@@ -3,10 +3,11 @@ use std::collections::BTreeSet;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 
-use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel, SourceMap};
+use ruff_diagnostics::{Edit, Fix, IsolationLevel, SourceMap};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 
 use crate::linter::FixTable;
+use crate::message::{DiagnosticMessage, Message};
 use crate::registry::{AsRule, Rule};
 use crate::settings::types::UnsafeFixes;
 use crate::Locator;
@@ -26,16 +27,17 @@ pub(crate) struct FixResult {
 
 /// Fix errors in a file, and write the fixed source code to disk.
 pub(crate) fn fix_file(
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     unsafe_fixes: UnsafeFixes,
 ) -> Option<FixResult> {
     let required_applicability = unsafe_fixes.required_applicability();
 
-    let mut with_fixes = diagnostics
+    let mut with_fixes = messages
         .iter()
-        .filter(|diagnostic| {
-            diagnostic
+        .filter_map(Message::as_diagnostic_message)
+        .filter(|message| {
+            message
                 .fix
                 .as_ref()
                 .is_some_and(|fix| fix.applies(required_applicability))
@@ -51,7 +53,7 @@ pub(crate) fn fix_file(
 
 /// Apply a series of fixes.
 fn apply_fixes<'a>(
-    diagnostics: impl Iterator<Item = &'a Diagnostic>,
+    diagnostics: impl Iterator<Item = &'a DiagnosticMessage>,
     locator: &'a Locator<'a>,
 ) -> FixResult {
     let mut output = String::with_capacity(locator.len());
@@ -161,22 +163,30 @@ fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Orderi
 
 #[cfg(test)]
 mod tests {
-    use ruff_diagnostics::{Diagnostic, Edit, Fix, SourceMarker};
+    use ruff_diagnostics::{Edit, Fix, SourceMarker};
+    use ruff_source_file::SourceFileBuilder;
     use ruff_text_size::{Ranged, TextSize};
 
     use crate::fix::{apply_fixes, FixResult};
+    use crate::message::DiagnosticMessage;
     use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
     use crate::Locator;
 
     #[allow(deprecated)]
-    fn create_diagnostics(edit: impl IntoIterator<Item = Edit>) -> Vec<Diagnostic> {
+    fn create_diagnostics(
+        filename: &str,
+        source: &str,
+        edit: impl IntoIterator<Item = Edit>,
+    ) -> Vec<DiagnosticMessage> {
         edit.into_iter()
-            .map(|edit| Diagnostic {
+            .map(|edit| DiagnosticMessage {
                 // The choice of rule here is arbitrary.
                 kind: MissingNewlineAtEndOfFile.into(),
                 range: edit.range(),
                 fix: Some(Fix::safe_edit(edit)),
                 parent: None,
+                file: SourceFileBuilder::new(filename, source).finish(),
+                noqa_offset: TextSize::default(),
             })
             .collect()
     }
@@ -184,7 +194,7 @@ mod tests {
     #[test]
     fn empty_file() {
        let locator = Locator::new(r"");
-        let diagnostics = create_diagnostics([]);
+        let diagnostics = create_diagnostics("<filename>", locator.contents(), []);
        let FixResult {
            code,
            fixes,
@@ -205,10 +215,14 @@ print("hello world")
 "#
             .trim(),
         );
-        let diagnostics = create_diagnostics([Edit::insertion(
-            "import sys\n".to_string(),
-            TextSize::new(10),
-        )]);
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [Edit::insertion(
+                "import sys\n".to_string(),
+                TextSize::new(10),
+            )],
+        );
         let FixResult {
             code,
             fixes,
@@ -243,11 +257,15 @@ class A(object):
 "
             .trim(),
         );
-        let diagnostics = create_diagnostics([Edit::replacement(
-            "Bar".to_string(),
-            TextSize::new(8),
-            TextSize::new(14),
-        )]);
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [Edit::replacement(
+                "Bar".to_string(),
+                TextSize::new(8),
+                TextSize::new(14),
+            )],
+        );
         let FixResult {
             code,
             fixes,
@@ -280,7 +298,11 @@ class A(object):
 "
             .trim(),
         );
-        let diagnostics = create_diagnostics([Edit::deletion(TextSize::new(7), TextSize::new(15))]);
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [Edit::deletion(TextSize::new(7), TextSize::new(15))],
+        );
         let FixResult {
             code,
             fixes,
@@ -313,10 +335,14 @@ class A(object, object, object):
 "
             .trim(),
         );
-        let diagnostics = create_diagnostics([
-            Edit::deletion(TextSize::from(8), TextSize::from(16)),
-            Edit::deletion(TextSize::from(22), TextSize::from(30)),
-        ]);
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [
+                Edit::deletion(TextSize::from(8), TextSize::from(16)),
+                Edit::deletion(TextSize::from(22), TextSize::from(30)),
+            ],
+        );
         let FixResult {
             code,
             fixes,
@@ -352,10 +378,14 @@ class A(object):
 "
             .trim(),
         );
-        let diagnostics = create_diagnostics([
-            Edit::deletion(TextSize::from(7), TextSize::from(15)),
-            Edit::replacement("ignored".to_string(), TextSize::from(9), TextSize::from(11)),
-        ]);
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [
+                Edit::deletion(TextSize::from(7), TextSize::from(15)),
+                Edit::replacement("ignored".to_string(), TextSize::from(9), TextSize::from(11)),
+            ],
+        );
         let FixResult {
             code,
             fixes,
@@ -58,8 +58,7 @@ pub struct FixerResult<'a> {
     pub fixed: FixTable,
 }
 
-/// Generate `Diagnostic`s from the source code contents at the
-/// given `Path`.
+/// Generate [`Message`]s from the source code contents at the given `Path`.
 #[allow(clippy::too_many_arguments)]
 pub fn check_path(
     path: &Path,
@@ -74,7 +73,7 @@ pub fn check_path(
     source_type: PySourceType,
     parsed: &Parsed<ModModule>,
     target_version: PythonVersion,
-) -> Vec<Diagnostic> {
+) -> Vec<Message> {
     // Aggregate all diagnostics.
     let mut diagnostics = vec![];
 
@@ -322,7 +321,20 @@ pub fn check_path(
         }
     }
 
-    diagnostics
+    let syntax_errors = if settings.preview.is_enabled() {
+        parsed.unsupported_syntax_errors()
+    } else {
+        &[]
+    };
+
+    diagnostics_to_messages(
+        diagnostics,
+        parsed.errors(),
+        syntax_errors,
+        path,
+        locator,
+        directives,
+    )
 }
 
 const MAX_ITERATIONS: usize = 100;
@@ -357,7 +369,7 @@ pub fn add_noqa_to_path(
     );
 
     // Generate diagnostics, ignoring any existing `noqa` directives.
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         package,
         &locator,
@@ -376,7 +388,7 @@ pub fn add_noqa_to_path(
     // TODO(dhruvmanila): Add support for Jupyter Notebooks
     add_noqa(
         path,
-        &diagnostics,
+        &messages,
         &locator,
         indexer.comment_ranges(),
         &settings.external,
@@ -417,7 +429,7 @@ pub fn lint_only(
     );
 
     // Generate diagnostics.
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         package,
         &locator,
@@ -432,21 +444,8 @@ pub fn lint_only(
         target_version,
     );
 
-    let syntax_errors = if settings.preview.is_enabled() {
-        parsed.unsupported_syntax_errors()
-    } else {
-        &[]
-    };
-
     LinterResult {
-        messages: diagnostics_to_messages(
-            diagnostics,
-            parsed.errors(),
-            syntax_errors,
-            path,
-            &locator,
-            &directives,
-        ),
+        messages,
         has_syntax_error: parsed.has_syntax_errors(),
     }
 }
@@ -532,7 +531,7 @@ pub fn lint_fix<'a>(
     );
 
     // Generate diagnostics.
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         package,
         &locator,
@@ -571,7 +570,7 @@ pub fn lint_fix<'a>(
             code: fixed_contents,
             fixes: applied,
             source_map,
-        }) = fix_file(&diagnostics, &locator, unsafe_fixes)
+        }) = fix_file(&messages, &locator, unsafe_fixes)
         {
             if iterations < MAX_ITERATIONS {
                 // Count the number of fixed errors.
@@ -588,25 +587,12 @@ pub fn lint_fix<'a>(
                 continue;
             }
 
-            report_failed_to_converge_error(path, transformed.source_code(), &diagnostics);
+            report_failed_to_converge_error(path, transformed.source_code(), &messages);
         }
 
-        let syntax_errors = if settings.preview.is_enabled() {
-            parsed.unsupported_syntax_errors()
-        } else {
-            &[]
-        };
-
         return Ok(FixerResult {
             result: LinterResult {
-                messages: diagnostics_to_messages(
-                    diagnostics,
-                    parsed.errors(),
-                    syntax_errors,
-                    path,
-                    &locator,
-                    &directives,
-                ),
+                messages,
                 has_syntax_error: !is_valid_syntax,
             },
             transformed,
@@ -625,8 +611,8 @@ fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {
 }
 
 #[allow(clippy::print_stderr)]
-fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[Diagnostic]) {
-    let codes = collect_rule_codes(diagnostics.iter().map(|diagnostic| diagnostic.kind.rule()));
+fn report_failed_to_converge_error(path: &Path, transformed: &str, messages: &[Message]) {
+    let codes = collect_rule_codes(messages.iter().filter_map(Message::rule));
     if cfg!(debug_assertions) {
         eprintln!(
             "{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
@@ -41,24 +41,25 @@ mod text;
 
 /// Message represents either a diagnostic message corresponding to a rule violation or a syntax
 /// error message raised by the parser.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Message {
     Diagnostic(DiagnosticMessage),
     SyntaxError(SyntaxErrorMessage),
 }
 
 /// A diagnostic message corresponding to a rule violation.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct DiagnosticMessage {
     pub kind: DiagnosticKind,
     pub range: TextRange,
     pub fix: Option<Fix>,
     pub parent: Option<TextSize>,
     pub file: SourceFile,
     pub noqa_offset: TextSize,
 }
 
 /// A syntax error message raised by the parser.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct SyntaxErrorMessage {
     pub message: String,
     pub range: TextRange,
@@ -91,6 +92,7 @@ impl Message {
             range: diagnostic.range(),
             kind: diagnostic.kind,
             fix: diagnostic.fix,
             parent: diagnostic.parent,
             file,
             noqa_offset,
         })
@@ -140,6 +142,18 @@ impl Message {
         }
     }
 
+    pub fn into_diagnostic_message(self) -> Option<DiagnosticMessage> {
+        match self {
+            Message::Diagnostic(m) => Some(m),
+            Message::SyntaxError(_) => None,
+        }
+    }
+
+    /// Returns `true` if `self` is a diagnostic message.
+    pub const fn is_diagnostic_message(&self) -> bool {
+        matches!(self, Message::Diagnostic(_))
+    }
+
     /// Returns `true` if `self` is a syntax error message.
     pub const fn is_syntax_error(&self) -> bool {
         matches!(self, Message::SyntaxError(_))
@@ -9,25 +9,26 @@ use anyhow::Result;
 use itertools::Itertools;
 use log::warn;
 
-use ruff_diagnostics::{Diagnostic, Edit};
+use ruff_diagnostics::Edit;
 use ruff_python_trivia::{indentation_at_offset, CommentRanges, Cursor};
 use ruff_source_file::{LineEnding, LineRanges};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 
 use crate::codes::NoqaCode;
 use crate::fs::relativize_path;
+use crate::message::Message;
 use crate::registry::{AsRule, Rule, RuleSet};
 use crate::rule_redirects::get_redirect_target;
 use crate::Locator;
 
-/// Generates an array of edits that matches the length of `diagnostics`.
+/// Generates an array of edits that matches the length of `messages`.
 /// Each potential edit in the array is paired, in order, with the associated diagnostic.
 /// Each edit will add a `noqa` comment to the appropriate line in the source to hide
 /// the diagnostic. These edits may conflict with each other and should not be applied
 /// simultaneously.
 pub fn generate_noqa_edits(
     path: &Path,
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     comment_ranges: &CommentRanges,
     external: &[String],
@@ -37,7 +38,7 @@ pub fn generate_noqa_edits(
     let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path);
     let exemption = FileExemption::from(&file_directives);
     let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
-    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
+    let comments = find_noqa_comments(messages, locator, &exemption, &directives, noqa_line_for);
     build_noqa_edits_by_diagnostic(comments, locator, line_ending)
 }
 
@@ -699,10 +700,10 @@ impl Display for LexicalError {
 
 impl Error for LexicalError {}
 
-/// Adds noqa comments to suppress all diagnostics of a file.
+/// Adds noqa comments to suppress all messages of a file.
 pub(crate) fn add_noqa(
     path: &Path,
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     comment_ranges: &CommentRanges,
     external: &[String],
@@ -711,7 +712,7 @@ pub(crate) fn add_noqa(
 ) -> Result<usize> {
     let (count, output) = add_noqa_inner(
         path,
-        diagnostics,
+        messages,
         locator,
         comment_ranges,
         external,
@@ -725,7 +726,7 @@ pub(crate) fn add_noqa(
 
 fn add_noqa_inner(
     path: &Path,
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     comment_ranges: &CommentRanges,
     external: &[String],
@@ -740,7 +741,7 @@ fn add_noqa_inner(
 
     let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
 
-    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
+    let comments = find_noqa_comments(messages, locator, &exemption, &directives, noqa_line_for);
 
     let edits = build_noqa_edits_by_line(comments, locator, line_ending);
 
@@ -776,7 +777,7 @@ fn build_noqa_edits_by_diagnostic(
         if let Some(noqa_edit) = generate_noqa_edit(
             comment.directive,
             comment.line,
-            RuleSet::from_rule(comment.diagnostic.kind.rule()),
+            RuleSet::from_rule(comment.rule),
             locator,
             line_ending,
         ) {
@@ -812,7 +813,7 @@ fn build_noqa_edits_by_line<'a>(
             offset,
             matches
                 .into_iter()
-                .map(|NoqaComment { diagnostic, .. }| diagnostic.kind.rule())
+                .map(|NoqaComment { rule, .. }| rule)
                 .collect(),
             locator,
             line_ending,
@@ -825,22 +826,27 @@ fn build_noqa_edits_by_line<'a>(
 
 struct NoqaComment<'a> {
     line: TextSize,
-    diagnostic: &'a Diagnostic,
+    rule: Rule,
     directive: Option<&'a Directive<'a>>,
 }
 
 fn find_noqa_comments<'a>(
-    diagnostics: &'a [Diagnostic],
+    messages: &'a [Message],
    locator: &'a Locator,
    exemption: &'a FileExemption,
    directives: &'a NoqaDirectives,
    noqa_line_for: &NoqaMapping,
 ) -> Vec<Option<NoqaComment<'a>>> {
-    // List of noqa comments, ordered to match up with `diagnostics`
+    // List of noqa comments, ordered to match up with `messages`
     let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![];
 
     // Mark any non-ignored diagnostics.
-    for diagnostic in diagnostics {
+    for message in messages {
+        let Message::Diagnostic(diagnostic) = message else {
+            comments_by_line.push(None);
+            continue;
+        };
+
         match &exemption {
             FileExemption::All(_) => {
                 // If the file is exempted, don't add any noqa directives.
@@ -876,7 +882,9 @@ fn find_noqa_comments<'a>(
             }
         }
 
-        let noqa_offset = noqa_line_for.resolve(diagnostic.start());
+        let noqa_offset = noqa_line_for.resolve(diagnostic.range.start());
+
+        let rule = diagnostic.kind.rule();
 
         // Or ignored by the directive itself?
         if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
@@ -886,11 +894,10 @@ fn find_noqa_comments<'a>(
                     continue;
                 }
                 directive @ Directive::Codes(codes) => {
-                    let rule = diagnostic.kind.rule();
                     if !codes.includes(rule) {
                         comments_by_line.push(Some(NoqaComment {
                             line: directive_line.start(),
-                            diagnostic,
+                            rule,
                             directive: Some(directive),
                         }));
                     }
@@ -902,7 +909,7 @@ fn find_noqa_comments<'a>(
         // There's no existing noqa directive that suppresses the diagnostic.
         comments_by_line.push(Some(NoqaComment {
             line: locator.line_start(noqa_offset),
-            diagnostic,
+            rule,
             directive: None,
         }));
     }
@@ -1209,9 +1216,10 @@ mod tests {
 
     use ruff_diagnostics::{Diagnostic, Edit};
     use ruff_python_trivia::CommentRanges;
-    use ruff_source_file::LineEnding;
-    use ruff_text_size::{TextLen, TextRange, TextSize};
+    use ruff_source_file::{LineEnding, SourceFileBuilder};
+    use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 
+    use crate::message::Message;
     use crate::noqa::{
         add_noqa_inner, lex_codes, lex_file_exemption, lex_inline_noqa, Directive, LexicalError,
         NoqaLexerOutput, NoqaMapping,
@@ -1236,6 +1244,17 @@ mod tests {
         }
     }
 
+    /// Create a [`Message`] with a placeholder filename and rule code from `diagnostic`.
+    fn message_from_diagnostic(
+        diagnostic: Diagnostic,
+        path: impl AsRef<Path>,
+        source: &str,
+    ) -> Message {
+        let noqa_offset = diagnostic.start();
+        let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish();
+        Message::from_diagnostic(diagnostic, file, noqa_offset)
+    }
+
     #[test]
     fn noqa_lex_codes() {
         let source = " F401,,F402F403 # and so on";
@@ -2816,18 +2835,19 @@ mod tests {
         assert_eq!(count, 0);
         assert_eq!(output, format!("{contents}"));
 
-        let diagnostics = [Diagnostic::new(
+        let messages = [Diagnostic::new(
             UnusedVariable {
                 name: "x".to_string(),
             },
             TextRange::new(TextSize::from(0), TextSize::from(0)),
-        )];
+        )]
+        .map(|d| message_from_diagnostic(d, path, contents));
 
         let contents = "x = 1";
         let noqa_line_for = NoqaMapping::default();
         let (count, output) = add_noqa_inner(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(contents),
             &CommentRanges::default(),
             &[],
@@ -2837,7 +2857,7 @@ mod tests {
         assert_eq!(count, 1);
         assert_eq!(output, "x = 1 # noqa: F841\n");
 
-        let diagnostics = [
+        let messages = [
             Diagnostic::new(
                 AmbiguousVariableName("x".to_string()),
                 TextRange::new(TextSize::from(0), TextSize::from(0)),
@@ -2848,14 +2868,15 @@ mod tests {
             },
             TextRange::new(TextSize::from(0), TextSize::from(0)),
             ),
-        ];
+        ]
+        .map(|d| message_from_diagnostic(d, path, contents));
         let contents = "x = 1 # noqa: E741\n";
         let noqa_line_for = NoqaMapping::default();
         let comment_ranges =
             CommentRanges::new(vec![TextRange::new(TextSize::from(7), TextSize::from(19))]);
         let (count, output) = add_noqa_inner(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(contents),
             &comment_ranges,
             &[],
@@ -2865,7 +2886,7 @@ mod tests {
         assert_eq!(count, 1);
         assert_eq!(output, "x = 1 # noqa: E741, F841\n");
 
-        let diagnostics = [
+        let messages = [
             Diagnostic::new(
                 AmbiguousVariableName("x".to_string()),
                 TextRange::new(TextSize::from(0), TextSize::from(0)),
@@ -2876,14 +2897,15 @@ mod tests {
             },
             TextRange::new(TextSize::from(0), TextSize::from(0)),
             ),
-        ];
+        ]
+        .map(|d| message_from_diagnostic(d, path, contents));
         let contents = "x = 1 # noqa";
         let noqa_line_for = NoqaMapping::default();
         let comment_ranges =
             CommentRanges::new(vec![TextRange::new(TextSize::from(7), TextSize::from(13))]);
         let (count, output) = add_noqa_inner(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(contents),
             &comment_ranges,
             &[],
@@ -2907,14 +2929,15 @@ print(
 )
 "#;
         let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect();
-        let diagnostics = [Diagnostic::new(
+        let messages = [Diagnostic::new(
             PrintfStringFormatting,
             TextRange::new(12.into(), 79.into()),
-        )];
+        )]
+        .map(|d| message_from_diagnostic(d, path, source));
         let comment_ranges = CommentRanges::default();
         let edits = generate_noqa_edits(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(source),
             &comment_ranges,
             &[],
@@ -2938,15 +2961,16 @@ print(
 foo;
 bar =
 ";
-        let diagnostics = [Diagnostic::new(
+        let messages = [Diagnostic::new(
             UselessSemicolon,
             TextRange::new(4.into(), 5.into()),
-        )];
+        )]
+        .map(|d| message_from_diagnostic(d, path, source));
         let noqa_line_for = NoqaMapping::default();
         let comment_ranges = CommentRanges::default();
         let edits = generate_noqa_edits(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(source),
             &comment_ranges,
             &[],
@@ -22,6 +22,7 @@ mod tests {
     use ruff_text_size::Ranged;
 
     use crate::linter::check_path;
+    use crate::message::Message;
     use crate::registry::{AsRule, Linter, Rule};
     use crate::rules::isort;
     use crate::rules::pyflakes;
@@ -757,7 +758,7 @@ mod tests {
             &locator,
             &indexer,
         );
-        let mut diagnostics = check_path(
+        let mut messages = check_path(
            Path::new("<filename>"),
            None,
            &locator,
@@ -771,9 +772,10 @@ mod tests {
            &parsed,
            settings.unresolved_target_version,
        );
-        diagnostics.sort_by_key(Ranged::start);
-        let actual = diagnostics
+        messages.sort_by_key(Ranged::start);
+        let actual = messages
            .iter()
+            .filter_map(Message::as_diagnostic_message)
            .map(|diagnostic| diagnostic.kind.rule())
            .collect::<Vec<_>>();
        assert_eq!(actual, expected);
@@ -9,7 +9,7 @@ use anyhow::Result;
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
 
-use ruff_diagnostics::{Applicability, Diagnostic, FixAvailability};
+use ruff_diagnostics::{Applicability, FixAvailability};
 use ruff_notebook::Notebook;
 #[cfg(not(fuzzing))]
 use ruff_notebook::NotebookError;
@@ -19,7 +19,6 @@ use ruff_python_index::Indexer;
 use ruff_python_parser::{ParseError, ParseOptions};
 use ruff_python_trivia::textwrap::dedent;
 use ruff_source_file::SourceFileBuilder;
-use ruff_text_size::Ranged;
 
 use crate::fix::{fix_file, FixResult};
 use crate::linter::check_path;
@@ -124,7 +123,7 @@ pub(crate) fn test_contents<'a>(
         &locator,
         &indexer,
     );
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         path.parent()
             .and_then(|parent| detect_package_root(parent, &settings.namespace_packages))
@@ -148,25 +147,22 @@ pub(crate) fn test_contents<'a>(
 
     let mut transformed = Cow::Borrowed(source_kind);
 
-    if diagnostics
-        .iter()
-        .any(|diagnostic| diagnostic.fix.is_some())
-    {
-        let mut diagnostics = diagnostics.clone();
+    if messages.iter().any(|message| message.fix().is_some()) {
+        let mut messages = messages.clone();
 
         while let Some(FixResult {
             code: fixed_contents,
             source_map,
             ..
         }) = fix_file(
-            &diagnostics,
+            &messages,
             &Locator::new(transformed.source_code()),
             UnsafeFixes::Enabled,
        ) {
            if iterations < max_iterations() {
                iterations += 1;
            } else {
-                let output = print_diagnostics(diagnostics, path, &transformed);
+                let output = print_diagnostics(messages, path, &transformed);
 
                panic!(
                    "Failed to converge after {} iterations. This likely \
@@ -192,7 +188,7 @@ pub(crate) fn test_contents<'a>(
                &indexer,
            );
 
-            let fixed_diagnostics = check_path(
+            let fixed_messages = check_path(
                path,
                None,
                &locator,
@@ -209,7 +205,7 @@ pub(crate) fn test_contents<'a>(
 
            if parsed.has_invalid_syntax() && !source_has_errors {
                // Previous fix introduced a syntax error, abort
-                let fixes = print_diagnostics(diagnostics, path, source_kind);
+                let fixes = print_diagnostics(messages, path, source_kind);
                let syntax_errors =
                    print_syntax_errors(parsed.errors(), path, &locator, &transformed);
 
@@ -224,7 +220,7 @@ Source with applied fixes:
                );
            }
 
-            diagnostics = fixed_diagnostics;
+            messages = fixed_messages;
        }
    }
 
@@ -234,9 +230,10 @@ Source with applied fixes:
     )
     .finish();
 
-    let messages = diagnostics
+    let messages = messages
         .into_iter()
-        .map(|diagnostic| {
+        .filter_map(Message::into_diagnostic_message)
+        .map(|mut diagnostic| {
             let rule = diagnostic.kind.rule();
             let fixable = diagnostic.fix.as_ref().is_some_and(|fix| {
                 matches!(
@@ -277,9 +274,10 @@ Either ensure you always emit a fix or change `Violation::FIX_AVAILABILITY` to e
             );
 
             // Not strictly necessary but adds some coverage for this code path
-            let noqa = directives.noqa_line_for.resolve(diagnostic.start());
+            diagnostic.noqa_offset = directives.noqa_line_for.resolve(diagnostic.range.start());
+            diagnostic.file = source_code.clone();
 
-            Message::from_diagnostic(diagnostic, source_code.clone(), noqa)
+            Message::Diagnostic(diagnostic)
         })
         .chain(parsed.errors().iter().map(|parse_error| {
             Message::from_parse_error(parse_error, &locator, source_code.clone())
@@ -310,18 +308,9 @@ fn print_syntax_errors(
     }
 }
 
-fn print_diagnostics(diagnostics: Vec<Diagnostic>, path: &Path, source: &SourceKind) -> String {
-    let filename = path.file_name().unwrap().to_string_lossy();
-    let source_file = SourceFileBuilder::new(filename.as_ref(), source.source_code()).finish();
-
-    let messages: Vec<_> = diagnostics
-        .into_iter()
-        .map(|diagnostic| {
-            let noqa_start = diagnostic.start();
-
-            Message::from_diagnostic(diagnostic, source_file.clone(), noqa_start)
-        })
-        .collect();
+/// Print the [`Message::Diagnostic`]s in `messages`.
+fn print_diagnostics(mut messages: Vec<Message>, path: &Path, source: &SourceKind) -> String {
+    messages.retain(Message::is_diagnostic_message);
 
     if let Some(notebook) = source.as_ipy_notebook() {
         print_jupyter_messages(&messages, path, notebook)