Mirror of https://github.com/astral-sh/ruff.git, synced 2025-08-02 09:52:18 +00:00
[internal] Return `Message`s from `check_path` (#16837)
Summary
--
This PR updates `check_path` in the `ruff_linter` crate to return a `Vec<Message>` instead of a `Vec<Diagnostic>`. The main motivation for this is to make it easier to convert semantic syntax errors directly into `Message`s rather than `Diagnostic`s in #16106. However, this also has the benefit of keeping the preview check on unsupported syntax errors in `check_path`, as suggested in https://github.com/astral-sh/ruff/pull/16429#discussion_r1974748024.

All of the interesting changes are in the first commit. The second commit just renames variables like `diagnostics` to `messages`, and the third commit is a tiny import fix.

I also renamed the `ExpandedMessage::location` field to `start_location`, which caused a few extra commits tidying up the playground code. I thought it was nicely symmetric with `end_location`, but I'm happy to revert that too.

Test Plan
--
Existing tests. I also tested the playground and server manually.
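For context, here is a minimal, self-contained sketch of the caller-facing shape after this change. The `Message` enum, its two variants, and `as_diagnostic_message` mirror the types visible in the diff below; the `rule_code` field and the sample values are simplified placeholders rather than the crate's real fields, so treat this as an illustration, not the actual API.

// Sketch only: simplified stand-ins for the real `ruff_linter` types.
#[derive(Debug)]
struct DiagnosticMessage {
    // Placeholder for the real `kind`/`fix`/`file`/`noqa_offset` fields.
    rule_code: &'static str,
}

#[derive(Debug)]
struct SyntaxErrorMessage {
    message: String,
}

#[derive(Debug)]
enum Message {
    Diagnostic(DiagnosticMessage),
    SyntaxError(SyntaxErrorMessage),
}

impl Message {
    // Mirrors the `Message::as_diagnostic_message` helper used throughout the diff.
    fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> {
        match self {
            Message::Diagnostic(diagnostic) => Some(diagnostic),
            Message::SyntaxError(_) => None,
        }
    }
}

fn main() {
    // `check_path` now returns `Vec<Message>`; callers that previously worked
    // on `Vec<Diagnostic>` filter for the diagnostic variant instead.
    let messages = vec![
        Message::Diagnostic(DiagnosticMessage { rule_code: "F841" }),
        Message::SyntaxError(SyntaxErrorMessage {
            message: "SyntaxError: Expected an expression".to_string(),
        }),
    ];

    let rule_codes: Vec<&str> = messages
        .iter()
        .filter_map(Message::as_diagnostic_message)
        .map(|diagnostic| diagnostic.rule_code)
        .collect();
    assert_eq!(rule_codes, ["F841"]);

    let syntax_errors: Vec<&str> = messages
        .iter()
        .filter_map(|message| match message {
            Message::SyntaxError(error) => Some(error.message.as_str()),
            Message::Diagnostic(_) => None,
        })
        .collect();
    assert_eq!(syntax_errors.len(), 1);
}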
This commit is contained in:
parent f2a9960fb3
commit 22de00de16
13 changed files with 280 additions and 294 deletions

@@ -353,6 +353,7 @@ impl FileCache {
                         fix: msg.fix.clone(),
                         file: file.clone(),
                         noqa_offset: msg.noqa_offset,
+                        parent: msg.parent,
                     })
                 })
                 .collect()
@@ -445,6 +446,7 @@ impl LintCacheData {
                 CacheMessage {
                     kind: msg.kind.clone(),
                     range: msg.range,
+                    parent: msg.parent,
                     fix: msg.fix.clone(),
                     noqa_offset: msg.noqa_offset,
                 }
@@ -465,6 +467,7 @@ pub(super) struct CacheMessage {
     kind: DiagnosticKind,
     /// Range into the message's [`FileCache::source`].
     range: TextRange,
+    parent: Option<TextSize>,
     fix: Option<Fix>,
     noqa_offset: TextSize,
 }
@@ -702,7 +705,10 @@ mod tests {
             .unwrap();
         }

-        assert_eq!(expected_diagnostics, got_diagnostics);
+        assert_eq!(
+            expected_diagnostics, got_diagnostics,
+            "left == {expected_diagnostics:#?}, right == {got_diagnostics:#?}",
+        );
     }

     #[test]

@@ -592,6 +592,7 @@ fn all_lines_fit(
 #[cfg(test)]
 mod tests {
     use anyhow::{anyhow, Result};
+    use ruff_source_file::SourceFileBuilder;
     use test_case::test_case;

     use ruff_diagnostics::{Diagnostic, Edit, Fix};
@@ -604,6 +605,7 @@ mod tests {
     use crate::fix::edits::{
         add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
     };
+    use crate::message::DiagnosticMessage;
     use crate::Locator;

     /// Parse the given source using [`Mode::Module`] and return the first statement.
@@ -735,14 +737,22 @@ x = 1 \
         let diag = {
             use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
             let mut iter = edits.into_iter();
-            Diagnostic::new(
+            let diag = Diagnostic::new(
                 MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary.
                 TextRange::default(),
             )
             .with_fix(Fix::safe_edits(
                 iter.next().ok_or(anyhow!("expected edits nonempty"))?,
                 iter,
-            ))
+            ));
+            DiagnosticMessage {
+                kind: diag.kind,
+                range: diag.range,
+                fix: diag.fix,
+                parent: diag.parent,
+                file: SourceFileBuilder::new("<filename>", "<code>").finish(),
+                noqa_offset: TextSize::default(),
+            }
         };
         assert_eq!(apply_fixes([diag].iter(), &locator).code, expect);
         Ok(())

@@ -3,10 +3,11 @@ use std::collections::BTreeSet;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};

-use ruff_diagnostics::{Diagnostic, Edit, Fix, IsolationLevel, SourceMap};
+use ruff_diagnostics::{Edit, Fix, IsolationLevel, SourceMap};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

 use crate::linter::FixTable;
+use crate::message::{DiagnosticMessage, Message};
 use crate::registry::{AsRule, Rule};
 use crate::settings::types::UnsafeFixes;
 use crate::Locator;
@@ -26,16 +27,17 @@ pub(crate) struct FixResult {

 /// Fix errors in a file, and write the fixed source code to disk.
 pub(crate) fn fix_file(
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     unsafe_fixes: UnsafeFixes,
 ) -> Option<FixResult> {
     let required_applicability = unsafe_fixes.required_applicability();

-    let mut with_fixes = diagnostics
+    let mut with_fixes = messages
         .iter()
-        .filter(|diagnostic| {
-            diagnostic
+        .filter_map(Message::as_diagnostic_message)
+        .filter(|message| {
+            message
                 .fix
                 .as_ref()
                 .is_some_and(|fix| fix.applies(required_applicability))
@@ -51,7 +53,7 @@ pub(crate) fn fix_file(

 /// Apply a series of fixes.
 fn apply_fixes<'a>(
-    diagnostics: impl Iterator<Item = &'a Diagnostic>,
+    diagnostics: impl Iterator<Item = &'a DiagnosticMessage>,
     locator: &'a Locator<'a>,
 ) -> FixResult {
     let mut output = String::with_capacity(locator.len());
@@ -161,22 +163,30 @@ fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Orderi

 #[cfg(test)]
 mod tests {
-    use ruff_diagnostics::{Diagnostic, Edit, Fix, SourceMarker};
+    use ruff_diagnostics::{Edit, Fix, SourceMarker};
+    use ruff_source_file::SourceFileBuilder;
     use ruff_text_size::{Ranged, TextSize};

     use crate::fix::{apply_fixes, FixResult};
+    use crate::message::DiagnosticMessage;
     use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
     use crate::Locator;

     #[allow(deprecated)]
-    fn create_diagnostics(edit: impl IntoIterator<Item = Edit>) -> Vec<Diagnostic> {
+    fn create_diagnostics(
+        filename: &str,
+        source: &str,
+        edit: impl IntoIterator<Item = Edit>,
+    ) -> Vec<DiagnosticMessage> {
         edit.into_iter()
-            .map(|edit| Diagnostic {
+            .map(|edit| DiagnosticMessage {
                 // The choice of rule here is arbitrary.
                 kind: MissingNewlineAtEndOfFile.into(),
                 range: edit.range(),
                 fix: Some(Fix::safe_edit(edit)),
                 parent: None,
+                file: SourceFileBuilder::new(filename, source).finish(),
+                noqa_offset: TextSize::default(),
             })
             .collect()
     }
@@ -184,7 +194,7 @@ mod tests {
     #[test]
     fn empty_file() {
         let locator = Locator::new(r"");
-        let diagnostics = create_diagnostics([]);
+        let diagnostics = create_diagnostics("<filename>", locator.contents(), []);
         let FixResult {
             code,
             fixes,
@@ -205,10 +215,14 @@ print("hello world")
 "#
         .trim(),
         );
-        let diagnostics = create_diagnostics([Edit::insertion(
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [Edit::insertion(
                 "import sys\n".to_string(),
                 TextSize::new(10),
-        )]);
+            )],
+        );
         let FixResult {
             code,
             fixes,
@@ -243,11 +257,15 @@ class A(object):
 "
         .trim(),
         );
-        let diagnostics = create_diagnostics([Edit::replacement(
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [Edit::replacement(
                 "Bar".to_string(),
                 TextSize::new(8),
                 TextSize::new(14),
-        )]);
+            )],
+        );
         let FixResult {
             code,
             fixes,
@@ -280,7 +298,11 @@ class A(object):
 "
         .trim(),
         );
-        let diagnostics = create_diagnostics([Edit::deletion(TextSize::new(7), TextSize::new(15))]);
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [Edit::deletion(TextSize::new(7), TextSize::new(15))],
+        );
         let FixResult {
             code,
             fixes,
@@ -313,10 +335,14 @@ class A(object, object, object):
 "
         .trim(),
         );
-        let diagnostics = create_diagnostics([
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [
                 Edit::deletion(TextSize::from(8), TextSize::from(16)),
                 Edit::deletion(TextSize::from(22), TextSize::from(30)),
-        ]);
+            ],
+        );
         let FixResult {
             code,
             fixes,
@@ -352,10 +378,14 @@ class A(object):
 "
         .trim(),
         );
-        let diagnostics = create_diagnostics([
+        let diagnostics = create_diagnostics(
+            "<filename>",
+            locator.contents(),
+            [
                 Edit::deletion(TextSize::from(7), TextSize::from(15)),
                 Edit::replacement("ignored".to_string(), TextSize::from(9), TextSize::from(11)),
-        ]);
+            ],
+        );
         let FixResult {
             code,
             fixes,

@@ -58,8 +58,7 @@ pub struct FixerResult<'a> {
     pub fixed: FixTable,
 }

-/// Generate `Diagnostic`s from the source code contents at the
-/// given `Path`.
+/// Generate [`Message`]s from the source code contents at the given `Path`.
 #[allow(clippy::too_many_arguments)]
 pub fn check_path(
     path: &Path,
@@ -74,7 +73,7 @@ pub fn check_path(
     source_type: PySourceType,
     parsed: &Parsed<ModModule>,
     target_version: PythonVersion,
-) -> Vec<Diagnostic> {
+) -> Vec<Message> {
     // Aggregate all diagnostics.
     let mut diagnostics = vec![];

@@ -322,7 +321,20 @@ pub fn check_path(
         }
     }

-    diagnostics
+    let syntax_errors = if settings.preview.is_enabled() {
+        parsed.unsupported_syntax_errors()
+    } else {
+        &[]
+    };
+
+    diagnostics_to_messages(
+        diagnostics,
+        parsed.errors(),
+        syntax_errors,
+        path,
+        locator,
+        directives,
+    )
 }

 const MAX_ITERATIONS: usize = 100;
@@ -357,7 +369,7 @@ pub fn add_noqa_to_path(
     );

     // Generate diagnostics, ignoring any existing `noqa` directives.
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         package,
         &locator,
@@ -376,7 +388,7 @@ pub fn add_noqa_to_path(
     // TODO(dhruvmanila): Add support for Jupyter Notebooks
     add_noqa(
         path,
-        &diagnostics,
+        &messages,
         &locator,
         indexer.comment_ranges(),
         &settings.external,
@@ -417,7 +429,7 @@ pub fn lint_only(
     );

     // Generate diagnostics.
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         package,
         &locator,
@@ -432,21 +444,8 @@ pub fn lint_only(
         target_version,
     );

-    let syntax_errors = if settings.preview.is_enabled() {
-        parsed.unsupported_syntax_errors()
-    } else {
-        &[]
-    };
-
     LinterResult {
-        messages: diagnostics_to_messages(
-            diagnostics,
-            parsed.errors(),
-            syntax_errors,
-            path,
-            &locator,
-            &directives,
-        ),
+        messages,
         has_syntax_error: parsed.has_syntax_errors(),
     }
 }
@@ -532,7 +531,7 @@ pub fn lint_fix<'a>(
     );

     // Generate diagnostics.
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         package,
         &locator,
@@ -571,7 +570,7 @@ pub fn lint_fix<'a>(
             code: fixed_contents,
             fixes: applied,
             source_map,
-        }) = fix_file(&diagnostics, &locator, unsafe_fixes)
+        }) = fix_file(&messages, &locator, unsafe_fixes)
         {
             if iterations < MAX_ITERATIONS {
                 // Count the number of fixed errors.
@@ -588,25 +587,12 @@ pub fn lint_fix<'a>(
                 continue;
             }

-            report_failed_to_converge_error(path, transformed.source_code(), &diagnostics);
+            report_failed_to_converge_error(path, transformed.source_code(), &messages);
         }

-        let syntax_errors = if settings.preview.is_enabled() {
-            parsed.unsupported_syntax_errors()
-        } else {
-            &[]
-        };
-
         return Ok(FixerResult {
             result: LinterResult {
-                messages: diagnostics_to_messages(
-                    diagnostics,
-                    parsed.errors(),
-                    syntax_errors,
-                    path,
-                    &locator,
-                    &directives,
-                ),
+                messages,
                 has_syntax_error: !is_valid_syntax,
             },
             transformed,
@@ -625,8 +611,8 @@ fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {
 }

 #[allow(clippy::print_stderr)]
-fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[Diagnostic]) {
-    let codes = collect_rule_codes(diagnostics.iter().map(|diagnostic| diagnostic.kind.rule()));
+fn report_failed_to_converge_error(path: &Path, transformed: &str, messages: &[Message]) {
+    let codes = collect_rule_codes(messages.iter().filter_map(Message::rule));
     if cfg!(debug_assertions) {
         eprintln!(
             "{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",

@@ -41,24 +41,25 @@ mod text;

 /// Message represents either a diagnostic message corresponding to a rule violation or a syntax
 /// error message raised by the parser.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Message {
     Diagnostic(DiagnosticMessage),
     SyntaxError(SyntaxErrorMessage),
 }

 /// A diagnostic message corresponding to a rule violation.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct DiagnosticMessage {
     pub kind: DiagnosticKind,
     pub range: TextRange,
     pub fix: Option<Fix>,
+    pub parent: Option<TextSize>,
     pub file: SourceFile,
     pub noqa_offset: TextSize,
 }

 /// A syntax error message raised by the parser.
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct SyntaxErrorMessage {
     pub message: String,
     pub range: TextRange,
@@ -91,6 +92,7 @@ impl Message {
             range: diagnostic.range(),
             kind: diagnostic.kind,
             fix: diagnostic.fix,
+            parent: diagnostic.parent,
             file,
             noqa_offset,
         })
@@ -140,6 +142,18 @@ impl Message {
         }
     }

+    pub fn into_diagnostic_message(self) -> Option<DiagnosticMessage> {
+        match self {
+            Message::Diagnostic(m) => Some(m),
+            Message::SyntaxError(_) => None,
+        }
+    }
+
+    /// Returns `true` if `self` is a diagnostic message.
+    pub const fn is_diagnostic_message(&self) -> bool {
+        matches!(self, Message::Diagnostic(_))
+    }
+
     /// Returns `true` if `self` is a syntax error message.
     pub const fn is_syntax_error(&self) -> bool {
         matches!(self, Message::SyntaxError(_))

@@ -9,25 +9,26 @@ use anyhow::Result;
 use itertools::Itertools;
 use log::warn;

-use ruff_diagnostics::{Diagnostic, Edit};
+use ruff_diagnostics::Edit;
 use ruff_python_trivia::{indentation_at_offset, CommentRanges, Cursor};
 use ruff_source_file::{LineEnding, LineRanges};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

 use crate::codes::NoqaCode;
 use crate::fs::relativize_path;
+use crate::message::Message;
 use crate::registry::{AsRule, Rule, RuleSet};
 use crate::rule_redirects::get_redirect_target;
 use crate::Locator;

-/// Generates an array of edits that matches the length of `diagnostics`.
+/// Generates an array of edits that matches the length of `messages`.
 /// Each potential edit in the array is paired, in order, with the associated diagnostic.
 /// Each edit will add a `noqa` comment to the appropriate line in the source to hide
 /// the diagnostic. These edits may conflict with each other and should not be applied
 /// simultaneously.
 pub fn generate_noqa_edits(
     path: &Path,
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     comment_ranges: &CommentRanges,
     external: &[String],
@@ -37,7 +38,7 @@ pub fn generate_noqa_edits(
     let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path);
     let exemption = FileExemption::from(&file_directives);
     let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
-    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
+    let comments = find_noqa_comments(messages, locator, &exemption, &directives, noqa_line_for);
     build_noqa_edits_by_diagnostic(comments, locator, line_ending)
 }

@@ -699,10 +700,10 @@ impl Display for LexicalError {

 impl Error for LexicalError {}

-/// Adds noqa comments to suppress all diagnostics of a file.
+/// Adds noqa comments to suppress all messages of a file.
 pub(crate) fn add_noqa(
     path: &Path,
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     comment_ranges: &CommentRanges,
     external: &[String],
@@ -711,7 +712,7 @@ pub(crate) fn add_noqa(
 ) -> Result<usize> {
     let (count, output) = add_noqa_inner(
         path,
-        diagnostics,
+        messages,
         locator,
         comment_ranges,
         external,
@@ -725,7 +726,7 @@ pub(crate) fn add_noqa(

 fn add_noqa_inner(
     path: &Path,
-    diagnostics: &[Diagnostic],
+    messages: &[Message],
     locator: &Locator,
     comment_ranges: &CommentRanges,
     external: &[String],
@@ -740,7 +741,7 @@ fn add_noqa_inner(

     let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);

-    let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
+    let comments = find_noqa_comments(messages, locator, &exemption, &directives, noqa_line_for);

     let edits = build_noqa_edits_by_line(comments, locator, line_ending);

@@ -776,7 +777,7 @@ fn build_noqa_edits_by_diagnostic(
         if let Some(noqa_edit) = generate_noqa_edit(
             comment.directive,
             comment.line,
-            RuleSet::from_rule(comment.diagnostic.kind.rule()),
+            RuleSet::from_rule(comment.rule),
             locator,
             line_ending,
         ) {
@@ -812,7 +813,7 @@ fn build_noqa_edits_by_line<'a>(
                 offset,
                 matches
                     .into_iter()
-                    .map(|NoqaComment { diagnostic, .. }| diagnostic.kind.rule())
+                    .map(|NoqaComment { rule, .. }| rule)
                     .collect(),
                 locator,
                 line_ending,
@@ -825,22 +826,27 @@ fn build_noqa_edits_by_line<'a>(

 struct NoqaComment<'a> {
     line: TextSize,
-    diagnostic: &'a Diagnostic,
+    rule: Rule,
     directive: Option<&'a Directive<'a>>,
 }

 fn find_noqa_comments<'a>(
-    diagnostics: &'a [Diagnostic],
+    messages: &'a [Message],
     locator: &'a Locator,
     exemption: &'a FileExemption,
     directives: &'a NoqaDirectives,
     noqa_line_for: &NoqaMapping,
 ) -> Vec<Option<NoqaComment<'a>>> {
-    // List of noqa comments, ordered to match up with `diagnostics`
+    // List of noqa comments, ordered to match up with `messages`
     let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![];

     // Mark any non-ignored diagnostics.
-    for diagnostic in diagnostics {
+    for message in messages {
+        let Message::Diagnostic(diagnostic) = message else {
+            comments_by_line.push(None);
+            continue;
+        };
+
         match &exemption {
             FileExemption::All(_) => {
                 // If the file is exempted, don't add any noqa directives.
@@ -876,7 +882,9 @@ fn find_noqa_comments<'a>(
             }
         }

-        let noqa_offset = noqa_line_for.resolve(diagnostic.start());
+        let noqa_offset = noqa_line_for.resolve(diagnostic.range.start());
+
+        let rule = diagnostic.kind.rule();

         // Or ignored by the directive itself?
         if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
@@ -886,11 +894,10 @@ fn find_noqa_comments<'a>(
                     continue;
                 }
                 directive @ Directive::Codes(codes) => {
-                    let rule = diagnostic.kind.rule();
                     if !codes.includes(rule) {
                         comments_by_line.push(Some(NoqaComment {
                             line: directive_line.start(),
-                            diagnostic,
+                            rule,
                             directive: Some(directive),
                         }));
                     }
@@ -902,7 +909,7 @@ fn find_noqa_comments<'a>(
         // There's no existing noqa directive that suppresses the diagnostic.
         comments_by_line.push(Some(NoqaComment {
             line: locator.line_start(noqa_offset),
-            diagnostic,
+            rule,
             directive: None,
         }));
     }
@@ -1209,9 +1216,10 @@ mod tests {

     use ruff_diagnostics::{Diagnostic, Edit};
     use ruff_python_trivia::CommentRanges;
-    use ruff_source_file::LineEnding;
-    use ruff_text_size::{TextLen, TextRange, TextSize};
+    use ruff_source_file::{LineEnding, SourceFileBuilder};
+    use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

+    use crate::message::Message;
     use crate::noqa::{
         add_noqa_inner, lex_codes, lex_file_exemption, lex_inline_noqa, Directive, LexicalError,
         NoqaLexerOutput, NoqaMapping,
@@ -1236,6 +1244,17 @@ mod tests {
         }
     }

+    /// Create a [`Message`] with a placeholder filename and rule code from `diagnostic`.
+    fn message_from_diagnostic(
+        diagnostic: Diagnostic,
+        path: impl AsRef<Path>,
+        source: &str,
+    ) -> Message {
+        let noqa_offset = diagnostic.start();
+        let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish();
+        Message::from_diagnostic(diagnostic, file, noqa_offset)
+    }
+
     #[test]
     fn noqa_lex_codes() {
         let source = " F401,,F402F403 # and so on";
@@ -2816,18 +2835,19 @@ mod tests {
         assert_eq!(count, 0);
         assert_eq!(output, format!("{contents}"));

-        let diagnostics = [Diagnostic::new(
+        let messages = [Diagnostic::new(
             UnusedVariable {
                 name: "x".to_string(),
             },
             TextRange::new(TextSize::from(0), TextSize::from(0)),
-        )];
+        )]
+        .map(|d| message_from_diagnostic(d, path, contents));

         let contents = "x = 1";
         let noqa_line_for = NoqaMapping::default();
         let (count, output) = add_noqa_inner(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(contents),
             &CommentRanges::default(),
             &[],
@@ -2837,7 +2857,7 @@ mod tests {
         assert_eq!(count, 1);
         assert_eq!(output, "x = 1 # noqa: F841\n");

-        let diagnostics = [
+        let messages = [
             Diagnostic::new(
                 AmbiguousVariableName("x".to_string()),
                 TextRange::new(TextSize::from(0), TextSize::from(0)),
@@ -2848,14 +2868,15 @@ mod tests {
                 },
                 TextRange::new(TextSize::from(0), TextSize::from(0)),
             ),
-        ];
+        ]
+        .map(|d| message_from_diagnostic(d, path, contents));
         let contents = "x = 1 # noqa: E741\n";
         let noqa_line_for = NoqaMapping::default();
         let comment_ranges =
             CommentRanges::new(vec![TextRange::new(TextSize::from(7), TextSize::from(19))]);
         let (count, output) = add_noqa_inner(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(contents),
             &comment_ranges,
             &[],
@@ -2865,7 +2886,7 @@ mod tests {
         assert_eq!(count, 1);
         assert_eq!(output, "x = 1 # noqa: E741, F841\n");

-        let diagnostics = [
+        let messages = [
             Diagnostic::new(
                 AmbiguousVariableName("x".to_string()),
                 TextRange::new(TextSize::from(0), TextSize::from(0)),
@@ -2876,14 +2897,15 @@ mod tests {
                 },
                 TextRange::new(TextSize::from(0), TextSize::from(0)),
             ),
-        ];
+        ]
+        .map(|d| message_from_diagnostic(d, path, contents));
         let contents = "x = 1 # noqa";
         let noqa_line_for = NoqaMapping::default();
         let comment_ranges =
             CommentRanges::new(vec![TextRange::new(TextSize::from(7), TextSize::from(13))]);
         let (count, output) = add_noqa_inner(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(contents),
             &comment_ranges,
             &[],
@@ -2907,14 +2929,15 @@ print(
 )
 "#;
         let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect();
-        let diagnostics = [Diagnostic::new(
+        let messages = [Diagnostic::new(
             PrintfStringFormatting,
             TextRange::new(12.into(), 79.into()),
-        )];
+        )]
+        .map(|d| message_from_diagnostic(d, path, source));
         let comment_ranges = CommentRanges::default();
         let edits = generate_noqa_edits(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(source),
             &comment_ranges,
             &[],
@@ -2938,15 +2961,16 @@ print(
 foo;
 bar =
 ";
-        let diagnostics = [Diagnostic::new(
+        let messages = [Diagnostic::new(
             UselessSemicolon,
             TextRange::new(4.into(), 5.into()),
-        )];
+        )]
+        .map(|d| message_from_diagnostic(d, path, source));
         let noqa_line_for = NoqaMapping::default();
         let comment_ranges = CommentRanges::default();
         let edits = generate_noqa_edits(
             path,
-            &diagnostics,
+            &messages,
             &Locator::new(source),
             &comment_ranges,
             &[],

@@ -22,6 +22,7 @@ mod tests {
     use ruff_text_size::Ranged;

     use crate::linter::check_path;
+    use crate::message::Message;
    use crate::registry::{AsRule, Linter, Rule};
    use crate::rules::isort;
    use crate::rules::pyflakes;
@@ -757,7 +758,7 @@ mod tests {
            &locator,
            &indexer,
        );
-        let mut diagnostics = check_path(
+        let mut messages = check_path(
            Path::new("<filename>"),
            None,
            &locator,
@@ -771,9 +772,10 @@ mod tests {
            &parsed,
            settings.unresolved_target_version,
        );
-        diagnostics.sort_by_key(Ranged::start);
-        let actual = diagnostics
+        messages.sort_by_key(Ranged::start);
+        let actual = messages
            .iter()
+            .filter_map(Message::as_diagnostic_message)
            .map(|diagnostic| diagnostic.kind.rule())
            .collect::<Vec<_>>();
        assert_eq!(actual, expected);

@@ -9,7 +9,7 @@ use anyhow::Result;
 use itertools::Itertools;
 use rustc_hash::FxHashMap;

-use ruff_diagnostics::{Applicability, Diagnostic, FixAvailability};
+use ruff_diagnostics::{Applicability, FixAvailability};
 use ruff_notebook::Notebook;
 #[cfg(not(fuzzing))]
 use ruff_notebook::NotebookError;
@@ -19,7 +19,6 @@ use ruff_python_index::Indexer;
 use ruff_python_parser::{ParseError, ParseOptions};
 use ruff_python_trivia::textwrap::dedent;
 use ruff_source_file::SourceFileBuilder;
-use ruff_text_size::Ranged;

 use crate::fix::{fix_file, FixResult};
 use crate::linter::check_path;
@@ -124,7 +123,7 @@ pub(crate) fn test_contents<'a>(
         &locator,
         &indexer,
     );
-    let diagnostics = check_path(
+    let messages = check_path(
         path,
         path.parent()
             .and_then(|parent| detect_package_root(parent, &settings.namespace_packages))
@@ -148,25 +147,22 @@ pub(crate) fn test_contents<'a>(

     let mut transformed = Cow::Borrowed(source_kind);

-    if diagnostics
-        .iter()
-        .any(|diagnostic| diagnostic.fix.is_some())
-    {
-        let mut diagnostics = diagnostics.clone();
+    if messages.iter().any(|message| message.fix().is_some()) {
+        let mut messages = messages.clone();

         while let Some(FixResult {
             code: fixed_contents,
             source_map,
             ..
         }) = fix_file(
-            &diagnostics,
+            &messages,
             &Locator::new(transformed.source_code()),
             UnsafeFixes::Enabled,
         ) {
             if iterations < max_iterations() {
                 iterations += 1;
             } else {
-                let output = print_diagnostics(diagnostics, path, &transformed);
+                let output = print_diagnostics(messages, path, &transformed);

                 panic!(
                     "Failed to converge after {} iterations. This likely \
@@ -192,7 +188,7 @@ pub(crate) fn test_contents<'a>(
                 &indexer,
             );

-            let fixed_diagnostics = check_path(
+            let fixed_messages = check_path(
                 path,
                 None,
                 &locator,
@@ -209,7 +205,7 @@ pub(crate) fn test_contents<'a>(

             if parsed.has_invalid_syntax() && !source_has_errors {
                 // Previous fix introduced a syntax error, abort
-                let fixes = print_diagnostics(diagnostics, path, source_kind);
+                let fixes = print_diagnostics(messages, path, source_kind);
                 let syntax_errors =
                     print_syntax_errors(parsed.errors(), path, &locator, &transformed);

@@ -224,7 +220,7 @@ Source with applied fixes:
                 );
             }

-            diagnostics = fixed_diagnostics;
+            messages = fixed_messages;
         }
     }

@@ -234,9 +230,10 @@ Source with applied fixes:
     )
     .finish();

-    let messages = diagnostics
+    let messages = messages
         .into_iter()
-        .map(|diagnostic| {
+        .filter_map(Message::into_diagnostic_message)
+        .map(|mut diagnostic| {
             let rule = diagnostic.kind.rule();
             let fixable = diagnostic.fix.as_ref().is_some_and(|fix| {
                 matches!(
@@ -277,9 +274,10 @@ Either ensure you always emit a fix or change `Violation::FIX_AVAILABILITY` to e
            );

            // Not strictly necessary but adds some coverage for this code path
-            let noqa = directives.noqa_line_for.resolve(diagnostic.start());
+            diagnostic.noqa_offset = directives.noqa_line_for.resolve(diagnostic.range.start());
+            diagnostic.file = source_code.clone();

-            Message::from_diagnostic(diagnostic, source_code.clone(), noqa)
+            Message::Diagnostic(diagnostic)
         })
         .chain(parsed.errors().iter().map(|parse_error| {
             Message::from_parse_error(parse_error, &locator, source_code.clone())
@@ -310,18 +308,9 @@ fn print_syntax_errors(
     }
 }

-fn print_diagnostics(diagnostics: Vec<Diagnostic>, path: &Path, source: &SourceKind) -> String {
-    let filename = path.file_name().unwrap().to_string_lossy();
-    let source_file = SourceFileBuilder::new(filename.as_ref(), source.source_code()).finish();
-
-    let messages: Vec<_> = diagnostics
-        .into_iter()
-        .map(|diagnostic| {
-            let noqa_start = diagnostic.start();
-
-            Message::from_diagnostic(diagnostic, source_file.clone(), noqa_start)
-        })
-        .collect();
+/// Print the [`Message::Diagnostic`]s in `messages`.
+fn print_diagnostics(mut messages: Vec<Message>, path: &Path, source: &SourceKind) -> String {
+    messages.retain(Message::is_diagnostic_message);

     if let Some(notebook) = source.as_ipy_notebook() {
         print_jupyter_messages(&messages, path, notebook)

@@ -9,12 +9,13 @@ use crate::{
     session::DocumentQuery,
     PositionEncoding, DIAGNOSTIC_NAME,
 };
-use ruff_diagnostics::{Applicability, Diagnostic, DiagnosticKind, Edit, Fix};
-use ruff_linter::package::PackageRoot;
+use ruff_diagnostics::{Applicability, DiagnosticKind, Edit, Fix};
 use ruff_linter::{
     directives::{extract_directives, Flags},
     generate_noqa_edits,
     linter::check_path,
+    message::{DiagnosticMessage, Message, SyntaxErrorMessage},
+    package::PackageRoot,
     packaging::detect_package_root,
     registry::AsRule,
     settings::flags,
@@ -24,7 +25,7 @@ use ruff_linter::{
 use ruff_notebook::Notebook;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
-use ruff_python_parser::{ParseError, ParseOptions, UnsupportedSyntaxError};
+use ruff_python_parser::ParseOptions;
 use ruff_source_file::LineIndex;
 use ruff_text_size::{Ranged, TextRange};

@@ -120,7 +121,7 @@ pub(crate) fn check(
     let directives = extract_directives(parsed.tokens(), Flags::all(), &locator, &indexer);

     // Generate checks.
-    let diagnostics = check_path(
+    let messages = check_path(
         &query.virtual_file_path(),
         package,
         &locator,
@@ -137,7 +138,7 @@ pub(crate) fn check(

     let noqa_edits = generate_noqa_edits(
         &query.virtual_file_path(),
-        &diagnostics,
+        &messages,
         &locator,
         indexer.comment_ranges(),
         &settings.linter.external,
@@ -159,51 +160,31 @@ pub(crate) fn check(
             .or_default();
     }

-    let lsp_diagnostics = diagnostics
+    let lsp_diagnostics =
+        messages
             .into_iter()
             .zip(noqa_edits)
-            .map(|(diagnostic, noqa_edit)| {
-                to_lsp_diagnostic(
-                    diagnostic,
+            .filter_map(|(message, noqa_edit)| match message {
+                Message::Diagnostic(diagnostic_message) => Some(to_lsp_diagnostic(
+                    diagnostic_message,
                     noqa_edit,
                     &source_kind,
                     locator.to_index(),
                     encoding,
-                )
-            });
-
-    let unsupported_syntax_errors = if settings.linter.preview.is_enabled() {
-        parsed.unsupported_syntax_errors()
+                )),
+                Message::SyntaxError(syntax_error_message) => {
+                    if show_syntax_errors {
+                        Some(syntax_error_to_lsp_diagnostic(
+                            syntax_error_message,
+                            &source_kind,
+                            locator.to_index(),
+                            encoding,
+                        ))
                     } else {
-        &[]
-    };
-
-    let lsp_diagnostics = lsp_diagnostics.chain(
-        show_syntax_errors
-            .then(|| {
-                parsed
-                    .errors()
-                    .iter()
-                    .map(|parse_error| {
-                        parse_error_to_lsp_diagnostic(
-                            parse_error,
-                            &source_kind,
-                            locator.to_index(),
-                            encoding,
-                        )
-                    })
-                    .chain(unsupported_syntax_errors.iter().map(|error| {
-                        unsupported_syntax_error_to_lsp_diagnostic(
-                            error,
-                            &source_kind,
-                            locator.to_index(),
-                            encoding,
-                        )
-                    }))
-            })
-            .into_iter()
-            .flatten(),
-    );
+                        None
+                    }
+                }
+            });

     if let Some(notebook) = query.as_notebook() {
         for (index, diagnostic) in lsp_diagnostics {
@@ -260,13 +241,13 @@ pub(crate) fn fixes_for_diagnostics(
 /// Generates an LSP diagnostic with an associated cell index for the diagnostic to go in.
 /// If the source kind is a text document, the cell index will always be `0`.
 fn to_lsp_diagnostic(
-    diagnostic: Diagnostic,
+    diagnostic: DiagnosticMessage,
     noqa_edit: Option<Edit>,
     source_kind: &SourceKind,
     index: &LineIndex,
     encoding: PositionEncoding,
 ) -> (usize, lsp_types::Diagnostic) {
-    let Diagnostic {
+    let DiagnosticMessage {
         kind,
         range: diagnostic_range,
         fix,
@@ -339,8 +320,8 @@ fn to_lsp_diagnostic(
     )
 }

-fn parse_error_to_lsp_diagnostic(
-    parse_error: &ParseError,
+fn syntax_error_to_lsp_diagnostic(
+    syntax_error: SyntaxErrorMessage,
     source_kind: &SourceKind,
     index: &LineIndex,
     encoding: PositionEncoding,
@@ -349,7 +330,7 @@ fn parse_error_to_lsp_diagnostic(
     let cell: usize;

     if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
-        NotebookRange { cell, range } = parse_error.location.to_notebook_range(
+        NotebookRange { cell, range } = syntax_error.range.to_notebook_range(
             source_kind.source_code(),
             index,
             notebook_index,
@@ -357,46 +338,7 @@ fn parse_error_to_lsp_diagnostic(
         );
     } else {
         cell = usize::default();
-        range = parse_error
-            .location
-            .to_range(source_kind.source_code(), index, encoding);
-    }
-
-    (
-        cell,
-        lsp_types::Diagnostic {
-            range,
-            severity: Some(lsp_types::DiagnosticSeverity::ERROR),
-            tags: None,
-            code: None,
-            code_description: None,
-            source: Some(DIAGNOSTIC_NAME.into()),
-            message: format!("SyntaxError: {}", &parse_error.error),
-            related_information: None,
-            data: None,
-        },
-    )
-}
-
-fn unsupported_syntax_error_to_lsp_diagnostic(
-    unsupported_syntax_error: &UnsupportedSyntaxError,
-    source_kind: &SourceKind,
-    index: &LineIndex,
-    encoding: PositionEncoding,
-) -> (usize, lsp_types::Diagnostic) {
-    let range: lsp_types::Range;
-    let cell: usize;
-
-    if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
-        NotebookRange { cell, range } = unsupported_syntax_error.range.to_notebook_range(
-            source_kind.source_code(),
-            index,
-            notebook_index,
-            encoding,
-        );
-    } else {
-        cell = usize::default();
-        range = unsupported_syntax_error
+        range = syntax_error
             .range
             .to_range(source_kind.source_code(), index, encoding);
     }
@@ -410,7 +352,7 @@ fn unsupported_syntax_error_to_lsp_diagnostic(
             code: None,
             code_description: None,
             source: Some(DIAGNOSTIC_NAME.into()),
-            message: format!("SyntaxError: {unsupported_syntax_error}"),
+            message: syntax_error.message,
             related_information: None,
             data: None,
         },

@@ -1,6 +1,7 @@
 use std::path::Path;

 use js_sys::Error;
+use ruff_linter::message::{DiagnosticMessage, Message, SyntaxErrorMessage};
 use ruff_linter::settings::types::PythonVersion;
 use serde::{Deserialize, Serialize};
 use wasm_bindgen::prelude::*;
@@ -31,7 +32,7 @@ const TYPES: &'static str = r#"
 export interface Diagnostic {
   code: string | null;
   message: string;
-  location: {
+  start_location: {
     row: number;
     column: number;
   };
@@ -60,7 +61,7 @@ export interface Diagnostic {
 pub struct ExpandedMessage {
     pub code: Option<String>,
     pub message: String,
-    pub location: SourceLocation,
+    pub start_location: SourceLocation,
     pub end_location: SourceLocation,
     pub fix: Option<ExpandedFix>,
 }
@@ -188,7 +189,7 @@ impl Workspace {
         );

         // Generate checks.
-        let diagnostics = check_path(
+        let messages = check_path(
             Path::new("<filename>"),
             None,
             &locator,
@@ -205,25 +206,18 @@ impl Workspace {

         let source_code = locator.to_source_code();

-        let unsupported_syntax_errors = if self.settings.linter.preview.is_enabled() {
-            parsed.unsupported_syntax_errors()
-        } else {
-            &[]
-        };
-
-        let messages: Vec<ExpandedMessage> = diagnostics
+        let messages: Vec<ExpandedMessage> = messages
             .into_iter()
-            .map(|diagnostic| {
-                let start_location = source_code.source_location(diagnostic.start());
-                let end_location = source_code.source_location(diagnostic.end());
-
-                ExpandedMessage {
-                    code: Some(diagnostic.kind.rule().noqa_code().to_string()),
-                    message: diagnostic.kind.body,
-                    location: start_location,
-                    end_location,
-                    fix: diagnostic.fix.map(|fix| ExpandedFix {
-                        message: diagnostic.kind.suggestion,
+            .map(|message| match message {
+                Message::Diagnostic(DiagnosticMessage {
+                    kind, range, fix, ..
+                }) => ExpandedMessage {
+                    code: Some(kind.rule().noqa_code().to_string()),
+                    message: kind.body,
+                    start_location: source_code.source_location(range.start()),
+                    end_location: source_code.source_location(range.end()),
+                    fix: fix.map(|fix| ExpandedFix {
+                        message: kind.suggestion,
                         edits: fix
                             .edits()
                             .iter()
@@ -234,32 +228,17 @@ impl Workspace {
                             })
                             .collect(),
                     }),
+                },
+                Message::SyntaxError(SyntaxErrorMessage { message, range, .. }) => {
+                    ExpandedMessage {
+                        code: None,
+                        message,
+                        start_location: source_code.source_location(range.start()),
+                        end_location: source_code.source_location(range.end()),
+                        fix: None,
+                    }
+                }
             })
-            .chain(parsed.errors().iter().map(|parse_error| {
-                let start_location = source_code.source_location(parse_error.location.start());
-                let end_location = source_code.source_location(parse_error.location.end());
-
-                ExpandedMessage {
-                    code: None,
-                    message: format!("SyntaxError: {}", parse_error.error),
-                    location: start_location,
-                    end_location,
-                    fix: None,
-                }
-            }))
-            .chain(unsupported_syntax_errors.iter().map(|error| {
-                let start_location = source_code.source_location(error.range.start());
-                let end_location = source_code.source_location(error.range.end());
-
-                ExpandedMessage {
-                    code: None,
-                    message: format!("SyntaxError: {error}"),
-                    location: start_location,
-                    end_location,
-                    fix: None,
-                }
-            }))
             .collect();

         serde_wasm_bindgen::to_value(&messages).map_err(into_error)

@@ -27,7 +27,7 @@ fn empty_config() {
         [ExpandedMessage {
             code: Some(Rule::IfTuple.noqa_code().to_string()),
             message: "If test is a tuple, which is always `True`".to_string(),
-            location: SourceLocation {
+            start_location: SourceLocation {
                 row: OneIndexed::from_zero_indexed(0),
                 column: OneIndexed::from_zero_indexed(3)
             },
@@ -48,7 +48,7 @@ fn syntax_error() {
         [ExpandedMessage {
             code: None,
             message: "SyntaxError: Expected an expression".to_string(),
-            location: SourceLocation {
+            start_location: SourceLocation {
                 row: OneIndexed::from_zero_indexed(0),
                 column: OneIndexed::from_zero_indexed(3)
             },
@@ -69,7 +69,7 @@ fn unsupported_syntax_error() {
         [ExpandedMessage {
             code: None,
             message: "SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)".to_string(),
-            location: SourceLocation {
+            start_location: SourceLocation {
                 row: OneIndexed::from_zero_indexed(0),
                 column: OneIndexed::from_zero_indexed(0)
             },

@@ -17,11 +17,11 @@ export default function Diagnostics({
  const diagnostics = useMemo(() => {
    const sorted = [...unsorted];
    sorted.sort((a, b) => {
-      if (a.location.row === b.location.row) {
-        return a.location.column - b.location.column;
+      if (a.start_location.row === b.start_location.row) {
+        return a.start_location.column - b.start_location.column;
      }

-      return a.location.row - b.location.row;
+      return a.start_location.row - b.start_location.row;
    });

    return sorted;
@@ -70,18 +70,22 @@ function Items({
      {diagnostics.map((diagnostic, index) => {
        return (
          <li
-            key={`${diagnostic.location.row}:${diagnostic.location.column}-${diagnostic.code ?? index}`}
+            key={`${diagnostic.start_location.row}:${diagnostic.start_location.column}-${diagnostic.code ?? index}`}
          >
            <button
              onClick={() =>
-                onGoTo(diagnostic.location.row, diagnostic.location.column)
+                onGoTo(
+                  diagnostic.start_location.row,
+                  diagnostic.start_location.column,
+                )
              }
              className="w-full text-start"
            >
              {diagnostic.message}{" "}
              <span className="text-gray-500">
                {diagnostic.code != null && `(${diagnostic.code})`} [Ln{" "}
-                {diagnostic.location.row}, Col {diagnostic.location.column}]
+                {diagnostic.start_location.row}, Col{" "}
+                {diagnostic.start_location.column}]
              </span>
            </button>
          </li>

@@ -124,7 +124,7 @@ class RuffCodeActionProvider implements CodeActionProvider {
    range: Range,
  ): languages.ProviderResult<languages.CodeActionList> {
    const actions = this.diagnostics
-      .filter((check) => range.startLineNumber === check.location.row)
+      .filter((check) => range.startLineNumber === check.start_location.row)
      .filter(({ fix }) => fix)
      .map((check) => ({
        title: check.fix
@@ -173,8 +173,8 @@ function updateMarkers(monaco: Monaco, diagnostics: Array<Diagnostic>) {
    model,
    "owner",
    diagnostics.map((diagnostic) => ({
-      startLineNumber: diagnostic.location.row,
-      startColumn: diagnostic.location.column,
+      startLineNumber: diagnostic.start_location.row,
+      startColumn: diagnostic.start_location.column,
      endLineNumber: diagnostic.end_location.row,
      endColumn: diagnostic.end_location.column,
      message: diagnostic.code