Combine `OldDiagnostic` and `Diagnostic` (#19053)
## Summary

This PR is a collaboration with @AlexWaygood from our pairing session last Friday.

The main goal here is removing `ruff_linter::message::OldDiagnostic` in favor of using `ruff_db::diagnostic::Diagnostic` directly. This involved a few major steps:

- Transferring the fields
- Transferring the methods and trait implementations, where possible
- Converting some constructor methods to free functions
- Moving the `SecondaryCode` struct
- Updating the method names

I'm hoping that some of the methods, especially those in the `expect_ruff_*` family, won't be necessary long-term, but I avoided trying to replace them entirely for now to keep the already-large diff a bit smaller.

### Related refactors

Alex and I noticed a few refactoring opportunities while looking at the code, specifically the very similar implementations for `create_parse_diagnostic`, `create_unsupported_syntax_diagnostic`, and `create_semantic_syntax_diagnostic`. We combined these into a single generic function, which I then copied into `ruff_linter::message` with some small changes and a TODO to combine them in the future.

I also deleted the `DisplayParseErrorType` and `TruncateAtNewline` types for reporting parse errors. These were added in #4124, I believe to work around the error messages from LALRPOP. Removing these didn't affect any tests, so I think they were unnecessary now that we fully control the error messages from the parser.

On a more minor note, I factored out some calls to the `OldDiagnostic::filename` (now `Diagnostic::expect_ruff_filename`) function to avoid repeatedly allocating `String`s in some places.

### Snapshot changes

The `show_statistics_syntax_errors` integration test changed because the `OldDiagnostic::name` method used `syntax-error` instead of `invalid-syntax` like in ty. I think this (`--statistics`) is one of the only places we actually use this name for syntax errors, so I hope this is okay. An alternative is to use `syntax-error` in ty too.

The other snapshot changes are from removing this code, as discussed on [Discord](1388252408): 34052a1185/crates/ruff_linter/src/message/mod.rs (L128-L135)

I think both of these are technically breaking changes, but they only affect syntax errors and are very narrow in scope, while also pretty substantially simplifying the refactor, so I hope they're okay to include in a patch release.

## Test plan

Existing tests, with the adjustments mentioned above

---------

Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
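A minimal sketch of the "single generic function" described under "Related refactors": the three `create_*_diagnostic` helpers differed only in how they obtained a message and a range, so they can all route through one shared syntax-error constructor like the `Diagnostic::syntax_error` method added in this diff. The `create_parse_diagnostic` wrapper body shown here is an assumption about the call shape, not a verbatim copy of the final code:

```rust
// Sketch of code inside `ruff_db::diagnostic`, mirroring the `Diagnostic::syntax_error`
// constructor added in this PR.
impl Diagnostic {
    /// Shared constructor for all syntax-error diagnostics.
    pub fn syntax_error(
        span: impl Into<Span>,
        message: impl IntoDiagnosticMessage,
        range: impl Ranged,
    ) -> Diagnostic {
        // Every syntax error uses the same id and severity; the caller's message is
        // attached to the primary annotation rather than to the diagnostic message.
        let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
        let span = span.into().with_range(range.range());
        diag.annotate(Annotation::primary(span).message(message));
        diag
    }
}

// Hypothetical wrapper showing how a per-error-type helper collapses onto the shared
// constructor: a parse error only supplies its own message and location.
pub fn create_parse_diagnostic(file: File, err: &ruff_python_parser::ParseError) -> Diagnostic {
    Diagnostic::syntax_error(Span::from(file), &err.error, err.location)
}
```

Centralizing the id/severity/annotation wiring in one place is also what lets `ruff_linter::message` reuse the same construction path through its copied `create_syntax_error_diagnostic` helper.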
parent 9bee8376a1
commit 77a5c5ac80
61 changed files with 715 additions and 772 deletions
Cargo.lock (generated, 3 changes)

@@ -2854,6 +2854,7 @@ dependencies = [
  "path-slash",
  "ruff_annotate_snippets",
  "ruff_cache",
+ "ruff_diagnostics",
  "ruff_notebook",
  "ruff_python_ast",
  "ruff_python_parser",
@@ -2918,6 +2919,7 @@ dependencies = [
 name = "ruff_diagnostics"
 version = "0.0.0"
 dependencies = [
+ "get-size2",
  "is-macro",
  "ruff_text_size",
  "serde",
@@ -3256,6 +3258,7 @@ dependencies = [
  "lsp-server",
  "lsp-types",
  "regex",
+ "ruff_db",
  "ruff_diagnostics",
  "ruff_formatter",
  "ruff_linter",

@@ -18,14 +18,15 @@ use rustc_hash::FxHashMap;
 use tempfile::NamedTempFile;
 
 use ruff_cache::{CacheKey, CacheKeyHasher};
+use ruff_db::diagnostic::Diagnostic;
 use ruff_diagnostics::Fix;
-use ruff_linter::message::OldDiagnostic;
+use ruff_linter::message::create_lint_diagnostic;
 use ruff_linter::package::PackageRoot;
 use ruff_linter::{VERSION, warn_user};
 use ruff_macros::CacheKey;
 use ruff_notebook::NotebookIndex;
 use ruff_source_file::SourceFileBuilder;
-use ruff_text_size::{Ranged, TextRange, TextSize};
+use ruff_text_size::{TextRange, TextSize};
 use ruff_workspace::Settings;
 use ruff_workspace::resolver::Resolver;
 
@@ -348,7 +349,7 @@ impl FileCache {
             lint.messages
                 .iter()
                 .map(|msg| {
-                    OldDiagnostic::lint(
+                    create_lint_diagnostic(
                         &msg.body,
                         msg.suggestion.as_ref(),
                         msg.range,
@@ -428,11 +429,11 @@ pub(crate) struct LintCacheData {
 
 impl LintCacheData {
     pub(crate) fn from_diagnostics(
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         notebook_index: Option<NotebookIndex>,
     ) -> Self {
         let source = if let Some(msg) = diagnostics.first() {
-            msg.source_file().source_text().to_owned()
+            msg.expect_ruff_source_file().source_text().to_owned()
         } else {
             String::new() // No messages, no need to keep the source!
         };
@@ -446,16 +447,16 @@ impl LintCacheData {
             .map(|(rule, msg)| {
                 // Make sure that all message use the same source file.
                 assert_eq!(
-                    msg.source_file(),
-                    diagnostics.first().unwrap().source_file(),
+                    msg.expect_ruff_source_file(),
+                    diagnostics.first().unwrap().expect_ruff_source_file(),
                     "message uses a different source file"
                 );
                 CacheMessage {
                     rule,
                     body: msg.body().to_string(),
                     suggestion: msg.suggestion().map(ToString::to_string),
-                    range: msg.range(),
-                    parent: msg.parent,
+                    range: msg.expect_range(),
+                    parent: msg.parent(),
                     fix: msg.fix().cloned(),
                     noqa_offset: msg.noqa_offset(),
                 }
@@ -608,12 +609,12 @@ mod tests {
     use anyhow::Result;
     use filetime::{FileTime, set_file_mtime};
     use itertools::Itertools;
-    use ruff_linter::settings::LinterSettings;
     use test_case::test_case;
 
     use ruff_cache::CACHE_DIR_NAME;
-    use ruff_linter::message::OldDiagnostic;
+    use ruff_db::diagnostic::Diagnostic;
     use ruff_linter::package::PackageRoot;
+    use ruff_linter::settings::LinterSettings;
     use ruff_linter::settings::flags;
     use ruff_linter::settings::types::UnsafeFixes;
     use ruff_python_ast::{PySourceType, PythonVersion};
@@ -680,7 +681,7 @@ mod tests {
                 UnsafeFixes::Enabled,
             )
             .unwrap();
-            if diagnostics.inner.iter().any(OldDiagnostic::is_syntax_error) {
+            if diagnostics.inner.iter().any(Diagnostic::is_syntax_error) {
                 parse_errors.push(path.clone());
            }
            paths.push(path);

@@ -9,10 +9,10 @@ use ignore::Error;
 use log::{debug, error, warn};
 #[cfg(not(target_family = "wasm"))]
 use rayon::prelude::*;
+use ruff_linter::message::diagnostic_from_violation;
 use rustc_hash::FxHashMap;
 
 use ruff_db::panic::catch_unwind;
-use ruff_linter::OldDiagnostic;
 use ruff_linter::package::PackageRoot;
 use ruff_linter::registry::Rule;
 use ruff_linter::settings::types::UnsafeFixes;
@@ -129,7 +129,7 @@ pub(crate) fn check(
                 SourceFileBuilder::new(path.to_string_lossy().as_ref(), "").finish();
 
             Diagnostics::new(
-                vec![OldDiagnostic::new(
+                vec![diagnostic_from_violation(
                     IOError { message },
                     TextRange::default(),
                     &dummy,

@@ -10,11 +10,10 @@ use std::path::Path;
 use anyhow::{Context, Result};
 use colored::Colorize;
 use log::{debug, warn};
-use rustc_hash::FxHashMap;
-
-use ruff_linter::OldDiagnostic;
+use ruff_db::diagnostic::Diagnostic;
 use ruff_linter::codes::Rule;
 use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
+use ruff_linter::message::{create_syntax_error_diagnostic, diagnostic_from_violation};
 use ruff_linter::package::PackageRoot;
 use ruff_linter::pyproject_toml::lint_pyproject_toml;
 use ruff_linter::settings::types::UnsafeFixes;
@@ -26,19 +25,20 @@ use ruff_python_ast::{PySourceType, SourceType, TomlSourceType};
 use ruff_source_file::SourceFileBuilder;
 use ruff_text_size::TextRange;
 use ruff_workspace::Settings;
+use rustc_hash::FxHashMap;
 
 use crate::cache::{Cache, FileCacheKey, LintCacheData};
 
 #[derive(Debug, Default, PartialEq)]
 pub(crate) struct Diagnostics {
-    pub(crate) inner: Vec<OldDiagnostic>,
+    pub(crate) inner: Vec<Diagnostic>,
     pub(crate) fixed: FixMap,
     pub(crate) notebook_indexes: FxHashMap<String, NotebookIndex>,
 }
 
 impl Diagnostics {
     pub(crate) fn new(
-        diagnostics: Vec<OldDiagnostic>,
+        diagnostics: Vec<Diagnostic>,
         notebook_indexes: FxHashMap<String, NotebookIndex>,
     ) -> Self {
         Self {
@@ -62,7 +62,7 @@ impl Diagnostics {
         let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
         let source_file = SourceFileBuilder::new(name, "").finish();
         Self::new(
-            vec![OldDiagnostic::new(
+            vec![diagnostic_from_violation(
                 IOError {
                     message: err.to_string(),
                 },
@@ -98,10 +98,10 @@ impl Diagnostics {
         let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
         let dummy = SourceFileBuilder::new(name, "").finish();
         Self::new(
-            vec![OldDiagnostic::syntax_error(
+            vec![create_syntax_error_diagnostic(
+                dummy,
                 err,
                 TextRange::default(),
-                dummy,
             )],
             FxHashMap::default(),
         )

@@ -9,12 +9,13 @@ use itertools::{Itertools, iterate};
 use ruff_linter::linter::FixTable;
 use serde::Serialize;
 
+use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
 use ruff_linter::fs::relativize_path;
 use ruff_linter::logging::LogLevel;
 use ruff_linter::message::{
     AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
-    JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
-    SarifEmitter, SecondaryCode, TextEmitter,
+    JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, RdjsonEmitter, SarifEmitter,
+    TextEmitter,
 };
 use ruff_linter::notify_user;
 use ruff_linter::settings::flags::{self};
@@ -306,8 +307,7 @@ impl Printer {
             .sorted_by_key(|(code, message)| (*code, message.fixable()))
             .fold(
                 vec![],
-                |mut acc: Vec<((Option<&SecondaryCode>, &OldDiagnostic), usize)>,
-                 (code, message)| {
+                |mut acc: Vec<((Option<&SecondaryCode>, &Diagnostic), usize)>, (code, message)| {
                     if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
                         if *prev_code == code {
                             *count += 1;

@@ -1067,7 +1067,7 @@ fn show_statistics_syntax_errors() {
     success: false
     exit_code: 1
     ----- stdout -----
-    1 syntax-error
+    1 invalid-syntax
     Found 1 error.
 
     ----- stderr -----
@@ -1080,7 +1080,7 @@ fn show_statistics_syntax_errors() {
     success: false
     exit_code: 1
     ----- stdout -----
-    1 syntax-error
+    1 invalid-syntax
     Found 1 error.
 
     ----- stderr -----
@@ -1093,7 +1093,7 @@ fn show_statistics_syntax_errors() {
     success: false
     exit_code: 1
     ----- stdout -----
-    1 syntax-error
+    1 invalid-syntax
     Found 1 error.
 
     ----- stderr -----

@@ -13,6 +13,7 @@ license = { workspace = true }
 [dependencies]
 ruff_annotate_snippets = { workspace = true }
 ruff_cache = { workspace = true, optional = true }
+ruff_diagnostics = { workspace = true }
 ruff_notebook = { workspace = true }
 ruff_python_ast = { workspace = true, features = ["get-size"] }
 ruff_python_parser = { workspace = true }

@@ -1,10 +1,11 @@
 use std::{fmt::Formatter, sync::Arc};
 
 use render::{FileResolver, Input};
-use ruff_source_file::{SourceCode, SourceFile};
+use ruff_diagnostics::Fix;
+use ruff_source_file::{LineColumn, SourceCode, SourceFile};
 
 use ruff_annotate_snippets::Level as AnnotateLevel;
-use ruff_text_size::{Ranged, TextRange};
+use ruff_text_size::{Ranged, TextRange, TextSize};
 
 pub use self::render::DisplayDiagnostic;
 use crate::{Db, files::File};
@@ -62,10 +63,37 @@ impl Diagnostic {
             message: message.into_diagnostic_message(),
             annotations: vec![],
             subs: vec![],
+            fix: None,
+            parent: None,
+            noqa_offset: None,
+            secondary_code: None,
         });
         Diagnostic { inner }
     }
 
+    /// Creates a `Diagnostic` for a syntax error.
+    ///
+    /// Unlike the more general [`Diagnostic::new`], this requires a [`Span`] and a [`TextRange`]
+    /// attached to it.
+    ///
+    /// This should _probably_ be a method on the syntax errors, but
+    /// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
+    /// the other way around. And since we want to do this conversion in a couple
+    /// places, it makes sense to centralize it _somewhere_. So it's here for now.
+    ///
+    /// Note that `message` is stored in the primary annotation, _not_ in the primary diagnostic
+    /// message.
+    pub fn syntax_error(
+        span: impl Into<Span>,
+        message: impl IntoDiagnosticMessage,
+        range: impl Ranged,
+    ) -> Diagnostic {
+        let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
+        let span = span.into().with_range(range.range());
+        diag.annotate(Annotation::primary(span).message(message));
+        diag
+    }
+
     /// Add an annotation to this diagnostic.
     ///
     /// Annotations for a diagnostic are optional, but if any are added,
@@ -226,6 +254,11 @@ impl Diagnostic {
         self.primary_annotation().map(|ann| ann.span.clone())
     }
 
+    /// Returns a reference to the primary span of this diagnostic.
+    pub fn primary_span_ref(&self) -> Option<&Span> {
+        self.primary_annotation().map(|ann| &ann.span)
+    }
+
     /// Returns the tags from the primary annotation of this diagnostic if it exists.
     pub fn primary_tags(&self) -> Option<&[DiagnosticTag]> {
         self.primary_annotation().map(|ann| ann.tags.as_slice())
@@ -268,6 +301,167 @@ impl Diagnostic {
     pub fn sub_diagnostics(&self) -> &[SubDiagnostic] {
         &self.inner.subs
     }
+
+    /// Returns the fix for this diagnostic if it exists.
+    pub fn fix(&self) -> Option<&Fix> {
+        self.inner.fix.as_ref()
+    }
+
+    /// Set the fix for this diagnostic.
+    pub fn set_fix(&mut self, fix: Fix) {
+        Arc::make_mut(&mut self.inner).fix = Some(fix);
+    }
+
+    /// Remove the fix for this diagnostic.
+    pub fn remove_fix(&mut self) {
+        Arc::make_mut(&mut self.inner).fix = None;
+    }
+
+    /// Returns `true` if the diagnostic contains a [`Fix`].
+    pub fn fixable(&self) -> bool {
+        self.fix().is_some()
+    }
+
+    /// Returns the offset of the parent statement for this diagnostic if it exists.
+    ///
+    /// This is primarily used for checking noqa/secondary code suppressions.
+    pub fn parent(&self) -> Option<TextSize> {
+        self.inner.parent
+    }
+
+    /// Set the offset of the diagnostic's parent statement.
+    pub fn set_parent(&mut self, parent: TextSize) {
+        Arc::make_mut(&mut self.inner).parent = Some(parent);
+    }
+
+    /// Returns the remapped offset for a suppression comment if it exists.
+    ///
+    /// Like [`Diagnostic::parent`], this is used for noqa code suppression comments in Ruff.
+    pub fn noqa_offset(&self) -> Option<TextSize> {
+        self.inner.noqa_offset
+    }
+
+    /// Set the remapped offset for a suppression comment.
+    pub fn set_noqa_offset(&mut self, noqa_offset: TextSize) {
+        Arc::make_mut(&mut self.inner).noqa_offset = Some(noqa_offset);
+    }
+
+    /// Returns the secondary code for the diagnostic if it exists.
+    ///
+    /// The "primary" code for the diagnostic is its lint name. Diagnostics in ty don't have
+    /// secondary codes (yet), but in Ruff the noqa code is used.
+    pub fn secondary_code(&self) -> Option<&SecondaryCode> {
+        self.inner.secondary_code.as_ref()
+    }
+
+    /// Set the secondary code for this diagnostic.
+    pub fn set_secondary_code(&mut self, code: SecondaryCode) {
+        Arc::make_mut(&mut self.inner).secondary_code = Some(code);
+    }
+
+    /// Returns the name used to represent the diagnostic.
+    pub fn name(&self) -> &'static str {
+        self.id().as_str()
+    }
+
+    /// Returns `true` if `self` is a syntax error message.
+    pub fn is_syntax_error(&self) -> bool {
+        self.id().is_invalid_syntax()
+    }
+
+    /// Returns the message body to display to the user.
+    pub fn body(&self) -> &str {
+        self.primary_message()
+    }
+
+    /// Returns the fix suggestion for the violation.
+    pub fn suggestion(&self) -> Option<&str> {
+        self.primary_annotation()?.get_message()
+    }
+
+    /// Returns the URL for the rule documentation, if it exists.
+    pub fn to_url(&self) -> Option<String> {
+        if self.is_syntax_error() {
+            None
+        } else {
+            Some(format!(
+                "{}/rules/{}",
+                env!("CARGO_PKG_HOMEPAGE"),
+                self.name()
+            ))
+        }
+    }
+
+    /// Returns the filename for the message.
+    ///
+    /// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
+    pub fn expect_ruff_filename(&self) -> String {
+        self.expect_primary_span()
+            .expect_ruff_file()
+            .name()
+            .to_string()
+    }
+
+    /// Computes the start source location for the message.
+    ///
+    /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
+    /// span has no range.
+    pub fn expect_ruff_start_location(&self) -> LineColumn {
+        self.expect_primary_span()
+            .expect_ruff_file()
+            .to_source_code()
+            .line_column(self.expect_range().start())
+    }
+
+    /// Computes the end source location for the message.
+    ///
+    /// Panics if the diagnostic has no primary span, if its file is not a `SourceFile`, or if the
+    /// span has no range.
+    pub fn expect_ruff_end_location(&self) -> LineColumn {
+        self.expect_primary_span()
+            .expect_ruff_file()
+            .to_source_code()
+            .line_column(self.expect_range().end())
+    }
+
+    /// Returns the [`SourceFile`] which the message belongs to.
+    pub fn ruff_source_file(&self) -> Option<&SourceFile> {
+        self.primary_span_ref()?.as_ruff_file()
+    }
+
+    /// Returns the [`SourceFile`] which the message belongs to.
+    ///
+    /// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
+    pub fn expect_ruff_source_file(&self) -> SourceFile {
+        self.expect_primary_span().expect_ruff_file().clone()
+    }
+
+    /// Returns the [`TextRange`] for the diagnostic.
+    pub fn range(&self) -> Option<TextRange> {
+        self.primary_span()?.range()
+    }
+
+    /// Returns the [`TextRange`] for the diagnostic.
+    ///
+    /// Panics if the diagnostic has no primary span or if the span has no range.
+    pub fn expect_range(&self) -> TextRange {
+        self.range().expect("Expected a range for the primary span")
+    }
+}
+
+impl Ord for Diagnostic {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.partial_cmp(other).unwrap_or(std::cmp::Ordering::Equal)
+    }
+}
+
+impl PartialOrd for Diagnostic {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(
+            (self.ruff_source_file()?, self.range()?.start())
+                .cmp(&(other.ruff_source_file()?, other.range()?.start())),
+        )
+    }
 }
 
 #[derive(Debug, Clone, Eq, PartialEq, get_size2::GetSize)]
@@ -277,6 +471,10 @@ struct DiagnosticInner {
     message: DiagnosticMessage,
     annotations: Vec<Annotation>,
     subs: Vec<SubDiagnostic>,
+    fix: Option<Fix>,
+    parent: Option<TextSize>,
+    noqa_offset: Option<TextSize>,
+    secondary_code: Option<SecondaryCode>,
 }
 
 struct RenderingSortKey<'a> {
@@ -897,9 +1095,15 @@ impl Span {
     ///
     /// Panics if the file is a [`UnifiedFile::Ty`] instead of a [`UnifiedFile::Ruff`].
     pub fn expect_ruff_file(&self) -> &SourceFile {
+        self.as_ruff_file()
+            .expect("Expected a ruff `SourceFile`, found a ty `File`")
+    }
+
+    /// Returns the [`SourceFile`] attached to this [`Span`].
+    pub fn as_ruff_file(&self) -> Option<&SourceFile> {
         match &self.file {
-            UnifiedFile::Ty(_) => panic!("Expected a ruff `SourceFile`, found a ty `File`"),
-            UnifiedFile::Ruff(file) => file,
+            UnifiedFile::Ty(_) => None,
+            UnifiedFile::Ruff(file) => Some(file),
         }
     }
 }
@@ -1147,41 +1351,52 @@ impl<T: std::fmt::Display> IntoDiagnosticMessage for T {
     }
 }
 
-/// Creates a `Diagnostic` from a parse error.
-///
-/// This should _probably_ be a method on `ruff_python_parser::ParseError`, but
-/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
-/// the other way around. And since we want to do this conversion in a couple
-/// places, it makes sense to centralize it _somewhere_. So it's here for now.
-pub fn create_parse_diagnostic(file: File, err: &ruff_python_parser::ParseError) -> Diagnostic {
-    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
-    let span = Span::from(file).with_range(err.location);
-    diag.annotate(Annotation::primary(span).message(&err.error));
-    diag
-}
-
-/// Creates a `Diagnostic` from an unsupported syntax error.
-///
-/// See [`create_parse_diagnostic`] for more details.
-pub fn create_unsupported_syntax_diagnostic(
-    file: File,
-    err: &ruff_python_parser::UnsupportedSyntaxError,
-) -> Diagnostic {
-    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
-    let span = Span::from(file).with_range(err.range);
-    diag.annotate(Annotation::primary(span).message(err.to_string()));
-    diag
-}
-
-/// Creates a `Diagnostic` from a semantic syntax error.
-///
-/// See [`create_parse_diagnostic`] for more details.
-pub fn create_semantic_syntax_diagnostic(
-    file: File,
-    err: &ruff_python_parser::semantic_errors::SemanticSyntaxError,
-) -> Diagnostic {
-    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
-    let span = Span::from(file).with_range(err.range);
-    diag.annotate(Annotation::primary(span).message(err.to_string()));
-    diag
+/// A secondary identifier for a lint diagnostic.
+///
+/// For Ruff rules this means the noqa code.
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, get_size2::GetSize)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize), serde(transparent))]
+pub struct SecondaryCode(String);
+
+impl SecondaryCode {
+    pub fn new(code: String) -> Self {
+        Self(code)
+    }
+
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
+}
+
+impl std::fmt::Display for SecondaryCode {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_str(&self.0)
+    }
+}
+
+impl std::ops::Deref for SecondaryCode {
+    type Target = str;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl PartialEq<&str> for SecondaryCode {
+    fn eq(&self, other: &&str) -> bool {
+        self.0 == *other
+    }
+}
+
+impl PartialEq<SecondaryCode> for &str {
+    fn eq(&self, other: &SecondaryCode) -> bool {
+        other.eq(self)
+    }
+}
+
+// for `hashbrown::EntryRef`
+impl From<&SecondaryCode> for SecondaryCode {
+    fn from(value: &SecondaryCode) -> Self {
+        value.clone()
+    }
 }

@@ -16,5 +16,6 @@ doctest = false
 [dependencies]
 ruff_text_size = { workspace = true }
 
+get-size2 = { workspace = true }
 is-macro = { workspace = true }
 serde = { workspace = true, optional = true, features = [] }

@@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
 
 /// A text edit to be applied to a source file. Inserts, deletes, or replaces
 /// content at a given location.
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, get_size2::GetSize)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 pub struct Edit {
     /// The start location of the edit.

@@ -6,7 +6,9 @@ use ruff_text_size::{Ranged, TextSize};
 use crate::edit::Edit;
 
 /// Indicates if a fix can be applied.
-#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is)]
+#[derive(
+    Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, is_macro::Is, get_size2::GetSize,
+)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
 pub enum Applicability {
@@ -30,7 +32,7 @@ pub enum Applicability {
 }
 
 /// Indicates the level of isolation required to apply a fix.
-#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Default, Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, get_size2::GetSize)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 pub enum IsolationLevel {
     /// The fix should be applied as long as no other fixes in the same group have been applied.
@@ -41,7 +43,7 @@ pub enum IsolationLevel {
 }
 
 /// A collection of [`Edit`] elements to be applied to a source file.
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, get_size2::GetSize)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 pub struct Fix {
     /// The [`Edit`] elements to be applied, sorted by [`Edit::start`] in ascending order.

@@ -15,7 +15,7 @@ license = { workspace = true }
 [dependencies]
 ruff_annotate_snippets = { workspace = true }
 ruff_cache = { workspace = true }
-ruff_db = { workspace = true }
+ruff_db = { workspace = true, features = ["serde"] }
 ruff_diagnostics = { workspace = true, features = ["serde"] }
 ruff_notebook = { workspace = true }
 ruff_macros = { workspace = true }

@@ -28,6 +28,7 @@ use itertools::Itertools;
 use log::debug;
 use rustc_hash::{FxHashMap, FxHashSet};
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_diagnostics::{Applicability, Fix, IsolationLevel};
 use ruff_notebook::{CellOffsets, NotebookIndex};
 use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to_module_path};
@@ -63,6 +64,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};
 use crate::checkers::ast::annotation::AnnotationContext;
 use crate::docstrings::extraction::ExtractionTarget;
 use crate::importer::{ImportRequest, Importer, ResolutionError};
+use crate::message::diagnostic_from_violation;
 use crate::noqa::NoqaMapping;
 use crate::package::PackageRoot;
 use crate::preview::is_undefined_export_in_dunder_init_enabled;
@@ -74,7 +76,7 @@ use crate::rules::pylint::rules::{AwaitOutsideAsync, LoadBeforeGlobalDeclaration
 use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
 use crate::settings::rule_table::RuleTable;
 use crate::settings::{LinterSettings, TargetVersion, flags};
-use crate::{Edit, OldDiagnostic, Violation};
+use crate::{Edit, Violation};
 use crate::{Locator, docstrings, noqa};
 
 mod analyze;
@@ -388,7 +390,7 @@ impl<'a> Checker<'a> {
 
     /// Return a [`DiagnosticGuard`] for reporting a diagnostic.
     ///
-    /// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
+    /// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
     /// before it is added to the collection in the checker on `Drop`.
     pub(crate) fn report_diagnostic<'chk, T: Violation>(
         &'chk self,
@@ -401,7 +403,7 @@ impl<'a> Checker<'a> {
     /// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
     /// enabled.
     ///
-    /// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
+    /// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
     /// before it is added to the collection in the checker on `Drop`.
     pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
         &'chk self,
@@ -3116,9 +3118,9 @@ pub(crate) fn check_ast(
 /// A type for collecting diagnostics in a given file.
 ///
 /// [`LintContext::report_diagnostic`] can be used to obtain a [`DiagnosticGuard`], which will push
-/// a [`Violation`] to the contained [`OldDiagnostic`] collection on `Drop`.
+/// a [`Violation`] to the contained [`Diagnostic`] collection on `Drop`.
 pub(crate) struct LintContext<'a> {
-    diagnostics: RefCell<Vec<OldDiagnostic>>,
+    diagnostics: RefCell<Vec<Diagnostic>>,
     source_file: SourceFile,
     rules: RuleTable,
     settings: &'a LinterSettings,
@@ -3126,7 +3128,7 @@ pub(crate) struct LintContext<'a> {
 
 impl<'a> LintContext<'a> {
     /// Create a new collector with the given `source_file` and an empty collection of
-    /// `OldDiagnostic`s.
+    /// `Diagnostic`s.
     pub(crate) fn new(path: &Path, contents: &str, settings: &'a LinterSettings) -> Self {
         let source_file =
             SourceFileBuilder::new(path.to_string_lossy().as_ref(), contents).finish();
@@ -3147,7 +3149,7 @@ impl<'a> LintContext<'a> {
 
     /// Return a [`DiagnosticGuard`] for reporting a diagnostic.
     ///
-    /// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
+    /// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
     /// before it is added to the collection in the context on `Drop`.
     pub(crate) fn report_diagnostic<'chk, T: Violation>(
         &'chk self,
@@ -3156,7 +3158,7 @@ impl<'a> LintContext<'a> {
     ) -> DiagnosticGuard<'chk, 'a> {
         DiagnosticGuard {
             context: self,
-            diagnostic: Some(OldDiagnostic::new(kind, range, &self.source_file)),
+            diagnostic: Some(diagnostic_from_violation(kind, range, &self.source_file)),
             rule: T::rule(),
         }
     }
@@ -3164,7 +3166,7 @@ impl<'a> LintContext<'a> {
     /// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
     /// enabled.
     ///
-    /// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
+    /// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
     /// before it is added to the collection in the context on `Drop`.
    pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
         &'chk self,
@@ -3175,7 +3177,7 @@ impl<'a> LintContext<'a> {
         if self.is_rule_enabled(rule) {
             Some(DiagnosticGuard {
                 context: self,
-                diagnostic: Some(OldDiagnostic::new(kind, range, &self.source_file)),
+                diagnostic: Some(diagnostic_from_violation(kind, range, &self.source_file)),
                 rule,
             })
         } else {
@@ -3199,17 +3201,17 @@ impl<'a> LintContext<'a> {
     }
 
     #[inline]
-    pub(crate) fn into_parts(self) -> (Vec<OldDiagnostic>, SourceFile) {
+    pub(crate) fn into_parts(self) -> (Vec<Diagnostic>, SourceFile) {
         (self.diagnostics.into_inner(), self.source_file)
     }
 
     #[inline]
-    pub(crate) fn as_mut_vec(&mut self) -> &mut Vec<OldDiagnostic> {
+    pub(crate) fn as_mut_vec(&mut self) -> &mut Vec<Diagnostic> {
         self.diagnostics.get_mut()
     }
 
     #[inline]
-    pub(crate) fn iter(&mut self) -> impl Iterator<Item = &OldDiagnostic> {
+    pub(crate) fn iter(&mut self) -> impl Iterator<Item = &Diagnostic> {
         self.diagnostics.get_mut().iter()
     }
 }
@@ -3227,7 +3229,7 @@ pub(crate) struct DiagnosticGuard<'a, 'b> {
     /// The diagnostic that we want to report.
     ///
     /// This is always `Some` until the `Drop` (or `defuse`) call.
-    diagnostic: Option<OldDiagnostic>,
+    diagnostic: Option<Diagnostic>,
     rule: Rule,
 }
 
@@ -3253,11 +3255,14 @@ impl DiagnosticGuard<'_, '_> {
     #[inline]
     pub(crate) fn set_fix(&mut self, fix: Fix) {
         if !self.context.rules.should_fix(self.rule) {
-            self.fix = None;
+            self.diagnostic.as_mut().unwrap().remove_fix();
             return;
         }
         let applicability = self.resolve_applicability(&fix);
-        self.fix = Some(fix.with_applicability(applicability));
+        self.diagnostic
+            .as_mut()
+            .unwrap()
+            .set_fix(fix.with_applicability(applicability));
     }
 
     /// Set the [`Fix`] used to fix the diagnostic, if the provided function returns `Ok`.
@@ -3286,9 +3291,9 @@ impl DiagnosticGuard<'_, '_> {
 }
 
 impl std::ops::Deref for DiagnosticGuard<'_, '_> {
-    type Target = OldDiagnostic;
+    type Target = Diagnostic;
 
-    fn deref(&self) -> &OldDiagnostic {
+    fn deref(&self) -> &Diagnostic {
         // OK because `self.diagnostic` is only `None` within `Drop`.
         self.diagnostic.as_ref().unwrap()
     }
@@ -3296,7 +3301,7 @@ impl std::ops::Deref for DiagnosticGuard<'_, '_> {
 
 /// Return a mutable borrow of the diagnostic in this guard.
 impl std::ops::DerefMut for DiagnosticGuard<'_, '_> {
-    fn deref_mut(&mut self) -> &mut OldDiagnostic {
+    fn deref_mut(&mut self) -> &mut Diagnostic {
         // OK because `self.diagnostic` is only `None` within `Drop`.
         self.diagnostic.as_mut().unwrap()
     }

@@ -66,9 +66,9 @@ pub(crate) fn check_noqa(
         }
 
         let noqa_offsets = diagnostic
-            .parent
+            .parent()
            .into_iter()
-            .chain(std::iter::once(diagnostic.start()))
+            .chain(std::iter::once(diagnostic.expect_range().start()))
             .map(|position| noqa_line_for.resolve(position))
             .unique();
 

@@ -4,6 +4,7 @@
 /// `--select`. For pylint this is e.g. C0414 and E0118 but also C and E01.
 use std::fmt::Formatter;
 
+use ruff_db::diagnostic::SecondaryCode;
 use strum_macros::EnumIter;
 
 use crate::registry::Linter;
@@ -52,6 +53,18 @@ impl PartialEq<NoqaCode> for &str {
     }
 }
 
+impl PartialEq<NoqaCode> for SecondaryCode {
+    fn eq(&self, other: &NoqaCode) -> bool {
+        &self.as_str() == other
+    }
+}
+
+impl PartialEq<SecondaryCode> for NoqaCode {
+    fn eq(&self, other: &SecondaryCode) -> bool {
+        other.eq(self)
+    }
+}
+
 impl serde::Serialize for NoqaCode {
     fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
     where

@@ -618,7 +618,8 @@ mod tests {
     use crate::fix::edits::{
         add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
     };
-    use crate::{Edit, Fix, Locator, OldDiagnostic};
+    use crate::message::diagnostic_from_violation;
+    use crate::{Edit, Fix, Locator};
 
     /// Parse the given source using [`Mode::Module`] and return the first statement.
     fn parse_first_stmt(source: &str) -> Result<Stmt> {
@@ -749,12 +750,12 @@ x = 1 \
         let diag = {
             use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
             let mut iter = edits.into_iter();
-            let mut diagnostic = OldDiagnostic::new(
+            let mut diagnostic = diagnostic_from_violation(
                 MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary.
                 TextRange::default(),
                 &SourceFileBuilder::new("<filename>", "<code>").finish(),
             );
-            diagnostic.fix = Some(Fix::safe_edits(
+            diagnostic.set_fix(Fix::safe_edits(
                 iter.next().ok_or(anyhow!("expected edits nonempty"))?,
                 iter,
             ));

@@ -3,12 +3,12 @@ use std::collections::BTreeSet;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_diagnostics::{IsolationLevel, SourceMap};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 
 use crate::Locator;
 use crate::linter::FixTable;
-use crate::message::OldDiagnostic;
 use crate::registry::Rule;
 use crate::settings::types::UnsafeFixes;
 use crate::{Edit, Fix};
@@ -28,7 +28,7 @@ pub(crate) struct FixResult {
 
 /// Fix errors in a file, and write the fixed source code to disk.
 pub(crate) fn fix_file(
-    diagnostics: &[OldDiagnostic],
+    diagnostics: &[Diagnostic],
     locator: &Locator,
     unsafe_fixes: UnsafeFixes,
 ) -> Option<FixResult> {
@@ -52,7 +52,7 @@ pub(crate) fn fix_file(
 
 /// Apply a series of fixes.
 fn apply_fixes<'a>(
-    diagnostics: impl Iterator<Item = &'a OldDiagnostic>,
+    diagnostics: impl Iterator<Item = &'a Diagnostic>,
     locator: &'a Locator<'a>,
 ) -> FixResult {
     let mut output = String::with_capacity(locator.len());
@@ -173,25 +173,26 @@ mod tests {
     use ruff_text_size::{Ranged, TextSize};
 
     use crate::Locator;
-    use crate::OldDiagnostic;
     use crate::fix::{FixResult, apply_fixes};
+    use crate::message::diagnostic_from_violation;
     use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
     use crate::{Edit, Fix};
+    use ruff_db::diagnostic::Diagnostic;
 
     fn create_diagnostics(
         filename: &str,
         source: &str,
         edit: impl IntoIterator<Item = Edit>,
-    ) -> Vec<OldDiagnostic> {
+    ) -> Vec<Diagnostic> {
         edit.into_iter()
             .map(|edit| {
                 // The choice of rule here is arbitrary.
-                let mut diagnostic = OldDiagnostic::new(
+                let mut diagnostic = diagnostic_from_violation(
                     MissingNewlineAtEndOfFile,
                     edit.range(),
                     &SourceFileBuilder::new(filename, source).finish(),
                 );
-                diagnostic.fix = Some(Fix::safe_edit(edit));
+                diagnostic.set_fix(Fix::safe_edit(edit));
                 diagnostic
             })
             .collect()

@@ -14,7 +14,6 @@ pub use rule_selector::RuleSelector;
 pub use rule_selector::clap_completion::RuleSelectorParser;
 pub use rules::pycodestyle::rules::IOError;
 
-pub use message::OldDiagnostic;
 pub(crate) use ruff_diagnostics::{Applicability, Edit, Fix};
 pub use violation::{AlwaysFixableViolation, FixAvailability, Violation, ViolationMetadata};
 

@@ -7,15 +7,14 @@ use itertools::Itertools;
 use ruff_python_parser::semantic_errors::SemanticSyntaxError;
 use rustc_hash::FxBuildHasher;
 
+use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
 use ruff_notebook::Notebook;
 use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
 use ruff_python_parser::{ParseError, ParseOptions, Parsed, UnsupportedSyntaxError};
 use ruff_source_file::SourceFile;
-use ruff_text_size::Ranged;
 
-use crate::OldDiagnostic;
 use crate::checkers::ast::{LintContext, check_ast};
 use crate::checkers::filesystem::check_file_path;
 use crate::checkers::imports::check_imports;
@@ -25,7 +24,7 @@ use crate::checkers::tokens::check_tokens;
 use crate::directives::Directives;
 use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
 use crate::fix::{FixResult, fix_file};
-use crate::message::SecondaryCode;
+use crate::message::create_syntax_error_diagnostic;
 use crate::noqa::add_noqa;
 use crate::package::PackageRoot;
 use crate::preview::is_py314_support_enabled;
@@ -41,7 +40,7 @@ pub(crate) mod float;
 
 pub struct LinterResult {
     /// A collection of diagnostic messages generated by the linter.
-    pub diagnostics: Vec<OldDiagnostic>,
+    pub diagnostics: Vec<Diagnostic>,
     /// Flag indicating that the parsed source code does not contain any
     /// [`ParseError`]s
     has_valid_syntax: bool,
@@ -145,7 +144,7 @@ pub struct FixerResult<'a> {
     pub fixed: FixTable,
 }
 
-/// Generate [`OldDiagnostic`]s from the source code contents at the given `Path`.
+/// Generate [`Diagnostic`]s from the source code contents at the given `Path`.
 #[expect(clippy::too_many_arguments)]
 pub fn check_path(
     path: &Path,
@@ -160,7 +159,7 @@ pub fn check_path(
     source_type: PySourceType,
     parsed: &Parsed<ModModule>,
     target_version: TargetVersion,
-) -> Vec<OldDiagnostic> {
+) -> Vec<Diagnostic> {
     // Aggregate all diagnostics.
     let mut context = LintContext::new(path, locator.contents(), settings);
 
@@ -382,7 +381,7 @@ pub fn check_path(
     if !parsed.has_valid_syntax() {
         // Avoid fixing in case the source code contains syntax errors.
         for diagnostic in &mut diagnostics {
-            diagnostic.fix = None;
+            diagnostic.remove_fix();
         }
     }
 
@@ -393,7 +392,6 @@ pub fn check_path(
         parsed.errors(),
         syntax_errors,
         &semantic_syntax_errors,
-        locator,
         directives,
         &source_file,
     )
@@ -459,7 +457,7 @@ pub fn add_noqa_to_path(
     )
 }
 
-/// Generate an [`OldDiagnostic`] for each diagnostic triggered by the given source code.
+/// Generate a [`Diagnostic`] for each diagnostic triggered by the given source code.
 pub fn lint_only(
     path: &Path,
     package: Option<PackageRoot<'_>>,
@@ -516,7 +514,7 @@ pub fn lint_only(
 
     LinterResult {
         has_valid_syntax: parsed.has_valid_syntax(),
-        has_no_syntax_errors: !diagnostics.iter().any(OldDiagnostic::is_syntax_error),
+        has_no_syntax_errors: !diagnostics.iter().any(Diagnostic::is_syntax_error),
||||||
diagnostics,
|
diagnostics,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -525,30 +523,32 @@ pub fn lint_only(
|
||||||
///
|
///
|
||||||
/// Also use `directives` to attach noqa offsets to lint diagnostics.
|
/// Also use `directives` to attach noqa offsets to lint diagnostics.
|
||||||
fn diagnostics_to_messages(
|
fn diagnostics_to_messages(
|
||||||
diagnostics: Vec<OldDiagnostic>,
|
diagnostics: Vec<Diagnostic>,
|
||||||
parse_errors: &[ParseError],
|
parse_errors: &[ParseError],
|
||||||
unsupported_syntax_errors: &[UnsupportedSyntaxError],
|
unsupported_syntax_errors: &[UnsupportedSyntaxError],
|
||||||
semantic_syntax_errors: &[SemanticSyntaxError],
|
semantic_syntax_errors: &[SemanticSyntaxError],
|
||||||
locator: &Locator,
|
|
||||||
directives: &Directives,
|
directives: &Directives,
|
||||||
source_file: &SourceFile,
|
source_file: &SourceFile,
|
||||||
) -> Vec<OldDiagnostic> {
|
) -> Vec<Diagnostic> {
|
||||||
parse_errors
|
parse_errors
|
||||||
.iter()
|
.iter()
|
||||||
.map(|parse_error| {
|
.map(|parse_error| {
|
||||||
OldDiagnostic::from_parse_error(parse_error, locator, source_file.clone())
|
create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
|
||||||
})
|
})
|
||||||
.chain(unsupported_syntax_errors.iter().map(|syntax_error| {
|
.chain(unsupported_syntax_errors.iter().map(|syntax_error| {
|
||||||
OldDiagnostic::from_unsupported_syntax_error(syntax_error, source_file.clone())
|
create_syntax_error_diagnostic(source_file.clone(), syntax_error, syntax_error)
|
||||||
}))
|
}))
|
||||||
.chain(
|
.chain(
|
||||||
semantic_syntax_errors
|
semantic_syntax_errors
|
||||||
.iter()
|
.iter()
|
||||||
.map(|error| OldDiagnostic::from_semantic_syntax_error(error, source_file.clone())),
|
.map(|error| create_syntax_error_diagnostic(source_file.clone(), error, error)),
|
||||||
)
|
)
|
||||||
.chain(diagnostics.into_iter().map(|diagnostic| {
|
.chain(diagnostics.into_iter().map(|mut diagnostic| {
|
||||||
let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
|
let noqa_offset = directives
|
||||||
diagnostic.with_noqa_offset(noqa_offset)
|
.noqa_line_for
|
||||||
|
.resolve(diagnostic.expect_range().start());
|
||||||
|
diagnostic.set_noqa_offset(noqa_offset);
|
||||||
|
diagnostic
|
||||||
}))
|
}))
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
@ -629,7 +629,7 @@ pub fn lint_fix<'a>(
|
||||||
|
|
||||||
if iterations == 0 {
|
if iterations == 0 {
|
||||||
has_valid_syntax = parsed.has_valid_syntax();
|
has_valid_syntax = parsed.has_valid_syntax();
|
||||||
has_no_syntax_errors = !diagnostics.iter().any(OldDiagnostic::is_syntax_error);
|
has_no_syntax_errors = !diagnostics.iter().any(Diagnostic::is_syntax_error);
|
||||||
} else {
|
} else {
|
||||||
// If the source code had no syntax errors on the first pass, but
|
// If the source code had no syntax errors on the first pass, but
|
||||||
// does on a subsequent pass, then we've introduced a
|
// does on a subsequent pass, then we've introduced a
|
||||||
|
@ -687,8 +687,8 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
#[expect(clippy::print_stderr)]
|
#[expect(clippy::print_stderr)]
|
||||||
fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[OldDiagnostic]) {
|
fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[Diagnostic]) {
|
||||||
let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::secondary_code));
|
let codes = collect_rule_codes(diagnostics.iter().filter_map(Diagnostic::secondary_code));
|
||||||
if cfg!(debug_assertions) {
|
if cfg!(debug_assertions) {
|
||||||
eprintln!(
|
eprintln!(
|
||||||
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
|
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
|
||||||
|
@ -806,13 +806,12 @@ mod tests {
|
||||||
use ruff_python_index::Indexer;
|
use ruff_python_index::Indexer;
|
||||||
use ruff_python_parser::ParseOptions;
|
use ruff_python_parser::ParseOptions;
|
||||||
use ruff_python_trivia::textwrap::dedent;
|
use ruff_python_trivia::textwrap::dedent;
|
||||||
use ruff_text_size::Ranged;
|
|
||||||
use test_case::test_case;
|
use test_case::test_case;
|
||||||
|
|
||||||
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
use ruff_notebook::{Notebook, NotebookError};
|
use ruff_notebook::{Notebook, NotebookError};
|
||||||
|
|
||||||
use crate::linter::check_path;
|
use crate::linter::check_path;
|
||||||
use crate::message::OldDiagnostic;
|
|
||||||
use crate::registry::Rule;
|
use crate::registry::Rule;
|
||||||
use crate::settings::LinterSettings;
|
use crate::settings::LinterSettings;
|
||||||
use crate::source_kind::SourceKind;
|
use crate::source_kind::SourceKind;
|
||||||
|
@ -970,7 +969,7 @@ mod tests {
|
||||||
|
|
||||||
/// Wrapper around `test_contents_syntax_errors` for testing a snippet of code instead of a
|
/// Wrapper around `test_contents_syntax_errors` for testing a snippet of code instead of a
|
||||||
/// file.
|
/// file.
|
||||||
fn test_snippet_syntax_errors(contents: &str, settings: &LinterSettings) -> Vec<OldDiagnostic> {
|
fn test_snippet_syntax_errors(contents: &str, settings: &LinterSettings) -> Vec<Diagnostic> {
|
||||||
let contents = dedent(contents);
|
let contents = dedent(contents);
|
||||||
test_contents_syntax_errors(
|
test_contents_syntax_errors(
|
||||||
&SourceKind::Python(contents.to_string()),
|
&SourceKind::Python(contents.to_string()),
|
||||||
|
@ -985,7 +984,7 @@ mod tests {
|
||||||
source_kind: &SourceKind,
|
source_kind: &SourceKind,
|
||||||
path: &Path,
|
path: &Path,
|
||||||
settings: &LinterSettings,
|
settings: &LinterSettings,
|
||||||
) -> Vec<OldDiagnostic> {
|
) -> Vec<Diagnostic> {
|
||||||
let source_type = PySourceType::from(path);
|
let source_type = PySourceType::from(path);
|
||||||
let target_version = settings.resolve_target_version(path);
|
let target_version = settings.resolve_target_version(path);
|
||||||
let options =
|
let options =
|
||||||
|
@ -1016,7 +1015,7 @@ mod tests {
|
||||||
&parsed,
|
&parsed,
|
||||||
target_version,
|
target_version,
|
||||||
);
|
);
|
||||||
diagnostics.sort_by_key(Ranged::start);
|
diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
|
||||||
diagnostics
|
diagnostics
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
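Note: the three conversions above (parse errors, unsupported-syntax errors, and semantic syntax errors) now all funnel through the same helper. A minimal usage sketch of that pattern, assuming a `SourceFile` and the parser's errors are in scope; the wrapper function name here is illustrative and not part of this commit:

use ruff_db::diagnostic::Diagnostic;
use ruff_python_parser::ParseError;
use ruff_source_file::SourceFile;

use crate::message::create_syntax_error_diagnostic;

/// Illustrative sketch: convert parse errors the way `diagnostics_to_messages` does above.
fn parse_errors_to_diagnostics(errors: &[ParseError], source_file: &SourceFile) -> Vec<Diagnostic> {
    errors
        .iter()
        .map(|parse_error| {
            // The file becomes the primary span, the error message is prefixed with
            // "SyntaxError: ", and the error itself supplies the range for the annotation.
            create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
        })
        .collect()
}
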
@@ -1,4 +1,4 @@
-use std::fmt::{Display, Formatter, Write};
+use std::fmt::{Display, Formatter};
 use std::path::{Path, PathBuf};
 use std::sync::{LazyLock, Mutex};
 
@@ -6,7 +6,7 @@ use anyhow::Result;
 use colored::Colorize;
 use fern;
 use log::Level;
-use ruff_python_parser::{ParseError, ParseErrorType};
+use ruff_python_parser::ParseError;
 use rustc_hash::FxHashSet;
 
 use ruff_source_file::{LineColumn, LineIndex, OneIndexed, SourceCode};
@@ -248,7 +248,7 @@ impl Display for DisplayParseError {
                     row = location.line,
                     column = location.column,
                     colon = ":".cyan(),
-                    inner = &DisplayParseErrorType(&self.error.error)
+                    inner = self.error.error
                 )
             }
             ErrorLocation::Cell(cell, location) => {
@@ -259,27 +259,13 @@ impl Display for DisplayParseError {
                     row = location.line,
                     column = location.column,
                     colon = ":".cyan(),
-                    inner = &DisplayParseErrorType(&self.error.error)
+                    inner = self.error.error
                 )
             }
         }
     }
 }
 
-pub(crate) struct DisplayParseErrorType<'a>(&'a ParseErrorType);
-
-impl<'a> DisplayParseErrorType<'a> {
-    pub(crate) fn new(error: &'a ParseErrorType) -> Self {
-        Self(error)
-    }
-}
-
-impl Display for DisplayParseErrorType<'_> {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", TruncateAtNewline(&self.0))
-    }
-}
-
 #[derive(Debug)]
 enum ErrorLocation {
     /// The error occurred in a Python file.
@@ -288,44 +274,6 @@ enum ErrorLocation {
     Cell(OneIndexed, LineColumn),
 }
 
-/// Truncates the display text before the first newline character to avoid line breaks.
-struct TruncateAtNewline<'a>(&'a dyn Display);
-
-impl Display for TruncateAtNewline<'_> {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        struct TruncateAdapter<'a> {
-            inner: &'a mut dyn Write,
-            after_new_line: bool,
-        }
-
-        impl Write for TruncateAdapter<'_> {
-            fn write_str(&mut self, s: &str) -> std::fmt::Result {
-                if self.after_new_line {
-                    Ok(())
-                } else {
-                    if let Some(end) = s.find(['\n', '\r']) {
-                        self.inner.write_str(&s[..end])?;
-                        self.inner.write_str("\u{23ce}...")?;
-                        self.after_new_line = true;
-                        Ok(())
-                    } else {
-                        self.inner.write_str(s)
-                    }
-                }
-            }
-        }
-
-        write!(
-            TruncateAdapter {
-                inner: f,
-                after_new_line: false,
-            },
-            "{}",
-            self.0
-        )
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use crate::logging::LogLevel;
@@ -1,8 +1,9 @@
 use std::io::Write;
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_source_file::LineColumn;
 
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext};
 
 /// Generate error logging commands for Azure Pipelines format.
 /// See [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands?view=azure-devops&tabs=bash#logissue-log-an-error-or-warning)
@@ -13,23 +14,23 @@ impl Emitter for AzureEmitter {
     fn emit(
         &mut self,
         writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         for diagnostic in diagnostics {
-            let location = if context.is_notebook(&diagnostic.filename()) {
+            let filename = diagnostic.expect_ruff_filename();
+            let location = if context.is_notebook(&filename) {
                 // We can't give a reasonable location for the structured formats,
                 // so we show one that's clearly a fallback
                 LineColumn::default()
             } else {
-                diagnostic.compute_start_location()
+                diagnostic.expect_ruff_start_location()
             };
 
             writeln!(
                 writer,
                 "##vso[task.logissue type=error\
                 ;sourcepath={filename};linenumber={line};columnnumber={col};{code}]{body}",
-                filename = diagnostic.filename(),
                 line = location.line,
                 col = location.column,
                 code = diagnostic
@@ -2,13 +2,12 @@ use std::fmt::{Display, Formatter};
 use std::num::NonZeroUsize;
 
 use colored::{Color, ColoredString, Colorize, Styles};
 
-use ruff_text_size::{Ranged, TextRange, TextSize};
 use similar::{ChangeTag, TextDiff};
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_source_file::{OneIndexed, SourceFile};
+use ruff_text_size::{Ranged, TextRange, TextSize};
 
-use crate::message::OldDiagnostic;
 use crate::text_helpers::ShowNonprinting;
 use crate::{Applicability, Fix};
 
@@ -26,9 +25,9 @@ pub(super) struct Diff<'a> {
 }
 
 impl<'a> Diff<'a> {
-    pub(crate) fn from_message(message: &'a OldDiagnostic) -> Option<Diff<'a>> {
+    pub(crate) fn from_message(message: &'a Diagnostic) -> Option<Diff<'a>> {
         message.fix().map(|fix| Diff {
-            source_code: message.source_file(),
+            source_code: message.expect_ruff_source_file(),
             fix,
         })
     }
@@ -1,9 +1,10 @@
 use std::io::Write;
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_source_file::LineColumn;
 
 use crate::fs::relativize_path;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext};
 
 /// Generate error workflow command in GitHub Actions format.
 /// See: [GitHub documentation](https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message)
@@ -14,12 +15,13 @@ impl Emitter for GithubEmitter {
     fn emit(
         &mut self,
         writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         for diagnostic in diagnostics {
-            let source_location = diagnostic.compute_start_location();
-            let location = if context.is_notebook(&diagnostic.filename()) {
+            let source_location = diagnostic.expect_ruff_start_location();
+            let filename = diagnostic.expect_ruff_filename();
+            let location = if context.is_notebook(&filename) {
                 // We can't give a reasonable location for the structured formats,
                 // so we show one that's clearly a fallback
                 LineColumn::default()
@@ -27,7 +29,7 @@ impl Emitter for GithubEmitter {
                 source_location
             };
 
-            let end_location = diagnostic.compute_end_location();
+            let end_location = diagnostic.expect_ruff_end_location();
 
             write!(
                 writer,
@@ -35,7 +37,7 @@ impl Emitter for GithubEmitter {
                 code = diagnostic
                     .secondary_code()
                     .map_or_else(String::new, |code| format!(" ({code})")),
-                file = diagnostic.filename(),
+                file = filename,
                 row = source_location.line,
                 column = source_location.column,
                 end_row = end_location.line,
@@ -45,7 +47,7 @@ impl Emitter for GithubEmitter {
             write!(
                 writer,
                 "{path}:{row}:{column}:",
-                path = relativize_path(&*diagnostic.filename()),
+                path = relativize_path(&filename),
                 row = location.line,
                 column = location.column,
             )?;
@@ -7,8 +7,10 @@ use serde::ser::SerializeSeq;
 use serde::{Serialize, Serializer};
 use serde_json::json;
 
+use ruff_db::diagnostic::Diagnostic;
+
 use crate::fs::{relativize_path, relativize_path_to};
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext};
 
 /// Generate JSON with violations in GitLab CI format
 // https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool
@@ -28,7 +30,7 @@ impl Emitter for GitlabEmitter {
     fn emit(
         &mut self,
         writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         serde_json::to_writer_pretty(
@@ -45,7 +47,7 @@ impl Emitter for GitlabEmitter {
 }
 
 struct SerializedMessages<'a> {
-    diagnostics: &'a [OldDiagnostic],
+    diagnostics: &'a [Diagnostic],
     context: &'a EmitterContext<'a>,
     project_dir: Option<&'a str>,
 }
@@ -59,10 +61,11 @@ impl Serialize for SerializedMessages<'_> {
         let mut fingerprints = HashSet::<u64>::with_capacity(self.diagnostics.len());
 
         for diagnostic in self.diagnostics {
-            let start_location = diagnostic.compute_start_location();
-            let end_location = diagnostic.compute_end_location();
+            let start_location = diagnostic.expect_ruff_start_location();
+            let end_location = diagnostic.expect_ruff_end_location();
 
-            let lines = if self.context.is_notebook(&diagnostic.filename()) {
+            let filename = diagnostic.expect_ruff_filename();
+            let lines = if self.context.is_notebook(&filename) {
                 // We can't give a reasonable location for the structured formats,
                 // so we show one that's clearly a fallback
                 json!({
@@ -77,8 +80,8 @@ impl Serialize for SerializedMessages<'_> {
             };
 
             let path = self.project_dir.as_ref().map_or_else(
-                || relativize_path(&*diagnostic.filename()),
-                |project_dir| relativize_path_to(&*diagnostic.filename(), project_dir),
+                || relativize_path(&filename),
+                |project_dir| relativize_path_to(&filename, project_dir),
             );
 
             let mut message_fingerprint = fingerprint(diagnostic, &path, 0);
@@ -120,7 +123,7 @@ impl Serialize for SerializedMessages<'_> {
 }
 
 /// Generate a unique fingerprint to identify a violation.
-fn fingerprint(message: &OldDiagnostic, project_path: &str, salt: u64) -> u64 {
+fn fingerprint(message: &Diagnostic, project_path: &str, salt: u64) -> u64 {
     let mut hasher = DefaultHasher::new();
 
     salt.hash(&mut hasher);
@@ -4,15 +4,14 @@ use std::num::NonZeroUsize;
 
 use colored::Colorize;
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_notebook::NotebookIndex;
 use ruff_source_file::OneIndexed;
 
 use crate::fs::relativize_path;
 use crate::message::diff::calculate_print_width;
 use crate::message::text::{MessageCodeFrame, RuleCodeAndBody};
-use crate::message::{
-    Emitter, EmitterContext, MessageWithLocation, OldDiagnostic, group_diagnostics_by_filename,
-};
+use crate::message::{Emitter, EmitterContext, MessageWithLocation, group_diagnostics_by_filename};
 use crate::settings::types::UnsafeFixes;
 
 #[derive(Default)]
@@ -46,7 +45,7 @@ impl Emitter for GroupedEmitter {
     fn emit(
         &mut self,
         writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         for (filename, messages) in group_diagnostics_by_filename(diagnostics) {
@@ -73,7 +72,7 @@ impl Emitter for GroupedEmitter {
                     writer,
                     "{}",
                     DisplayGroupedMessage {
-                        notebook_index: context.notebook_index(&message.filename()),
+                        notebook_index: context.notebook_index(&message.expect_ruff_filename()),
                         message,
                         show_fix_status: self.show_fix_status,
                         unsafe_fixes: self.unsafe_fixes,
@@ -4,12 +4,13 @@ use serde::ser::SerializeSeq;
 use serde::{Serialize, Serializer};
 use serde_json::{Value, json};
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_notebook::NotebookIndex;
 use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
 use ruff_text_size::Ranged;
 
 use crate::Edit;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext};
 
 #[derive(Default)]
 pub struct JsonEmitter;
@@ -18,7 +19,7 @@ impl Emitter for JsonEmitter {
     fn emit(
         &mut self,
         writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         serde_json::to_writer_pretty(
@@ -34,7 +35,7 @@ impl Emitter for JsonEmitter {
 }
 
 struct ExpandedMessages<'a> {
-    diagnostics: &'a [OldDiagnostic],
+    diagnostics: &'a [Diagnostic],
     context: &'a EmitterContext<'a>,
 }
 
@@ -54,10 +55,11 @@ impl Serialize for ExpandedMessages<'_> {
     }
 }
 
-pub(crate) fn message_to_json_value(message: &OldDiagnostic, context: &EmitterContext) -> Value {
-    let source_file = message.source_file();
+pub(crate) fn message_to_json_value(message: &Diagnostic, context: &EmitterContext) -> Value {
+    let source_file = message.expect_ruff_source_file();
     let source_code = source_file.to_source_code();
-    let notebook_index = context.notebook_index(&message.filename());
+    let filename = message.expect_ruff_filename();
+    let notebook_index = context.notebook_index(&filename);
 
     let fix = message.fix().map(|fix| {
         json!({
@@ -67,8 +69,8 @@ pub(crate) fn message_to_json_value(message: &OldDiagnostic, context: &EmitterCo
         })
     });
 
-    let mut start_location = source_code.line_column(message.start());
-    let mut end_location = source_code.line_column(message.end());
+    let mut start_location = source_code.line_column(message.expect_range().start());
+    let mut end_location = source_code.line_column(message.expect_range().end());
     let mut noqa_location = message
         .noqa_offset()
         .map(|offset| source_code.line_column(offset));
@@ -94,7 +96,7 @@ pub(crate) fn message_to_json_value(message: &OldDiagnostic, context: &EmitterCo
         "cell": notebook_cell_index,
         "location": location_to_json(start_location),
         "end_location": location_to_json(end_location),
-        "filename": message.filename(),
+        "filename": filename,
         "noqa_row": noqa_location.map(|location| location.line)
     })
 }
@@ -1,7 +1,9 @@
 use std::io::Write;
 
+use ruff_db::diagnostic::Diagnostic;
+
 use crate::message::json::message_to_json_value;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext};
 
 #[derive(Default)]
 pub struct JsonLinesEmitter;
@@ -10,7 +12,7 @@ impl Emitter for JsonLinesEmitter {
     fn emit(
         &mut self,
         writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         for diagnostic in diagnostics {
@@ -3,11 +3,10 @@ use std::path::Path;
 
 use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite, XmlString};
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_source_file::LineColumn;
 
-use crate::message::{
-    Emitter, EmitterContext, MessageWithLocation, OldDiagnostic, group_diagnostics_by_filename,
-};
+use crate::message::{Emitter, EmitterContext, MessageWithLocation, group_diagnostics_by_filename};
 
 #[derive(Default)]
 pub struct JunitEmitter;
@@ -16,7 +15,7 @@ impl Emitter for JunitEmitter {
     fn emit(
         &mut self,
         writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         let mut report = Report::new("ruff");
@@ -44,7 +43,7 @@ impl Emitter for JunitEmitter {
         } = message;
         let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
         status.set_message(message.body());
-        let location = if context.is_notebook(&message.filename()) {
+        let location = if context.is_notebook(&message.expect_ruff_filename()) {
             // We can't give a reasonable location for the structured formats,
             // so we show one that's clearly a fallback
             LineColumn::default()
@@ -1,11 +1,11 @@
-use std::cmp::Ordering;
 use std::collections::BTreeMap;
 use std::fmt::Display;
 use std::io::Write;
 use std::ops::Deref;
 
-use ruff_db::diagnostic::{self as db, Annotation, DiagnosticId, LintName, Severity, Span};
-use ruff_python_parser::semantic_errors::SemanticSyntaxError;
+use ruff_db::diagnostic::{
+    Annotation, Diagnostic, DiagnosticId, LintName, SecondaryCode, Severity, Span,
+};
 use rustc_hash::FxHashMap;
 
 pub use azure::AzureEmitter;
@@ -18,17 +18,14 @@ pub use junit::JunitEmitter;
 pub use pylint::PylintEmitter;
 pub use rdjson::RdjsonEmitter;
 use ruff_notebook::NotebookIndex;
-use ruff_python_parser::{ParseError, UnsupportedSyntaxError};
 use ruff_source_file::{LineColumn, SourceFile};
-use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
+use ruff_text_size::{Ranged, TextRange, TextSize};
 pub use sarif::SarifEmitter;
 pub use text::TextEmitter;
 
 use crate::Fix;
-use crate::codes::NoqaCode;
-use crate::logging::DisplayParseErrorType;
+use crate::Violation;
 use crate::registry::Rule;
-use crate::{Locator, Violation};
 
 mod azure;
 mod diff;
@@ -43,292 +40,103 @@ mod rdjson;
 mod sarif;
 mod text;
 
-/// `OldDiagnostic` represents either a diagnostic message corresponding to a rule violation or a
-/// syntax error message.
+/// Creates a `Diagnostic` from a syntax error, with the format expected by Ruff.
 ///
-/// All of the information for syntax errors is captured in the underlying [`db::Diagnostic`], while
-/// rule violations can have the additional optional fields like fixes, suggestions, and (parent)
-/// `noqa` offsets.
+/// This is almost identical to `ruff_db::diagnostic::create_syntax_error_diagnostic`, except the
+/// `message` is stored as the primary diagnostic message instead of on the primary annotation, and
+/// `SyntaxError: ` is prepended to the message.
 ///
-/// For diagnostic messages, the [`db::Diagnostic`]'s primary message contains the
-/// [`OldDiagnostic::body`], and the primary annotation optionally contains the suggestion
-/// accompanying a fix. The `db::Diagnostic::id` field contains the kebab-case lint name derived
-/// from the `Rule`.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct OldDiagnostic {
-    pub diagnostic: db::Diagnostic,
-    // these fields are specific to rule violations
-    pub fix: Option<Fix>,
-    pub parent: Option<TextSize>,
-    pub(crate) noqa_offset: Option<TextSize>,
-    pub(crate) secondary_code: Option<SecondaryCode>,
+/// TODO(brent) These should be unified at some point, but we keep them separate for now to avoid a
+/// ton of snapshot changes while combining ruff's diagnostic type with `Diagnostic`.
+pub fn create_syntax_error_diagnostic(
+    span: impl Into<Span>,
+    message: impl std::fmt::Display,
+    range: impl Ranged,
+) -> Diagnostic {
+    let mut diag = Diagnostic::new(
+        DiagnosticId::InvalidSyntax,
+        Severity::Error,
+        format_args!("SyntaxError: {message}"),
+    );
+    let span = span.into().with_range(range.range());
+    diag.annotate(Annotation::primary(span));
+    diag
 }
 
-impl OldDiagnostic {
-    pub fn syntax_error(
-        message: impl Display,
-        range: TextRange,
-        file: SourceFile,
-    ) -> OldDiagnostic {
-        let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
-        let span = Span::from(file).with_range(range);
-        diag.annotate(Annotation::primary(span));
-        Self {
-            diagnostic: diag,
-            fix: None,
-            parent: None,
-            noqa_offset: None,
-            secondary_code: None,
-        }
+#[expect(clippy::too_many_arguments)]
+pub fn create_lint_diagnostic<B, S>(
+    body: B,
+    suggestion: Option<S>,
+    range: TextRange,
+    fix: Option<Fix>,
+    parent: Option<TextSize>,
+    file: SourceFile,
+    noqa_offset: Option<TextSize>,
+    rule: Rule,
+) -> Diagnostic
+where
+    B: Display,
+    S: Display,
+{
+    let mut diagnostic = Diagnostic::new(
+        DiagnosticId::Lint(LintName::of(rule.into())),
+        Severity::Error,
+        body,
+    );
+
+    if let Some(fix) = fix {
+        diagnostic.set_fix(fix);
     }
 
-    #[expect(clippy::too_many_arguments)]
-    pub fn lint<B, S>(
-        body: B,
-        suggestion: Option<S>,
-        range: TextRange,
-        fix: Option<Fix>,
-        parent: Option<TextSize>,
-        file: SourceFile,
-        noqa_offset: Option<TextSize>,
-        rule: Rule,
-    ) -> OldDiagnostic
-    where
-        B: Display,
-        S: Display,
-    {
-        let mut diagnostic = db::Diagnostic::new(
-            DiagnosticId::Lint(LintName::of(rule.into())),
-            Severity::Error,
-            body,
-        );
-        let span = Span::from(file).with_range(range);
-        let mut annotation = Annotation::primary(span);
-        if let Some(suggestion) = suggestion {
-            annotation = annotation.message(suggestion);
-        }
-        diagnostic.annotate(annotation);
-
-        OldDiagnostic {
-            diagnostic,
-            fix,
-            parent,
-            noqa_offset,
-            secondary_code: Some(SecondaryCode(rule.noqa_code().to_string())),
-        }
+    if let Some(parent) = parent {
+        diagnostic.set_parent(parent);
     }
 
-    /// Create an [`OldDiagnostic`] from the given [`ParseError`].
-    pub fn from_parse_error(
-        parse_error: &ParseError,
-        locator: &Locator,
-        file: SourceFile,
-    ) -> OldDiagnostic {
-        // Try to create a non-empty range so that the diagnostic can print a caret at the right
-        // position. This requires that we retrieve the next character, if any, and take its length
-        // to maintain char-boundaries.
-        let len = locator
-            .after(parse_error.location.start())
-            .chars()
-            .next()
-            .map_or(TextSize::new(0), TextLen::text_len);
-
-        OldDiagnostic::syntax_error(
-            format_args!(
-                "SyntaxError: {}",
-                DisplayParseErrorType::new(&parse_error.error)
-            ),
-            TextRange::at(parse_error.location.start(), len),
-            file,
-        )
+    if let Some(noqa_offset) = noqa_offset {
+        diagnostic.set_noqa_offset(noqa_offset);
     }
 
-    /// Create an [`OldDiagnostic`] from the given [`UnsupportedSyntaxError`].
-    pub fn from_unsupported_syntax_error(
-        unsupported_syntax_error: &UnsupportedSyntaxError,
-        file: SourceFile,
-    ) -> OldDiagnostic {
-        OldDiagnostic::syntax_error(
-            format_args!("SyntaxError: {unsupported_syntax_error}"),
-            unsupported_syntax_error.range,
-            file,
-        )
+    let span = Span::from(file).with_range(range);
+    let mut annotation = Annotation::primary(span);
+    if let Some(suggestion) = suggestion {
+        annotation = annotation.message(suggestion);
    }
+    diagnostic.annotate(annotation);
 
-    /// Create an [`OldDiagnostic`] from the given [`SemanticSyntaxError`].
-    pub fn from_semantic_syntax_error(
-        semantic_syntax_error: &SemanticSyntaxError,
-        file: SourceFile,
-    ) -> OldDiagnostic {
-        OldDiagnostic::syntax_error(
-            format_args!("SyntaxError: {semantic_syntax_error}"),
-            semantic_syntax_error.range,
-            file,
-        )
-    }
+    diagnostic.set_secondary_code(SecondaryCode::new(rule.noqa_code().to_string()));
 
-    // TODO(brent) We temporarily allow this to avoid updating all of the call sites to add
-    // references. I expect this method to go away or change significantly with the rest of the
-    // diagnostic refactor, but if it still exists in this form at the end of the refactor, we
-    // should just update the call sites.
-    #[expect(clippy::needless_pass_by_value)]
-    pub fn new<T: Violation>(kind: T, range: TextRange, file: &SourceFile) -> Self {
-        Self::lint(
-            Violation::message(&kind),
-            Violation::fix_title(&kind),
-            range,
-            None,
-            None,
-            file.clone(),
-            None,
-            T::rule(),
-        )
-    }
-
-    /// Consumes `self` and returns a new `Diagnostic` with the given parent node.
-    #[inline]
-    #[must_use]
-    pub fn with_parent(mut self, parent: TextSize) -> Self {
-        self.set_parent(parent);
-        self
-    }
-
-    /// Set the location of the diagnostic's parent node.
-    #[inline]
-    pub fn set_parent(&mut self, parent: TextSize) {
-        self.parent = Some(parent);
-    }
-
-    /// Consumes `self` and returns a new `Diagnostic` with the given noqa offset.
-    #[inline]
-    #[must_use]
-    pub fn with_noqa_offset(mut self, noqa_offset: TextSize) -> Self {
-        self.noqa_offset = Some(noqa_offset);
-        self
-    }
-
-    /// Returns `true` if `self` is a syntax error message.
-    pub fn is_syntax_error(&self) -> bool {
-        self.diagnostic.id().is_invalid_syntax()
-    }
-
-    /// Returns the name used to represent the diagnostic.
-    pub fn name(&self) -> &'static str {
-        if self.is_syntax_error() {
-            "syntax-error"
-        } else {
-            self.diagnostic.id().as_str()
-        }
-    }
-
-    /// Returns the message body to display to the user.
-    pub fn body(&self) -> &str {
-        self.diagnostic.primary_message()
-    }
-
-    /// Returns the fix suggestion for the violation.
-    pub fn suggestion(&self) -> Option<&str> {
-        self.diagnostic.primary_annotation()?.get_message()
-    }
-
-    /// Returns the offset at which the `noqa` comment will be placed if it's a diagnostic message.
-    pub fn noqa_offset(&self) -> Option<TextSize> {
-        self.noqa_offset
-    }
-
-    /// Returns the [`Fix`] for the diagnostic, if there is any.
-    pub fn fix(&self) -> Option<&Fix> {
-        self.fix.as_ref()
-    }
-
-    /// Returns `true` if the diagnostic contains a [`Fix`].
-    pub fn fixable(&self) -> bool {
-        self.fix().is_some()
-    }
-
-    /// Returns the noqa code for the diagnostic message as a string.
-    pub fn secondary_code(&self) -> Option<&SecondaryCode> {
-        self.secondary_code.as_ref()
-    }
-
-    /// Returns the URL for the rule documentation, if it exists.
-    pub fn to_url(&self) -> Option<String> {
-        if self.is_syntax_error() {
-            None
-        } else {
-            Some(format!(
-                "{}/rules/{}",
-                env!("CARGO_PKG_HOMEPAGE"),
-                self.name()
-            ))
-        }
-    }
-
-    /// Returns the filename for the message.
-    pub fn filename(&self) -> String {
-        self.diagnostic
-            .expect_primary_span()
-            .expect_ruff_file()
-            .name()
-            .to_string()
-    }
-
-    /// Computes the start source location for the message.
-    pub fn compute_start_location(&self) -> LineColumn {
-        self.diagnostic
-            .expect_primary_span()
-            .expect_ruff_file()
-            .to_source_code()
-            .line_column(self.start())
-    }
-
-    /// Computes the end source location for the message.
-    pub fn compute_end_location(&self) -> LineColumn {
-        self.diagnostic
-            .expect_primary_span()
-            .expect_ruff_file()
-            .to_source_code()
-            .line_column(self.end())
-    }
-
-    /// Returns the [`SourceFile`] which the message belongs to.
-    pub fn source_file(&self) -> SourceFile {
-        self.diagnostic
-            .expect_primary_span()
-            .expect_ruff_file()
-            .clone()
-    }
+    diagnostic
 }
 
-impl Ord for OldDiagnostic {
-    fn cmp(&self, other: &Self) -> Ordering {
-        (self.source_file(), self.start()).cmp(&(other.source_file(), other.start()))
-    }
-}
-
-impl PartialOrd for OldDiagnostic {
-    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl Ranged for OldDiagnostic {
-    fn range(&self) -> TextRange {
-        self.diagnostic
-            .expect_primary_span()
-            .range()
-            .expect("Expected range for ruff span")
-    }
+// TODO(brent) We temporarily allow this to avoid updating all of the call sites to add
+// references. I expect this method to go away or change significantly with the rest of the
+// diagnostic refactor, but if it still exists in this form at the end of the refactor, we
+// should just update the call sites.
+#[expect(clippy::needless_pass_by_value)]
+pub fn diagnostic_from_violation<T: Violation>(
+    kind: T,
+    range: TextRange,
+    file: &SourceFile,
+) -> Diagnostic {
+    create_lint_diagnostic(
+        Violation::message(&kind),
+        Violation::fix_title(&kind),
+        range,
+        None,
+        None,
+        file.clone(),
+        None,
+        T::rule(),
+    )
 }
 
 struct MessageWithLocation<'a> {
-    message: &'a OldDiagnostic,
+    message: &'a Diagnostic,
     start_location: LineColumn,
 }
 
 impl Deref for MessageWithLocation<'_> {
-    type Target = OldDiagnostic;
+    type Target = Diagnostic;
 
     fn deref(&self) -> &Self::Target {
         self.message
|
@ -336,30 +144,30 @@ impl Deref for MessageWithLocation<'_> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn group_diagnostics_by_filename(
|
fn group_diagnostics_by_filename(
|
||||||
diagnostics: &[OldDiagnostic],
|
diagnostics: &[Diagnostic],
|
||||||
) -> BTreeMap<String, Vec<MessageWithLocation>> {
|
) -> BTreeMap<String, Vec<MessageWithLocation>> {
|
||||||
let mut grouped_messages = BTreeMap::default();
|
let mut grouped_messages = BTreeMap::default();
|
||||||
for diagnostic in diagnostics {
|
for diagnostic in diagnostics {
|
||||||
grouped_messages
|
grouped_messages
|
||||||
.entry(diagnostic.filename().to_string())
|
.entry(diagnostic.expect_ruff_filename())
|
||||||
.or_insert_with(Vec::new)
|
.or_insert_with(Vec::new)
|
||||||
.push(MessageWithLocation {
|
.push(MessageWithLocation {
|
||||||
message: diagnostic,
|
message: diagnostic,
|
||||||
start_location: diagnostic.compute_start_location(),
|
start_location: diagnostic.expect_ruff_start_location(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
grouped_messages
|
grouped_messages
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Display format for [`OldDiagnostic`]s.
|
/// Display format for [`Diagnostic`]s.
|
||||||
///
|
///
|
||||||
/// The emitter serializes a slice of [`OldDiagnostic`]s and writes them to a [`Write`].
|
/// The emitter serializes a slice of [`Diagnostic`]s and writes them to a [`Write`].
|
||||||
pub trait Emitter {
|
pub trait Emitter {
|
||||||
/// Serializes the `diagnostics` and writes the output to `writer`.
|
/// Serializes the `diagnostics` and writes the output to `writer`.
|
||||||
fn emit(
|
fn emit(
|
||||||
&mut self,
|
&mut self,
|
||||||
writer: &mut dyn Write,
|
writer: &mut dyn Write,
|
||||||
diagnostics: &[OldDiagnostic],
|
diagnostics: &[Diagnostic],
|
||||||
context: &EmitterContext,
|
context: &EmitterContext,
|
||||||
) -> anyhow::Result<()>;
|
) -> anyhow::Result<()>;
|
||||||
}
|
}
|
||||||
|
@ -384,101 +192,40 @@ impl<'a> EmitterContext<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A secondary identifier for a lint diagnostic.
|
|
||||||
///
|
|
||||||
/// For Ruff rules this means the noqa code.
|
|
||||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, serde::Serialize)]
|
|
||||||
#[serde(transparent)]
|
|
||||||
pub struct SecondaryCode(String);
|
|
||||||
|
|
||||||
impl SecondaryCode {
|
|
||||||
pub fn new(code: String) -> Self {
|
|
||||||
Self(code)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Display for SecondaryCode {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
f.write_str(&self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::ops::Deref for SecondaryCode {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq<&str> for SecondaryCode {
|
|
||||||
fn eq(&self, other: &&str) -> bool {
|
|
||||||
self.0 == *other
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq<SecondaryCode> for &str {
|
|
||||||
fn eq(&self, other: &SecondaryCode) -> bool {
|
|
||||||
other.eq(self)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq<NoqaCode> for SecondaryCode {
|
|
||||||
fn eq(&self, other: &NoqaCode) -> bool {
|
|
||||||
&self.as_str() == other
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialEq<SecondaryCode> for NoqaCode {
|
|
||||||
fn eq(&self, other: &SecondaryCode) -> bool {
|
|
||||||
other.eq(self)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// for `hashbrown::EntryRef`
|
|
||||||
impl From<&SecondaryCode> for SecondaryCode {
|
|
||||||
fn from(value: &SecondaryCode) -> Self {
|
|
||||||
value.clone()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
|
|
||||||
use crate::codes::Rule;
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
use crate::{Edit, Fix};
|
|
||||||
use ruff_notebook::NotebookIndex;
|
use ruff_notebook::NotebookIndex;
|
||||||
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
|
use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
|
||||||
use ruff_source_file::{OneIndexed, SourceFileBuilder};
|
use ruff_source_file::{OneIndexed, SourceFileBuilder};
|
||||||
use ruff_text_size::{TextRange, TextSize};
|
use ruff_text_size::{TextRange, TextSize};
|
||||||
|
|
||||||
use crate::Locator;
|
use crate::codes::Rule;
|
||||||
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
|
use crate::message::{Emitter, EmitterContext, create_lint_diagnostic};
|
||||||
|
use crate::{Edit, Fix};
|
||||||
|
|
||||||
pub(super) fn create_syntax_error_diagnostics() -> Vec<OldDiagnostic> {
|
use super::create_syntax_error_diagnostic;
|
||||||
|
|
||||||
|
pub(super) fn create_syntax_error_diagnostics() -> Vec<Diagnostic> {
|
||||||
let source = r"from os import
|
let source = r"from os import
|
||||||
|
|
||||||
if call(foo
|
if call(foo
|
||||||
def bar():
|
def bar():
|
||||||
pass
|
pass
|
||||||
";
|
";
|
||||||
let locator = Locator::new(source);
|
|
||||||
let source_file = SourceFileBuilder::new("syntax_errors.py", source).finish();
|
let source_file = SourceFileBuilder::new("syntax_errors.py", source).finish();
|
||||||
parse_unchecked(source, ParseOptions::from(Mode::Module))
|
parse_unchecked(source, ParseOptions::from(Mode::Module))
|
||||||
.errors()
|
.errors()
|
||||||
.iter()
|
.iter()
|
||||||
.map(|parse_error| {
|
.map(|parse_error| {
|
||||||
OldDiagnostic::from_parse_error(parse_error, &locator, source_file.clone())
|
create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn create_diagnostics() -> Vec<OldDiagnostic> {
|
pub(super) fn create_diagnostics() -> Vec<Diagnostic> {
|
||||||
let fib = r#"import os
|
let fib = r#"import os
|
||||||
|
|
||||||
|
|
||||||
|
@ -496,7 +243,7 @@ def fibonacci(n):
|
||||||
let fib_source = SourceFileBuilder::new("fib.py", fib).finish();
|
let fib_source = SourceFileBuilder::new("fib.py", fib).finish();
|
||||||
|
|
||||||
let unused_import_start = TextSize::from(7);
|
let unused_import_start = TextSize::from(7);
|
||||||
let unused_import = OldDiagnostic::lint(
|
let unused_import = create_lint_diagnostic(
|
||||||
"`os` imported but unused",
|
"`os` imported but unused",
|
||||||
Some("Remove unused import: `os`"),
|
Some("Remove unused import: `os`"),
|
||||||
TextRange::new(unused_import_start, TextSize::from(9)),
|
TextRange::new(unused_import_start, TextSize::from(9)),
|
||||||
|
@ -511,7 +258,7 @@ def fibonacci(n):
|
||||||
);
|
);
|
||||||
|
|
||||||
let unused_variable_start = TextSize::from(94);
|
let unused_variable_start = TextSize::from(94);
|
||||||
let unused_variable = OldDiagnostic::lint(
|
let unused_variable = create_lint_diagnostic(
|
||||||
"Local variable `x` is assigned to but never used",
|
"Local variable `x` is assigned to but never used",
|
||||||
Some("Remove assignment to unused variable `x`"),
|
Some("Remove assignment to unused variable `x`"),
|
||||||
TextRange::new(unused_variable_start, TextSize::from(95)),
|
TextRange::new(unused_variable_start, TextSize::from(95)),
|
||||||
|
@ -528,7 +275,7 @@ def fibonacci(n):
|
||||||
let file_2 = r"if a == 1: pass";
|
let file_2 = r"if a == 1: pass";
|
||||||
|
|
||||||
let undefined_name_start = TextSize::from(3);
|
let undefined_name_start = TextSize::from(3);
|
||||||
let undefined_name = OldDiagnostic::lint(
|
let undefined_name = create_lint_diagnostic(
|
||||||
"Undefined name `a`",
|
"Undefined name `a`",
|
||||||
Option::<&'static str>::None,
|
Option::<&'static str>::None,
|
||||||
TextRange::new(undefined_name_start, TextSize::from(4)),
|
TextRange::new(undefined_name_start, TextSize::from(4)),
|
||||||
|
@ -543,7 +290,7 @@ def fibonacci(n):
|
||||||
}
|
}
|
||||||
|
|
||||||
    pub(super) fn create_notebook_diagnostics()
-    -> (Vec<OldDiagnostic>, FxHashMap<String, NotebookIndex>) {
+    -> (Vec<Diagnostic>, FxHashMap<String, NotebookIndex>) {
        let notebook = r"# cell 1
import os
# cell 2
@@ -559,7 +306,7 @@ def foo():
        let notebook_source = SourceFileBuilder::new("notebook.ipynb", notebook).finish();

        let unused_import_os_start = TextSize::from(16);
-        let unused_import_os = OldDiagnostic::lint(
+        let unused_import_os = create_lint_diagnostic(
            "`os` imported but unused",
            Some("Remove unused import: `os`"),
            TextRange::new(unused_import_os_start, TextSize::from(18)),
@@ -574,7 +321,7 @@ def foo():
        );

        let unused_import_math_start = TextSize::from(35);
-        let unused_import_math = OldDiagnostic::lint(
+        let unused_import_math = create_lint_diagnostic(
            "`math` imported but unused",
            Some("Remove unused import: `math`"),
            TextRange::new(unused_import_math_start, TextSize::from(39)),
@@ -589,7 +336,7 @@ def foo():
        );

        let unused_variable_start = TextSize::from(98);
-        let unused_variable = OldDiagnostic::lint(
+        let unused_variable = create_lint_diagnostic(
            "Local variable `x` is assigned to but never used",
            Some("Remove assignment to unused variable `x`"),
            TextRange::new(unused_variable_start, TextSize::from(99)),
@@ -642,7 +389,7 @@ def foo():

    pub(super) fn capture_emitter_output(
        emitter: &mut dyn Emitter,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
    ) -> String {
        let notebook_indexes = FxHashMap::default();
        let context = EmitterContext::new(&notebook_indexes);
@@ -654,7 +401,7 @@ def foo():

    pub(super) fn capture_emitter_notebook_output(
        emitter: &mut dyn Emitter,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
        notebook_indexes: &FxHashMap<String, NotebookIndex>,
    ) -> String {
        let context = EmitterContext::new(notebook_indexes);

@@ -1,9 +1,10 @@
use std::io::Write;

+use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::OneIndexed;

use crate::fs::relativize_path;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext};

/// Generate violations in Pylint format.
/// See: [Flake8 documentation](https://flake8.pycqa.org/en/latest/internal/formatters.html#pylint-formatter)
@@ -14,16 +15,17 @@ impl Emitter for PylintEmitter {
    fn emit(
        &mut self,
        writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
        context: &EmitterContext,
    ) -> anyhow::Result<()> {
        for diagnostic in diagnostics {
-            let row = if context.is_notebook(&diagnostic.filename()) {
+            let filename = diagnostic.expect_ruff_filename();
+            let row = if context.is_notebook(&filename) {
                // We can't give a reasonable location for the structured formats,
                // so we show one that's clearly a fallback
                OneIndexed::from_zero_indexed(0)
            } else {
-                diagnostic.compute_start_location().line
+                diagnostic.expect_ruff_start_location().line
            };

            let body = if let Some(code) = diagnostic.secondary_code() {
@@ -35,7 +37,7 @@ impl Emitter for PylintEmitter {
            writeln!(
                writer,
                "{path}:{row}: {body}",
-                path = relativize_path(&*diagnostic.filename()),
+                path = relativize_path(&filename),
            )?;
        }

@@ -4,11 +4,12 @@ use serde::ser::SerializeSeq;
use serde::{Serialize, Serializer};
use serde_json::{Value, json};

+use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::SourceCode;
use ruff_text_size::Ranged;

use crate::Edit;
-use crate::message::{Emitter, EmitterContext, LineColumn, OldDiagnostic};
+use crate::message::{Emitter, EmitterContext, LineColumn};

#[derive(Default)]
pub struct RdjsonEmitter;
@@ -17,7 +18,7 @@ impl Emitter for RdjsonEmitter {
    fn emit(
        &mut self,
        writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
        _context: &EmitterContext,
    ) -> anyhow::Result<()> {
        serde_json::to_writer_pretty(
@@ -37,7 +38,7 @@ impl Emitter for RdjsonEmitter {
}

struct ExpandedMessages<'a> {
-    diagnostics: &'a [OldDiagnostic],
+    diagnostics: &'a [Diagnostic],
}

impl Serialize for ExpandedMessages<'_> {
@@ -56,18 +57,18 @@ impl Serialize for ExpandedMessages<'_> {
    }
}

-fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
+fn message_to_rdjson_value(message: &Diagnostic) -> Value {
-    let source_file = message.source_file();
+    let source_file = message.expect_ruff_source_file();
    let source_code = source_file.to_source_code();

-    let start_location = source_code.line_column(message.start());
+    let start_location = source_code.line_column(message.expect_range().start());
-    let end_location = source_code.line_column(message.end());
+    let end_location = source_code.line_column(message.expect_range().end());

    if let Some(fix) = message.fix() {
        json!({
            "message": message.body(),
            "location": {
-                "path": message.filename(),
+                "path": message.expect_ruff_filename(),
                "range": rdjson_range(start_location, end_location),
            },
            "code": {
@@ -80,7 +81,7 @@ fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
        json!({
            "message": message.body(),
            "location": {
-                "path": message.filename(),
+                "path": message.expect_ruff_filename(),
                "range": rdjson_range(start_location, end_location),
            },
            "code": {

@@ -5,11 +5,12 @@ use anyhow::Result;
use serde::{Serialize, Serializer};
use serde_json::json;

+use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
use ruff_source_file::OneIndexed;

use crate::VERSION;
use crate::fs::normalize_path;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
+use crate::message::{Emitter, EmitterContext};
use crate::registry::{Linter, RuleNamespace};

pub struct SarifEmitter;
@@ -18,7 +19,7 @@ impl Emitter for SarifEmitter {
    fn emit(
        &mut self,
        writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
        _context: &EmitterContext,
    ) -> Result<()> {
        let results = diagnostics
@@ -122,10 +123,10 @@ struct SarifResult<'a> {

impl<'a> SarifResult<'a> {
    #[cfg(not(target_arch = "wasm32"))]
-    fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
+    fn from_message(message: &'a Diagnostic) -> Result<Self> {
-        let start_location = message.compute_start_location();
+        let start_location = message.expect_ruff_start_location();
-        let end_location = message.compute_end_location();
+        let end_location = message.expect_ruff_end_location();
-        let path = normalize_path(&*message.filename());
+        let path = normalize_path(&*message.expect_ruff_filename());
        Ok(Self {
            code: message.secondary_code(),
            level: "error".to_string(),
@@ -142,10 +143,10 @@ impl<'a> SarifResult<'a> {

    #[cfg(target_arch = "wasm32")]
    #[expect(clippy::unnecessary_wraps)]
-    fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
+    fn from_message(message: &'a Diagnostic) -> Result<Self> {
-        let start_location = message.compute_start_location();
+        let start_location = message.expect_ruff_start_location();
-        let end_location = message.compute_end_location();
+        let end_location = message.expect_ruff_end_location();
-        let path = normalize_path(&*message.filename());
+        let path = normalize_path(&*message.expect_ruff_filename());
        Ok(Self {
            code: message.secondary_code(),
            level: "error".to_string(),

@@ -6,15 +6,16 @@ use bitflags::bitflags;
use colored::Colorize;
use ruff_annotate_snippets::{Level, Renderer, Snippet};

+use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
use ruff_notebook::NotebookIndex;
use ruff_source_file::{LineColumn, OneIndexed};
-use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
+use ruff_text_size::{TextLen, TextRange, TextSize};

use crate::Locator;
use crate::fs::relativize_path;
use crate::line_width::{IndentWidth, LineWidthBuilder};
use crate::message::diff::Diff;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
+use crate::message::{Emitter, EmitterContext};
use crate::settings::types::UnsafeFixes;

bitflags! {
@@ -66,19 +67,20 @@ impl Emitter for TextEmitter {
    fn emit(
        &mut self,
        writer: &mut dyn Write,
-        diagnostics: &[OldDiagnostic],
+        diagnostics: &[Diagnostic],
        context: &EmitterContext,
    ) -> anyhow::Result<()> {
        for message in diagnostics {
+            let filename = message.expect_ruff_filename();
            write!(
                writer,
                "{path}{sep}",
-                path = relativize_path(&*message.filename()).bold(),
+                path = relativize_path(&filename).bold(),
                sep = ":".cyan(),
            )?;

-            let start_location = message.compute_start_location();
+            let start_location = message.expect_ruff_start_location();
-            let notebook_index = context.notebook_index(&message.filename());
+            let notebook_index = context.notebook_index(&filename);

            // Check if we're working on a jupyter notebook and translate positions with cell accordingly
            let diagnostic_location = if let Some(notebook_index) = notebook_index {
@@ -116,7 +118,7 @@ impl Emitter for TextEmitter {

            if self.flags.intersects(EmitterFlags::SHOW_SOURCE) {
                // The `0..0` range is used to highlight file-level diagnostics.
-                if message.range() != TextRange::default() {
+                if message.expect_range() != TextRange::default() {
                    writeln!(
                        writer,
                        "{}",
@@ -140,7 +142,7 @@ impl Emitter for TextEmitter {
}

pub(super) struct RuleCodeAndBody<'a> {
-    pub(crate) message: &'a OldDiagnostic,
+    pub(crate) message: &'a Diagnostic,
    pub(crate) show_fix_status: bool,
    pub(crate) unsafe_fixes: UnsafeFixes,
}
@@ -178,7 +180,7 @@ impl Display for RuleCodeAndBody<'_> {
}

pub(super) struct MessageCodeFrame<'a> {
-    pub(crate) message: &'a OldDiagnostic,
+    pub(crate) message: &'a Diagnostic,
    pub(crate) notebook_index: Option<&'a NotebookIndex>,
}

@@ -191,10 +193,10 @@ impl Display for MessageCodeFrame<'_> {
            Vec::new()
        };

        let source_file = self.message.source_file();
-        let source_file = self.message.source_file();
+        let source_file = self.message.expect_ruff_source_file();
        let source_code = source_file.to_source_code();

-        let content_start_index = source_code.line_index(self.message.start());
+        let content_start_index = source_code.line_index(self.message.expect_range().start());
        let mut start_index = content_start_index.saturating_sub(2);

        // If we're working with a Jupyter Notebook, skip the lines which are
@@ -217,7 +219,7 @@ impl Display for MessageCodeFrame<'_> {
            start_index = start_index.saturating_add(1);
        }

-        let content_end_index = source_code.line_index(self.message.end());
+        let content_end_index = source_code.line_index(self.message.expect_range().end());
        let mut end_index = content_end_index
            .saturating_add(2)
            .min(OneIndexed::from_zero_indexed(source_code.line_count()));
@@ -248,7 +250,7 @@ impl Display for MessageCodeFrame<'_> {

        let source = replace_whitespace_and_unprintable(
            source_code.slice(TextRange::new(start_offset, end_offset)),
-            self.message.range() - start_offset,
+            self.message.expect_range() - start_offset,
        )
        .fix_up_empty_spans_after_line_terminator();

@@ -9,6 +9,7 @@ use anyhow::Result;
use itertools::Itertools;
use log::warn;

+use ruff_db::diagnostic::{Diagnostic, SecondaryCode};
use ruff_python_trivia::{CommentRanges, Cursor, indentation_at_offset};
use ruff_source_file::{LineEnding, LineRanges};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
@@ -17,7 +18,6 @@ use rustc_hash::FxHashSet;
use crate::Edit;
use crate::Locator;
use crate::fs::relativize_path;
-use crate::message::{OldDiagnostic, SecondaryCode};
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;

@@ -28,7 +28,7 @@ use crate::rule_redirects::get_redirect_target;
/// simultaneously.
pub fn generate_noqa_edits(
    path: &Path,
-    diagnostics: &[OldDiagnostic],
+    diagnostics: &[Diagnostic],
    locator: &Locator,
    comment_ranges: &CommentRanges,
    external: &[String],
@@ -717,7 +717,7 @@ impl Error for LexicalError {}
/// Adds noqa comments to suppress all messages of a file.
pub(crate) fn add_noqa(
    path: &Path,
-    diagnostics: &[OldDiagnostic],
+    diagnostics: &[Diagnostic],
    locator: &Locator,
    comment_ranges: &CommentRanges,
    external: &[String],
@@ -740,7 +740,7 @@ pub(crate) fn add_noqa(

fn add_noqa_inner(
    path: &Path,
-    diagnostics: &[OldDiagnostic],
+    diagnostics: &[Diagnostic],
    locator: &Locator,
    comment_ranges: &CommentRanges,
    external: &[String],
@@ -845,7 +845,7 @@ struct NoqaComment<'a> {
}

fn find_noqa_comments<'a>(
-    diagnostics: &'a [OldDiagnostic],
+    diagnostics: &'a [Diagnostic],
    locator: &'a Locator,
    exemption: &'a FileExemption,
    directives: &'a NoqaDirectives,
@@ -867,7 +867,7 @@ fn find_noqa_comments<'a>(
        }

        // Is the violation ignored by a `noqa` directive on the parent line?
-        if let Some(parent) = message.parent {
+        if let Some(parent) = message.parent() {
            if let Some(directive_line) =
                directives.find_line_with_directive(noqa_line_for.resolve(parent))
            {
@@ -886,7 +886,7 @@ fn find_noqa_comments<'a>(
            }
        }

-        let noqa_offset = noqa_line_for.resolve(message.range().start());
+        let noqa_offset = noqa_line_for.resolve(message.expect_range().start());

        // Or ignored by the directive itself?
        if let Some(directive_line) = directives.find_line_with_directive(noqa_offset) {
@@ -1225,6 +1225,8 @@ mod tests {
    use ruff_source_file::{LineEnding, SourceFileBuilder};
    use ruff_text_size::{TextLen, TextRange, TextSize};

+    use crate::Edit;
+    use crate::message::diagnostic_from_violation;
    use crate::noqa::{
        Directive, LexicalError, NoqaLexerOutput, NoqaMapping, add_noqa_inner, lex_codes,
        lex_file_exemption, lex_inline_noqa,
@@ -1232,7 +1234,6 @@ mod tests {
    use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
    use crate::rules::pyflakes::rules::UnusedVariable;
    use crate::rules::pyupgrade::rules::PrintfStringFormatting;
-    use crate::{Edit, OldDiagnostic};
    use crate::{Locator, generate_noqa_edits};

    fn assert_lexed_ranges_match_slices(
@@ -2831,7 +2832,7 @@ mod tests {
        assert_eq!(output, format!("{contents}"));

        let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
-        let messages = [OldDiagnostic::new(
+        let messages = [diagnostic_from_violation(
            UnusedVariable {
                name: "x".to_string(),
            },
@@ -2855,12 +2856,12 @@ mod tests {

        let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
        let messages = [
-            OldDiagnostic::new(
+            diagnostic_from_violation(
                AmbiguousVariableName("x".to_string()),
                TextRange::new(TextSize::from(0), TextSize::from(0)),
                &source_file,
            ),
-            OldDiagnostic::new(
+            diagnostic_from_violation(
                UnusedVariable {
                    name: "x".to_string(),
                },
@@ -2886,12 +2887,12 @@ mod tests {

        let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
        let messages = [
-            OldDiagnostic::new(
+            diagnostic_from_violation(
                AmbiguousVariableName("x".to_string()),
                TextRange::new(TextSize::from(0), TextSize::from(0)),
                &source_file,
            ),
-            OldDiagnostic::new(
+            diagnostic_from_violation(
                UnusedVariable {
                    name: "x".to_string(),
                },
@@ -2930,7 +2931,7 @@ print(
"#;
        let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect();
        let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
-        let messages = [OldDiagnostic::new(
+        let messages = [diagnostic_from_violation(
            PrintfStringFormatting,
            TextRange::new(12.into(), 79.into()),
            &source_file,
@@ -2963,7 +2964,7 @@ foo;
bar =
";
        let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
-        let messages = [OldDiagnostic::new(
+        let messages = [diagnostic_from_violation(
            UselessSemicolon,
            TextRange::new(4.into(), 5.into()),
            &source_file,

@@ -3,19 +3,17 @@ use log::warn;
use pyproject_toml::PyProjectToml;
use ruff_text_size::{TextRange, TextSize};

+use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::SourceFile;

use crate::IOError;
-use crate::OldDiagnostic;
+use crate::message::diagnostic_from_violation;
use crate::registry::Rule;
use crate::rules::ruff::rules::InvalidPyprojectToml;
use crate::settings::LinterSettings;

/// RUF200
-pub fn lint_pyproject_toml(
+pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings) -> Vec<Diagnostic> {
-    source_file: &SourceFile,
-    settings: &LinterSettings,
-) -> Vec<OldDiagnostic> {
    let Some(err) = toml::from_str::<PyProjectToml>(source_file.source_text()).err() else {
        return Vec::default();
    };
@@ -32,8 +30,11 @@ pub fn lint_pyproject_toml(
            source_file.name(),
        );
        if settings.rules.enabled(Rule::IOError) {
-            let diagnostic =
+            let diagnostic = diagnostic_from_violation(
-                OldDiagnostic::new(IOError { message }, TextRange::default(), source_file);
+                IOError { message },
+                TextRange::default(),
+                source_file,
+            );
            messages.push(diagnostic);
        } else {
            warn!(
@@ -55,7 +56,7 @@ pub fn lint_pyproject_toml(

    if settings.rules.enabled(Rule::InvalidPyprojectToml) {
        let toml_err = err.message().to_string();
-        let diagnostic = OldDiagnostic::new(
+        let diagnostic = diagnostic_from_violation(
            InvalidPyprojectToml { message: toml_err },
            range,
            source_file,

@@ -355,7 +355,7 @@ fn check_token(
        if let Some(mut diagnostic) =
            lint_context.report_diagnostic_if_enabled(ProhibitedTrailingComma, prev.range())
        {
-            let range = diagnostic.range();
+            let range = diagnostic.expect_range();
            diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
            return;
        }

@@ -6,7 +6,7 @@ COM81_syntax_error.py:3:5: SyntaxError: Starred expression cannot be used here
1 | # Check for `flake8-commas` violation for a file containing syntax errors.
2 | (
3 |     *args
-  |     ^
+  |     ^^^^^
4 | )
  |

@ -5,7 +5,7 @@ ISC_syntax_error.py:2:5: SyntaxError: missing closing quote in string literal
|
||||||
|
|
|
|
||||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||||
2 | "a" "b
|
2 | "a" "b
|
||||||
| ^
|
| ^^
|
||||||
3 | "a" "b" "c
|
3 | "a" "b" "c
|
||||||
4 | "a" """b
|
4 | "a" """b
|
||||||
|
|
|
|
||||||
|
@ -36,7 +36,7 @@ ISC_syntax_error.py:3:9: SyntaxError: missing closing quote in string literal
|
||||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||||
2 | "a" "b
|
2 | "a" "b
|
||||||
3 | "a" "b" "c
|
3 | "a" "b" "c
|
||||||
| ^
|
| ^^
|
||||||
4 | "a" """b
|
4 | "a" """b
|
||||||
5 | c""" "d
|
5 | c""" "d
|
||||||
|
|
|
|
||||||
|
@ -68,7 +68,7 @@ ISC_syntax_error.py:5:6: SyntaxError: missing closing quote in string literal
|
||||||
3 | "a" "b" "c
|
3 | "a" "b" "c
|
||||||
4 | "a" """b
|
4 | "a" """b
|
||||||
5 | c""" "d
|
5 | c""" "d
|
||||||
| ^
|
| ^^
|
||||||
6 |
|
6 |
|
||||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||||
|
|
|
|
||||||
|
@ -153,19 +153,21 @@ ISC_syntax_error.py:16:5: SyntaxError: missing closing quote in string literal
|
||||||
14 | (
|
14 | (
|
||||||
15 | "a"
|
15 | "a"
|
||||||
16 | "b
|
16 | "b
|
||||||
| ^
|
| ^^
|
||||||
17 | "c"
|
17 | "c"
|
||||||
18 | "d"
|
18 | "d"
|
||||||
|
|
|
|
||||||
|
|
||||||
ISC_syntax_error.py:26:9: SyntaxError: f-string: unterminated triple-quoted string
|
ISC_syntax_error.py:26:9: SyntaxError: f-string: unterminated triple-quoted string
|
||||||
|
|
|
|
||||||
24 | (
|
24 | (
|
||||||
25 | """abc"""
|
25 | """abc"""
|
||||||
26 | f"""def
|
26 | f"""def
|
||||||
| ^
|
| _________^
|
||||||
27 | "g" "h"
|
27 | | "g" "h"
|
||||||
28 | "i" "j"
|
28 | | "i" "j"
|
||||||
|
29 | | )
|
||||||
|
| |__^
|
||||||
|
|
|
|
||||||
|
|
||||||
ISC_syntax_error.py:30:1: SyntaxError: unexpected EOF while parsing
|
ISC_syntax_error.py:30:1: SyntaxError: unexpected EOF while parsing
|
||||||
|
|
|
@ -5,7 +5,7 @@ ISC_syntax_error.py:2:5: SyntaxError: missing closing quote in string literal
|
||||||
|
|
|
|
||||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||||
2 | "a" "b
|
2 | "a" "b
|
||||||
| ^
|
| ^^
|
||||||
3 | "a" "b" "c
|
3 | "a" "b" "c
|
||||||
4 | "a" """b
|
4 | "a" """b
|
||||||
|
|
|
|
||||||
|
@ -25,7 +25,7 @@ ISC_syntax_error.py:3:9: SyntaxError: missing closing quote in string literal
|
||||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||||
2 | "a" "b
|
2 | "a" "b
|
||||||
3 | "a" "b" "c
|
3 | "a" "b" "c
|
||||||
| ^
|
| ^^
|
||||||
4 | "a" """b
|
4 | "a" """b
|
||||||
5 | c""" "d
|
5 | c""" "d
|
||||||
|
|
|
|
||||||
|
@ -45,7 +45,7 @@ ISC_syntax_error.py:5:6: SyntaxError: missing closing quote in string literal
|
||||||
3 | "a" "b" "c
|
3 | "a" "b" "c
|
||||||
4 | "a" """b
|
4 | "a" """b
|
||||||
5 | c""" "d
|
5 | c""" "d
|
||||||
| ^
|
| ^^
|
||||||
6 |
|
6 |
|
||||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||||
|
|
|
|
||||||
|
@ -107,19 +107,21 @@ ISC_syntax_error.py:16:5: SyntaxError: missing closing quote in string literal
|
||||||
14 | (
|
14 | (
|
||||||
15 | "a"
|
15 | "a"
|
||||||
16 | "b
|
16 | "b
|
||||||
| ^
|
| ^^
|
||||||
17 | "c"
|
17 | "c"
|
||||||
18 | "d"
|
18 | "d"
|
||||||
|
|
|
|
||||||
|
|
||||||
ISC_syntax_error.py:26:9: SyntaxError: f-string: unterminated triple-quoted string
|
ISC_syntax_error.py:26:9: SyntaxError: f-string: unterminated triple-quoted string
|
||||||
|
|
|
|
||||||
24 | (
|
24 | (
|
||||||
25 | """abc"""
|
25 | """abc"""
|
||||||
26 | f"""def
|
26 | f"""def
|
||||||
| ^
|
| _________^
|
||||||
27 | "g" "h"
|
27 | | "g" "h"
|
||||||
28 | "i" "j"
|
28 | | "i" "j"
|
||||||
|
29 | | )
|
||||||
|
| |__^
|
||||||
|
|
|
|
||||||
|
|
||||||
ISC_syntax_error.py:30:1: SyntaxError: unexpected EOF while parsing
|
ISC_syntax_error.py:30:1: SyntaxError: unexpected EOF while parsing
|
||||||
|
|
|
@@ -290,7 +290,6 @@ mod tests {
    use test_case::test_case;

    use ruff_python_semantic::{MemberNameImport, ModuleNameImport, NameImport};
-    use ruff_text_size::Ranged;

    use crate::assert_diagnostics;
    use crate::registry::Rule;
@@ -658,7 +657,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -686,7 +685,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -716,7 +715,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -744,7 +743,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -766,7 +765,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -786,7 +785,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -1130,7 +1129,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -1155,7 +1154,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -1177,7 +1176,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }
@@ -1198,7 +1197,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(&*snapshot, diagnostics);
        Ok(())
    }
@@ -1217,7 +1216,7 @@ mod tests {
                ..LinterSettings::for_rule(Rule::UnsortedImports)
            },
        )?;
-        diagnostics.sort_by_key(Ranged::start);
+        diagnostics.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        assert_diagnostics!(snapshot, diagnostics);
        Ok(())
    }

@@ -168,7 +168,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                    WhitespaceAfterOpenBracket { symbol },
                    TextRange::at(token.end(), trailing_len),
                ) {
-                    let range = diagnostic.range();
+                    let range = diagnostic.expect_range();
                    diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                }
            }
@@ -182,7 +182,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                    WhitespaceBeforeCloseBracket { symbol },
                    TextRange::at(token.start() - offset, offset),
                ) {
-                    let range = diagnostic.range();
+                    let range = diagnostic.expect_range();
                    diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                }
            }
@@ -210,7 +210,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                            TextRange::at(token.start() - offset, offset),
                        )
                        {
-                            let range = diagnostic.range();
+                            let range = diagnostic.expect_range();
                            diagnostic
                                .set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                        }
@@ -227,7 +227,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                            TextRange::at(token.start() - offset, offset),
                        )
                        {
-                            let range = diagnostic.range();
+                            let range = diagnostic.expect_range();
                            diagnostic.set_fix(Fix::safe_edit(
                                Edit::range_deletion(range),
                            ));
@@ -255,7 +255,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                            TextRange::at(token.start() - offset, offset),
                        )
                        {
-                            let range = diagnostic.range();
+                            let range = diagnostic.expect_range();
                            diagnostic.set_fix(Fix::safe_edits(
                                Edit::range_deletion(range),
                                [Edit::insertion(
@@ -278,7 +278,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                            TextRange::at(token.start() - offset, offset),
                        )
                        {
-                            let range = diagnostic.range();
+                            let range = diagnostic.expect_range();
                            diagnostic.set_fix(Fix::safe_edit(
                                Edit::range_deletion(range),
                            ));
@@ -297,7 +297,7 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &LintContext) {
                        WhitespaceBeforePunctuation { symbol },
                        TextRange::at(token.start() - offset, offset),
                    ) {
-                        let range = diagnostic.range();
+                        let range = diagnostic.expect_range();
                        diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
                    }
                }

@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
E11.py:3:1: E111 Indentation is not a multiple of 4
|
E11.py:3:1: E111 Indentation is not a multiple of 4
|
||||||
|
|
|
|
||||||
|
@ -27,7 +26,7 @@ E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
7 | #: E112
|
7 | #: E112
|
||||||
8 | if False:
|
8 | if False:
|
||||||
9 | print()
|
9 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
|
|
|
|
||||||
|
@ -37,7 +36,7 @@ E11.py:12:1: SyntaxError: Unexpected indentation
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
12 | print()
|
12 | print()
|
||||||
| ^
|
| ^^^^
|
||||||
13 | #: E114 E116
|
13 | #: E114 E116
|
||||||
14 | mimetype = 'application/x-directory'
|
14 | mimetype = 'application/x-directory'
|
||||||
|
|
|
|
||||||
|
@ -57,7 +56,7 @@ E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
43 | #: E112
|
43 | #: E112
|
||||||
44 | if False: #
|
44 | if False: #
|
||||||
45 | print()
|
45 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
46 | #:
|
46 | #:
|
||||||
47 | if False:
|
47 | if False:
|
||||||
|
|
|
|
||||||
|
|
|
@ -16,7 +16,7 @@ E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
7 | #: E112
|
7 | #: E112
|
||||||
8 | if False:
|
8 | if False:
|
||||||
9 | print()
|
9 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
|
|
|
|
||||||
|
@ -26,7 +26,7 @@ E11.py:12:1: SyntaxError: Unexpected indentation
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
12 | print()
|
12 | print()
|
||||||
| ^
|
| ^^^^
|
||||||
13 | #: E114 E116
|
13 | #: E114 E116
|
||||||
14 | mimetype = 'application/x-directory'
|
14 | mimetype = 'application/x-directory'
|
||||||
|
|
|
|
||||||
|
@ -56,7 +56,7 @@ E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
43 | #: E112
|
43 | #: E112
|
||||||
44 | if False: #
|
44 | if False: #
|
||||||
45 | print()
|
45 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
46 | #:
|
46 | #:
|
||||||
47 | if False:
|
47 | if False:
|
||||||
|
|
|
|
||||||
|
|
|
@ -1,13 +1,12 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
|
|
|
|
||||||
7 | #: E112
|
7 | #: E112
|
||||||
8 | if False:
|
8 | if False:
|
||||||
9 | print()
|
9 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
|
|
|
|
||||||
|
@ -27,7 +26,7 @@ E11.py:12:1: SyntaxError: Unexpected indentation
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
12 | print()
|
12 | print()
|
||||||
| ^
|
| ^^^^
|
||||||
13 | #: E114 E116
|
13 | #: E114 E116
|
||||||
14 | mimetype = 'application/x-directory'
|
14 | mimetype = 'application/x-directory'
|
||||||
|
|
|
|
||||||
|
@ -47,7 +46,7 @@ E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
43 | #: E112
|
43 | #: E112
|
||||||
44 | if False: #
|
44 | if False: #
|
||||||
45 | print()
|
45 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
46 | #:
|
46 | #:
|
||||||
47 | if False:
|
47 | if False:
|
||||||
|
|
|
|
||||||
|
|
|
@ -1,13 +1,12 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
|
|
|
|
||||||
7 | #: E112
|
7 | #: E112
|
||||||
8 | if False:
|
8 | if False:
|
||||||
9 | print()
|
9 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
|
|
|
|
||||||
|
@ -17,7 +16,7 @@ E11.py:12:1: SyntaxError: Unexpected indentation
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
12 | print()
|
12 | print()
|
||||||
| ^
|
| ^^^^
|
||||||
13 | #: E114 E116
|
13 | #: E114 E116
|
||||||
14 | mimetype = 'application/x-directory'
|
14 | mimetype = 'application/x-directory'
|
||||||
|
|
|
|
||||||
|
@ -47,7 +46,7 @@ E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
43 | #: E112
|
43 | #: E112
|
||||||
44 | if False: #
|
44 | if False: #
|
||||||
45 | print()
|
45 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
46 | #:
|
46 | #:
|
||||||
47 | if False:
|
47 | if False:
|
||||||
|
|
|
|
||||||
|
|
|
@ -6,7 +6,7 @@ E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
7 | #: E112
|
7 | #: E112
|
||||||
8 | if False:
|
8 | if False:
|
||||||
9 | print()
|
9 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
|
|
|
|
||||||
|
@ -16,7 +16,7 @@ E11.py:12:1: SyntaxError: Unexpected indentation
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
12 | print()
|
12 | print()
|
||||||
| ^
|
| ^^^^
|
||||||
13 | #: E114 E116
|
13 | #: E114 E116
|
||||||
14 | mimetype = 'application/x-directory'
|
14 | mimetype = 'application/x-directory'
|
||||||
|
|
|
|
||||||
|
@ -96,7 +96,7 @@ E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
43 | #: E112
|
43 | #: E112
|
||||||
44 | if False: #
|
44 | if False: #
|
||||||
45 | print()
|
45 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
46 | #:
|
46 | #:
|
||||||
47 | if False:
|
47 | if False:
|
||||||
|
|
|
|
||||||
|
|
|
@ -1,13 +1,12 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
|
|
|
|
||||||
7 | #: E112
|
7 | #: E112
|
||||||
8 | if False:
|
8 | if False:
|
||||||
9 | print()
|
9 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
|
|
|
|
||||||
|
@ -17,7 +16,7 @@ E11.py:12:1: SyntaxError: Unexpected indentation
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
12 | print()
|
12 | print()
|
||||||
| ^
|
| ^^^^
|
||||||
13 | #: E114 E116
|
13 | #: E114 E116
|
||||||
14 | mimetype = 'application/x-directory'
|
14 | mimetype = 'application/x-directory'
|
||||||
|
|
|
|
||||||
|
@ -77,7 +76,7 @@ E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
43 | #: E112
|
43 | #: E112
|
||||||
44 | if False: #
|
44 | if False: #
|
||||||
45 | print()
|
45 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
46 | #:
|
46 | #:
|
||||||
47 | if False:
|
47 | if False:
|
||||||
|
|
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
E11.py:6:1: E117 Over-indented
|
E11.py:6:1: E117 Over-indented
|
||||||
|
|
|
|
||||||
|
@ -17,7 +16,7 @@ E11.py:9:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
7 | #: E112
|
7 | #: E112
|
||||||
8 | if False:
|
8 | if False:
|
||||||
9 | print()
|
9 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
|
|
|
|
||||||
|
@ -27,7 +26,7 @@ E11.py:12:1: SyntaxError: Unexpected indentation
|
||||||
10 | #: E113
|
10 | #: E113
|
||||||
11 | print()
|
11 | print()
|
||||||
12 | print()
|
12 | print()
|
||||||
| ^
|
| ^^^^
|
||||||
13 | #: E114 E116
|
13 | #: E114 E116
|
||||||
14 | mimetype = 'application/x-directory'
|
14 | mimetype = 'application/x-directory'
|
||||||
|
|
|
|
||||||
|
@ -67,7 +66,7 @@ E11.py:45:1: SyntaxError: Expected an indented block after `if` statement
|
||||||
43 | #: E112
|
43 | #: E112
|
||||||
44 | if False: #
|
44 | if False: #
|
||||||
45 | print()
|
45 | print()
|
||||||
| ^
|
| ^^^^^
|
||||||
46 | #:
|
46 | #:
|
||||||
47 | if False:
|
47 | if False:
|
||||||
|
|
|
|
||||||
|
|
Binary file not shown.
|
@@ -11,6 +11,7 @@ mod tests {

    use anyhow::Result;
    use regex::Regex;
+    use ruff_db::diagnostic::Diagnostic;
    use ruff_python_parser::ParseOptions;
    use rustc_hash::FxHashMap;
    use test_case::test_case;
@@ -19,7 +20,6 @@ mod tests {
    use ruff_python_codegen::Stylist;
    use ruff_python_index::Indexer;
    use ruff_python_trivia::textwrap::dedent;
-    use ruff_text_size::Ranged;

    use crate::linter::check_path;
    use crate::registry::{Linter, Rule};
@@ -29,7 +29,7 @@ mod tests {
    use crate::settings::{LinterSettings, flags};
    use crate::source_kind::SourceKind;
    use crate::test::{test_contents, test_path, test_snippet};
-    use crate::{Locator, OldDiagnostic, assert_diagnostics, directives};
+    use crate::{Locator, assert_diagnostics, directives};

    #[test_case(Rule::UnusedImport, Path::new("F401_0.py"))]
    #[test_case(Rule::UnusedImport, Path::new("F401_1.py"))]
@@ -771,11 +771,11 @@ mod tests {
            &parsed,
            target_version,
        );
-        messages.sort_by_key(Ranged::start);
+        messages.sort_by_key(|diagnostic| diagnostic.expect_range().start());
        let actual = messages
            .iter()
            .filter(|msg| !msg.is_syntax_error())
-            .map(OldDiagnostic::name)
+            .map(Diagnostic::name)
            .collect::<Vec<_>>();
        let expected: Vec<_> = expected.iter().map(|rule| rule.name().as_str()).collect();
        assert_eq!(actual, expected);

@ -1,6 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_linter/src/rules/pylint/mod.rs
|
source: crates/ruff_linter/src/rules/pylint/mod.rs
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
invalid_characters_syntax_error.py:5:6: PLE2510 Invalid unescaped character backspace, use "\b" instead
|
invalid_characters_syntax_error.py:5:6: PLE2510 Invalid unescaped character backspace, use "\b" instead
|
||||||
|
|
|
|
||||||
|
@ -17,7 +16,7 @@ invalid_characters_syntax_error.py:7:5: SyntaxError: missing closing quote in st
|
||||||
5 | b = '␈'
|
5 | b = '␈'
|
||||||
6 | # Unterminated string
|
6 | # Unterminated string
|
||||||
7 | b = '␈
|
7 | b = '␈
|
||||||
| ^
|
| ^^
|
||||||
8 | b = '␈'
|
8 | b = '␈'
|
||||||
9 | # Unterminated f-string
|
9 | # Unterminated f-string
|
||||||
|
|
|
|
||||||
|
@ -99,7 +98,7 @@ invalid_characters_syntax_error.py:13:14: SyntaxError: missing closing quote in
|
||||||
11 | b = f'␈'
|
11 | b = f'␈'
|
||||||
12 | # Implicitly concatenated
|
12 | # Implicitly concatenated
|
||||||
13 | b = '␈' f'␈' '␈
|
13 | b = '␈' f'␈' '␈
|
||||||
| ^
|
| ^^
|
||||||
|
|
|
|
||||||
|
|
||||||
invalid_characters_syntax_error.py:13:16: SyntaxError: Expected a statement
|
invalid_characters_syntax_error.py:13:16: SyntaxError: Expected a statement
|
||||||
|
|
|
@ -128,7 +128,7 @@ pub(crate) fn post_init_default(checker: &Checker, function_def: &ast::StmtFunct
|
||||||
// Need to stop fixes as soon as there is a parameter we cannot fix.
|
// Need to stop fixes as soon as there is a parameter we cannot fix.
|
||||||
// Otherwise, we risk a syntax error (a parameter without a default
|
// Otherwise, we risk a syntax error (a parameter without a default
|
||||||
// following parameter with a default).
|
// following parameter with a default).
|
||||||
stopped_fixes |= diagnostic.fix.is_none();
|
stopped_fixes |= diagnostic.fix().is_none();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -7,9 +7,9 @@ use std::path::Path;
 #[cfg(not(fuzzing))]
 use anyhow::Result;
 use itertools::Itertools;
-use ruff_text_size::Ranged;
 use rustc_hash::FxHashMap;
 
+use ruff_db::diagnostic::Diagnostic;
 use ruff_notebook::Notebook;
 #[cfg(not(fuzzing))]
 use ruff_notebook::NotebookError;

@@ -23,7 +23,7 @@ use ruff_source_file::SourceFileBuilder;
 use crate::codes::Rule;
 use crate::fix::{FixResult, fix_file};
 use crate::linter::check_path;
-use crate::message::{Emitter, EmitterContext, OldDiagnostic, TextEmitter};
+use crate::message::{Emitter, EmitterContext, TextEmitter, create_syntax_error_diagnostic};
 use crate::package::PackageRoot;
 use crate::packaging::detect_package_root;
 use crate::settings::types::UnsafeFixes;

@@ -42,7 +42,7 @@ pub(crate) fn test_resource_path(path: impl AsRef<Path>) -> std::path::PathBuf {
 pub(crate) fn test_path(
 path: impl AsRef<Path>,
 settings: &LinterSettings,
-) -> Result<Vec<OldDiagnostic>> {
+) -> Result<Vec<Diagnostic>> {
 let path = test_resource_path("fixtures").join(path);
 let source_type = PySourceType::from(&path);
 let source_kind = SourceKind::from_path(path.as_ref(), source_type)?.expect("valid source");

@@ -51,7 +51,7 @@ pub(crate) fn test_path(
 
 #[cfg(not(fuzzing))]
 pub(crate) struct TestedNotebook {
-pub(crate) diagnostics: Vec<OldDiagnostic>,
+pub(crate) diagnostics: Vec<Diagnostic>,
 pub(crate) source_notebook: Notebook,
 pub(crate) linted_notebook: Notebook,
 }

@@ -87,7 +87,7 @@ pub(crate) fn assert_notebook_path(
 }
 
 /// Run [`check_path`] on a snippet of Python code.
-pub fn test_snippet(contents: &str, settings: &LinterSettings) -> Vec<OldDiagnostic> {
+pub fn test_snippet(contents: &str, settings: &LinterSettings) -> Vec<Diagnostic> {
 let path = Path::new("<filename>");
 let contents = dedent(contents);
 test_contents(&SourceKind::Python(contents.into_owned()), path, settings).0

@@ -111,7 +111,7 @@ pub(crate) fn test_contents<'a>(
 source_kind: &'a SourceKind,
 path: &Path,
 settings: &LinterSettings,
-) -> (Vec<OldDiagnostic>, Cow<'a, SourceKind>) {
+) -> (Vec<Diagnostic>, Cow<'a, SourceKind>) {
 let source_type = PySourceType::from(path);
 let target_version = settings.resolve_target_version(path);
 let options =

@@ -211,8 +211,7 @@ pub(crate) fn test_contents<'a>(
 if parsed.has_invalid_syntax() && !source_has_errors {
 // Previous fix introduced a syntax error, abort
 let fixes = print_diagnostics(messages, path, source_kind);
-let syntax_errors =
-print_syntax_errors(parsed.errors(), path, &locator, &transformed);
+let syntax_errors = print_syntax_errors(parsed.errors(), path, &transformed);
 
 panic!(
 "Fixed source has a syntax error where the source document does not. This is a bug in one of the generated fixes:

@@ -280,9 +279,9 @@ Either ensure you always emit a fix or change `Violation::FIX_AVAILABILITY` to e
 
 // Not strictly necessary but adds some coverage for this code path by overriding the
 // noqa offset and the source file
-let range = diagnostic.range();
-diagnostic.noqa_offset = Some(directives.noqa_line_for.resolve(range.start()));
-if let Some(annotation) = diagnostic.diagnostic.primary_annotation_mut() {
+let range = diagnostic.expect_range();
+diagnostic.set_noqa_offset(directives.noqa_line_for.resolve(range.start()));
+if let Some(annotation) = diagnostic.primary_annotation_mut() {
 annotation.set_span(
 ruff_db::diagnostic::Span::from(source_code.clone()).with_range(range),
 );

@@ -291,26 +290,21 @@ Either ensure you always emit a fix or change `Violation::FIX_AVAILABILITY` to e
 diagnostic
 })
 .chain(parsed.errors().iter().map(|parse_error| {
-OldDiagnostic::from_parse_error(parse_error, &locator, source_code.clone())
+create_syntax_error_diagnostic(source_code.clone(), &parse_error.error, parse_error)
 }))
 .sorted()
 .collect();
 (messages, transformed)
 }
 
-fn print_syntax_errors(
-errors: &[ParseError],
-path: &Path,
-locator: &Locator,
-source: &SourceKind,
-) -> String {
+fn print_syntax_errors(errors: &[ParseError], path: &Path, source: &SourceKind) -> String {
 let filename = path.file_name().unwrap().to_string_lossy();
 let source_file = SourceFileBuilder::new(filename.as_ref(), source.source_code()).finish();
 
 let messages: Vec<_> = errors
 .iter()
 .map(|parse_error| {
-OldDiagnostic::from_parse_error(parse_error, locator, source_file.clone())
+create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
 })
 .collect();

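Note: both call sites above move from `OldDiagnostic::from_parse_error` to the new `create_syntax_error_diagnostic` helper in `ruff_linter::message`, which, as used here, appears to take the source file (as a span), the parse error's message, and the parse error itself for the range. A rough sketch of that pattern in isolation, assuming code inside the `ruff_linter` crate; the helper name and argument order simply follow the calls in this diff:

    use ruff_db::diagnostic::Diagnostic;
    use ruff_python_parser::ParseError;
    use ruff_source_file::SourceFile;

    use crate::message::create_syntax_error_diagnostic;

    // Sketch only: convert parser errors into syntax-error diagnostics for a
    // single source file, as `test_contents` and `print_syntax_errors` do above.
    fn syntax_error_diagnostics(errors: &[ParseError], source_file: &SourceFile) -> Vec<Diagnostic> {
        errors
            .iter()
            .map(|parse_error| {
                create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
            })
            .collect()
    }
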
@@ -321,12 +315,8 @@ fn print_syntax_errors(
 }
 }
 
-/// Print the [`Message::Diagnostic`]s in `messages`.
-fn print_diagnostics(
-mut diagnostics: Vec<OldDiagnostic>,
-path: &Path,
-source: &SourceKind,
-) -> String {
+/// Print the lint diagnostics in `diagnostics`.
+fn print_diagnostics(mut diagnostics: Vec<Diagnostic>, path: &Path, source: &SourceKind) -> String {
 diagnostics.retain(|msg| !msg.is_syntax_error());
 
 if let Some(notebook) = source.as_ipy_notebook() {

@@ -337,7 +327,7 @@ fn print_diagnostics(
 }
 
 pub(crate) fn print_jupyter_messages(
-diagnostics: &[OldDiagnostic],
+diagnostics: &[Diagnostic],
 path: &Path,
 notebook: &Notebook,
 ) -> String {

@@ -361,7 +351,7 @@ pub(crate) fn print_jupyter_messages(
 String::from_utf8(output).unwrap()
 }
 
-pub(crate) fn print_messages(diagnostics: &[OldDiagnostic]) -> String {
+pub(crate) fn print_messages(diagnostics: &[Diagnostic]) -> String {
 let mut output = Vec::new();
 
 TextEmitter::default()

@@ -42,6 +42,12 @@ impl From<LexicalError> for ParseError {
 }
 }
 
+impl Ranged for ParseError {
+fn range(&self) -> TextRange {
+self.location
+}
+}
+
 impl ParseError {
 pub fn error(self) -> ParseErrorType {
 self.error

@@ -981,6 +981,12 @@ impl Display for SemanticSyntaxError {
 }
 }
 
+impl Ranged for SemanticSyntaxError {
+fn range(&self) -> TextRange {
+self.range
+}
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize)]
 pub enum SemanticSyntaxErrorKind {
 /// Represents the use of a `__future__` import after the beginning of a file.

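Note: with `Ranged` implemented for `ParseError` and `SemanticSyntaxError`, both error types can flow through generic range-based helpers. For illustration only (not part of this commit), assuming the `ruff_text_size` crate:

    use ruff_text_size::{Ranged, TextSize};

    // Sketch only: a helper generic over `Ranged`, usable with `ParseError`
    // and `SemanticSyntaxError` once the impls above are in place.
    fn error_width<T: Ranged>(error: &T) -> TextSize {
        error.range().len()
    }
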
@@ -13,6 +13,7 @@ license = { workspace = true }
 [lib]
 
 [dependencies]
+ruff_db = { workspace = true }
 ruff_diagnostics = { workspace = true }
 ruff_formatter = { workspace = true }
 ruff_linter = { workspace = true }

@@ -9,13 +9,13 @@ use crate::{
 resolve::is_document_excluded_for_linting,
 session::DocumentQuery,
 };
+use ruff_db::diagnostic::Diagnostic;
 use ruff_diagnostics::{Applicability, Edit, Fix};
 use ruff_linter::{
 Locator,
 directives::{Flags, extract_directives},
 generate_noqa_edits,
 linter::check_path,
-message::OldDiagnostic,
 package::PackageRoot,
 packaging::detect_package_root,
 settings::flags,

@@ -228,13 +228,13 @@ pub(crate) fn fixes_for_diagnostics(
 /// Generates an LSP diagnostic with an associated cell index for the diagnostic to go in.
 /// If the source kind is a text document, the cell index will always be `0`.
 fn to_lsp_diagnostic(
-diagnostic: &OldDiagnostic,
+diagnostic: &Diagnostic,
 noqa_edit: Option<Edit>,
 source_kind: &SourceKind,
 index: &LineIndex,
 encoding: PositionEncoding,
 ) -> (usize, lsp_types::Diagnostic) {
-let diagnostic_range = diagnostic.range();
+let diagnostic_range = diagnostic.expect_range();
 let name = diagnostic.name();
 let body = diagnostic.body().to_string();
 let fix = diagnostic.fix();

@@ -210,8 +210,8 @@ impl Workspace {
 .map(|msg| ExpandedMessage {
 code: msg.secondary_code().map(ToString::to_string),
 message: msg.body().to_string(),
-start_location: source_code.line_column(msg.start()).into(),
-end_location: source_code.line_column(msg.end()).into(),
+start_location: source_code.line_column(msg.expect_range().start()).into(),
+end_location: source_code.line_column(msg.expect_range().end()).into(),
 fix: msg.fix().map(|fix| ExpandedFix {
 message: msg.suggestion().map(ToString::to_string),
 edits: fix

@@ -5,10 +5,7 @@ pub use db::{CheckMode, Db, ProjectDatabase, SalsaMemoryDump};
 use files::{Index, Indexed, IndexedFiles};
 use metadata::settings::Settings;
 pub use metadata::{ProjectMetadata, ProjectMetadataError};
-use ruff_db::diagnostic::{
-Annotation, Diagnostic, DiagnosticId, Severity, Span, SubDiagnostic, create_parse_diagnostic,
-create_unsupported_syntax_diagnostic,
-};
+use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span, SubDiagnostic};
 use ruff_db::files::File;
 use ruff_db::parsed::parsed_module;
 use ruff_db::source::{SourceTextError, source_text};

@@ -503,11 +500,11 @@ impl Project {
 parsed_ref
 .errors()
 .iter()
-.map(|error| create_parse_diagnostic(file, error)),
+.map(|error| Diagnostic::syntax_error(file, &error.error, error)),
 );
 
 diagnostics.extend(parsed_ref.unsupported_syntax_errors().iter().map(|error| {
-let mut error = create_unsupported_syntax_diagnostic(file, error);
+let mut error = Diagnostic::syntax_error(file, error, error);
 add_inferred_python_version_hint_to_diagnostic(db, &mut error, "parsing syntax");
 error
 }));

@@ -11,9 +11,7 @@ use diagnostic::{
 INVALID_CONTEXT_MANAGER, INVALID_SUPER_ARGUMENT, NOT_ITERABLE, POSSIBLY_UNBOUND_IMPLICIT_CALL,
 UNAVAILABLE_IMPLICIT_SUPER_ARGUMENTS,
 };
-use ruff_db::diagnostic::{
-Annotation, Severity, Span, SubDiagnostic, create_semantic_syntax_diagnostic,
-};
+use ruff_db::diagnostic::{Annotation, Diagnostic, Severity, Span, SubDiagnostic};
 use ruff_db::files::File;
 use ruff_python_ast::name::Name;
 use ruff_python_ast::{self as ast, AnyNodeRef};

@@ -104,7 +102,7 @@ pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
 index
 .semantic_syntax_errors()
 .iter()
-.map(|error| create_semantic_syntax_diagnostic(file, error)),
+.map(|error| Diagnostic::syntax_error(file, error, error)),
 );
 
 check_suppressions(db, file, &mut diagnostics);

@@ -6,10 +6,7 @@ use colored::Colorize;
 use config::SystemKind;
 use parser as test_parser;
 use ruff_db::Db as _;
-use ruff_db::diagnostic::{
-Diagnostic, DisplayDiagnosticConfig, create_parse_diagnostic,
-create_unsupported_syntax_diagnostic,
-};
+use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig};
 use ruff_db::files::{File, system_path_to_file};
 use ruff_db::panic::catch_unwind;
 use ruff_db::parsed::parsed_module;

@@ -325,14 +322,14 @@ fn run_test(
 let mut diagnostics: Vec<Diagnostic> = parsed
 .errors()
 .iter()
-.map(|error| create_parse_diagnostic(test_file.file, error))
+.map(|error| Diagnostic::syntax_error(test_file.file, &error.error, error))
 .collect();
 
 diagnostics.extend(
 parsed
 .unsupported_syntax_errors()
 .iter()
-.map(|error| create_unsupported_syntax_diagnostic(test_file.file, error)),
+.map(|error| Diagnostic::syntax_error(test_file.file, error, error)),
 );
 
 let mdtest_result = attempt_test(db, check_types, test_file, "run mdtest", None);