Simplify LinterResult, avoid cloning ParseError (#11903)

## Summary

Follow-up to #11902

This PR simplifies the `LinterResult` struct by avoiding the generic and
not storing the `ParseError`.

This is possible because the callers already have access to the
`ParseError` via the `Parsed` output. This also means that we can
simplify the return type of `check_path` and avoid the generic `T` on
`LinterResult`.

## Test Plan

`cargo insta test`
This commit is contained in:
Dhruv Manilawala 2024-06-27 07:56:08 +05:30 committed by Micha Reiser
parent 73851e73ab
commit 72b6c26101
10 changed files with 96 additions and 99 deletions

View file

@ -264,8 +264,8 @@ pub(crate) fn lint_path(
// Lint the file.
let (
LinterResult {
data: messages,
error: parse_error,
messages,
has_syntax_error: has_error,
},
transformed,
fixed,
@ -334,7 +334,7 @@ pub(crate) fn lint_path(
if let Some((cache, relative_path, key)) = caching {
// We don't cache parsing errors.
if parse_error.is_none() {
if !has_error {
// `FixMode::Apply` and `FixMode::Diff` rely on side-effects (writing to disk,
// and writing the diff to stdout, respectively). If a file has diagnostics, we
// need to avoid reading from and writing to the cache in these modes.
@ -400,7 +400,7 @@ pub(crate) fn lint_stdin(
};
// Lint the inputs.
let (LinterResult { data: messages, .. }, transformed, fixed) =
let (LinterResult { messages, .. }, transformed, fixed) =
if matches!(fix_mode, flags::FixMode::Apply | flags::FixMode::Diff) {
if let Ok(FixerResult {
result,

View file

@ -73,7 +73,7 @@ fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
);
// Assert that file contains no parse errors
assert_eq!(result.error, None);
assert!(!result.has_syntax_error);
},
criterion::BatchSize::SmallInput,
);

View file

@ -35,29 +35,19 @@ use crate::settings::{flags, LinterSettings};
use crate::source_kind::SourceKind;
use crate::{directives, fs};
/// A [`Result`]-like type that returns both data and an error. Used to return
/// diagnostics even in the face of parse errors, since many diagnostics can be
/// generated without a full AST.
pub struct LinterResult<T> {
pub data: T,
pub error: Option<ParseError>,
}
impl<T> LinterResult<T> {
const fn new(data: T, error: Option<ParseError>) -> Self {
Self { data, error }
}
fn map<U, F: FnOnce(T) -> U>(self, f: F) -> LinterResult<U> {
LinterResult::new(f(self.data), self.error)
}
pub struct LinterResult {
/// A collection of diagnostic messages generated by the linter.
pub messages: Vec<Message>,
/// A flag indicating the presence of syntax errors in the source file.
/// If `true`, at least one syntax error was detected in the source file.
pub has_syntax_error: bool,
}
pub type FixTable = FxHashMap<Rule, usize>;
pub struct FixerResult<'a> {
/// The result returned by the linter, after applying any fixes.
pub result: LinterResult<Vec<Message>>,
pub result: LinterResult,
/// The resulting source code, after applying any fixes.
pub transformed: Cow<'a, SourceKind>,
/// The number of fixes applied for each [`Rule`].
@ -79,7 +69,7 @@ pub fn check_path(
source_kind: &SourceKind,
source_type: PySourceType,
parsed: &Parsed<ModModule>,
) -> LinterResult<Vec<Diagnostic>> {
) -> Vec<Diagnostic> {
// Aggregate all diagnostics.
let mut diagnostics = vec![];
@ -317,7 +307,7 @@ pub fn check_path(
}
}
LinterResult::new(diagnostics, parsed.errors().iter().next().cloned())
diagnostics
}
const MAX_ITERATIONS: usize = 100;
@ -351,9 +341,7 @@ pub fn add_noqa_to_path(
);
// Generate diagnostics, ignoring any existing `noqa` directives.
let LinterResult {
data: diagnostics, ..
} = check_path(
let diagnostics = check_path(
path,
package,
&locator,
@ -390,7 +378,7 @@ pub fn lint_only(
source_kind: &SourceKind,
source_type: PySourceType,
source: ParseSource,
) -> LinterResult<Vec<Message>> {
) -> LinterResult {
let parsed = source.into_parsed(source_kind, source_type);
// Map row and column locations to byte slices (lazily).
@ -411,7 +399,7 @@ pub fn lint_only(
);
// Generate diagnostics.
let result = check_path(
let diagnostics = check_path(
path,
package,
&locator,
@ -425,9 +413,16 @@ pub fn lint_only(
&parsed,
);
result.map(|diagnostics| {
diagnostics_to_messages(diagnostics, parsed.errors(), path, &locator, &directives)
})
LinterResult {
messages: diagnostics_to_messages(
diagnostics,
parsed.errors(),
path,
&locator,
&directives,
),
has_syntax_error: !parsed.is_valid(),
}
}
/// Convert from diagnostics to messages.
@ -479,8 +474,8 @@ pub fn lint_fix<'a>(
// As an escape hatch, bail after 100 iterations.
let mut iterations = 0;
// Track whether the _initial_ source code was parseable.
let mut parseable = false;
// Track whether the _initial_ source code is valid syntax.
let mut is_valid_syntax = false;
// Continuously fix until the source code stabilizes.
loop {
@ -506,7 +501,7 @@ pub fn lint_fix<'a>(
);
// Generate diagnostics.
let result = check_path(
let diagnostics = check_path(
path,
package,
&locator,
@ -521,28 +516,30 @@ pub fn lint_fix<'a>(
);
if iterations == 0 {
parseable = result.error.is_none();
is_valid_syntax = parsed.is_valid();
} else {
// If the source code was parseable on the first pass, but is no
// longer parseable on a subsequent pass, then we've introduced a
// syntax error. Return the original code.
if parseable && result.error.is_some() {
if is_valid_syntax {
if let Some(error) = parsed.errors().first() {
report_fix_syntax_error(
path,
transformed.source_code(),
&result.error.unwrap(),
error,
fixed.keys().copied(),
);
return Err(anyhow!("Fix introduced a syntax error"));
}
}
}
// Apply fix.
if let Some(FixResult {
code: fixed_contents,
fixes: applied,
source_map,
}) = fix_file(&result.data, &locator, unsafe_fixes)
}) = fix_file(&diagnostics, &locator, unsafe_fixes)
{
if iterations < MAX_ITERATIONS {
// Count the number of fixed errors.
@ -559,13 +556,20 @@ pub fn lint_fix<'a>(
continue;
}
report_failed_to_converge_error(path, transformed.source_code(), &result.data);
report_failed_to_converge_error(path, transformed.source_code(), &diagnostics);
}
return Ok(FixerResult {
result: result.map(|diagnostics| {
diagnostics_to_messages(diagnostics, parsed.errors(), path, &locator, &directives)
}),
result: LinterResult {
messages: diagnostics_to_messages(
diagnostics,
parsed.errors(),
path,
&locator,
&directives,
),
has_syntax_error: !is_valid_syntax,
},
transformed,
fixed,
});

View file

@ -22,7 +22,7 @@ mod tests {
use ruff_source_file::Locator;
use ruff_text_size::Ranged;
use crate::linter::{check_path, LinterResult};
use crate::linter::check_path;
use crate::registry::{AsRule, Linter, Rule};
use crate::rules::pyflakes;
use crate::settings::types::PreviewMode;
@ -650,10 +650,7 @@ mod tests {
&locator,
&indexer,
);
let LinterResult {
data: mut diagnostics,
..
} = check_path(
let mut diagnostics = check_path(
Path::new("<filename>"),
None,
&locator,

View file

@ -23,7 +23,7 @@ use ruff_text_size::Ranged;
use crate::directives;
use crate::fix::{fix_file, FixResult};
use crate::linter::{check_path, LinterResult};
use crate::linter::check_path;
use crate::message::{Emitter, EmitterContext, Message, TextEmitter};
use crate::packaging::detect_package_root;
use crate::registry::AsRule;
@ -119,10 +119,7 @@ pub(crate) fn test_contents<'a>(
&locator,
&indexer,
);
let LinterResult {
data: diagnostics,
error,
} = check_path(
let diagnostics = check_path(
path,
path.parent()
.and_then(|parent| detect_package_root(parent, &settings.namespace_packages)),
@ -137,7 +134,7 @@ pub(crate) fn test_contents<'a>(
&parsed,
);
let source_has_errors = error.is_some();
let source_has_errors = !parsed.is_valid();
// Detect fixes that don't converge after multiple iterations.
let mut iterations = 0;
@ -186,10 +183,7 @@ pub(crate) fn test_contents<'a>(
&indexer,
);
let LinterResult {
data: fixed_diagnostics,
..
} = check_path(
let fixed_diagnostics = check_path(
path,
None,
&locator,

View file

@ -68,7 +68,10 @@ pub(crate) fn fix_all(
// which is inconsistent with how `ruff check --fix` works.
let FixerResult {
transformed,
result: LinterResult { error, .. },
result: LinterResult {
has_syntax_error: has_error,
..
},
..
} = ruff_linter::linter::lint_fix(
&query.virtual_file_path(),
@ -80,11 +83,9 @@ pub(crate) fn fix_all(
source_type,
)?;
if let Some(error) = error {
if has_error {
// abort early if a parsing error occurred
return Err(anyhow::anyhow!(
"A parsing error occurred during `fix_all`: {error}"
));
return Err(anyhow::anyhow!("A parsing error occurred during `fix_all`"));
}
// fast path: if `transformed` is still borrowed, no changes were made and we can return early

View file

@ -8,7 +8,7 @@ use ruff_diagnostics::{Applicability, Diagnostic, DiagnosticKind, Edit, Fix};
use ruff_linter::{
directives::{extract_directives, Flags},
generate_noqa_edits,
linter::{check_path, LinterResult},
linter::check_path,
packaging::detect_package_root,
registry::AsRule,
settings::flags,
@ -58,9 +58,9 @@ pub(crate) struct DiagnosticFix {
}
/// A series of diagnostics across a single text document or an arbitrary number of notebook cells.
pub(crate) type Diagnostics = FxHashMap<lsp_types::Url, Vec<lsp_types::Diagnostic>>;
pub(crate) type DiagnosticsMap = FxHashMap<lsp_types::Url, Vec<lsp_types::Diagnostic>>;
pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> Diagnostics {
pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> DiagnosticsMap {
let source_kind = query.make_source_kind();
let file_resolver_settings = query.settings().file_resolver();
let linter_settings = query.settings().linter();
@ -80,7 +80,7 @@ pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> Diagno
exclusion,
document_path.display()
);
return Diagnostics::default();
return DiagnosticsMap::default();
}
detect_package_root(
@ -113,7 +113,7 @@ pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> Diagno
let directives = extract_directives(parsed.tokens(), Flags::all(), &locator, &indexer);
// Generate checks.
let LinterResult { data, .. } = check_path(
let diagnostics = check_path(
&query.virtual_file_path(),
package,
&locator,
@ -129,7 +129,7 @@ pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> Diagno
let noqa_edits = generate_noqa_edits(
&query.virtual_file_path(),
data.as_slice(),
&diagnostics,
&locator,
indexer.comment_ranges(),
&linter_settings.external,
@ -137,19 +137,21 @@ pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> Diagno
stylist.line_ending(),
);
let mut diagnostics = Diagnostics::default();
let mut diagnostics_map = DiagnosticsMap::default();
// Populates all relevant URLs with an empty diagnostic list.
// This ensures that documents without diagnostics still get updated.
if let Some(notebook) = query.as_notebook() {
for url in notebook.urls() {
diagnostics.entry(url.clone()).or_default();
diagnostics_map.entry(url.clone()).or_default();
}
} else {
diagnostics.entry(query.make_key().into_url()).or_default();
diagnostics_map
.entry(query.make_key().into_url())
.or_default();
}
let lsp_diagnostics = data
let lsp_diagnostics = diagnostics
.into_iter()
.zip(noqa_edits)
.map(|(diagnostic, noqa_edit)| {
@ -165,18 +167,21 @@ pub(crate) fn check(query: &DocumentQuery, encoding: PositionEncoding) -> Diagno
tracing::warn!("Unable to find notebook cell at index {index}.");
continue;
};
diagnostics.entry(uri.clone()).or_default().push(diagnostic);
diagnostics_map
.entry(uri.clone())
.or_default()
.push(diagnostic);
}
} else {
for (_, diagnostic) in lsp_diagnostics {
diagnostics
diagnostics_map
.entry(query.make_key().into_url())
.or_default()
.push(diagnostic);
}
}
diagnostics
diagnostics_map
}
/// Converts LSP diagnostics to a list of `DiagnosticFix`es by deserializing associated data on each diagnostic.

View file

@ -1,17 +1,17 @@
use crate::{
lint::Diagnostics,
lint::DiagnosticsMap,
server::client::Notifier,
session::{DocumentQuery, DocumentSnapshot},
};
use super::LSPResult;
pub(super) fn generate_diagnostics(snapshot: &DocumentSnapshot) -> Diagnostics {
pub(super) fn generate_diagnostics(snapshot: &DocumentSnapshot) -> DiagnosticsMap {
if snapshot.client_settings().lint() {
let document = snapshot.query();
crate::lint::check(document, snapshot.encoding())
} else {
Diagnostics::default()
DiagnosticsMap::default()
}
}

View file

@ -9,7 +9,7 @@ use ruff_formatter::printer::SourceMapGeneration;
use ruff_formatter::{FormatResult, Formatted, IndentStyle};
use ruff_linter::directives;
use ruff_linter::line_width::{IndentWidth, LineLength};
use ruff_linter::linter::{check_path, LinterResult};
use ruff_linter::linter::check_path;
use ruff_linter::registry::AsRule;
use ruff_linter::settings::types::PythonVersion;
use ruff_linter::settings::{flags, DEFAULT_SELECTORS, DUMMY_VARIABLE_RGX};
@ -181,9 +181,7 @@ impl Workspace {
);
// Generate checks.
let LinterResult {
data: diagnostics, ..
} = check_path(
let diagnostics = check_path(
Path::new("<filename>"),
None,
&locator,

View file

@ -27,23 +27,23 @@ fn do_fuzz(case: &[u8]) -> Corpus {
let linter_settings = SETTINGS.get_or_init(LinterSettings::default);
let format_options = PyFormatOptions::default();
let linter_results = ruff_linter::linter::lint_only(
let linter_result = ruff_linter::linter::lint_only(
"fuzzed-source.py".as_ref(),
None,
&linter_settings,
linter_settings,
Noqa::Enabled,
&SourceKind::Python(code.to_string()),
PySourceType::Python,
ParseSource::None,
);
if linter_results.error.is_some() {
if linter_result.has_syntax_error {
return Corpus::Keep; // keep, but don't continue
}
let mut warnings = HashMap::new();
for msg in &linter_results.data {
for msg in &linter_result.messages {
let count: &mut usize = warnings.entry(msg.name()).or_default();
*count += 1;
}
@ -52,10 +52,10 @@ fn do_fuzz(case: &[u8]) -> Corpus {
if let Ok(formatted) = format_module_source(code, format_options.clone()) {
let formatted = formatted.as_code().to_string();
let linter_results = ruff_linter::linter::lint_only(
let linter_result = ruff_linter::linter::lint_only(
"fuzzed-source.py".as_ref(),
None,
&linter_settings,
linter_settings,
Noqa::Enabled,
&SourceKind::Python(formatted.clone()),
PySourceType::Python,
@ -63,11 +63,11 @@ fn do_fuzz(case: &[u8]) -> Corpus {
);
assert!(
linter_results.error.is_none(),
!linter_result.has_syntax_error,
"formatter introduced a parse error"
);
for msg in &linter_results.data {
for msg in &linter_result.messages {
if let Some(count) = warnings.get_mut(msg.name()) {
if let Some(decremented) = count.checked_sub(1) {
*count = decremented;
@ -77,7 +77,6 @@ fn do_fuzz(case: &[u8]) -> Corpus {
TextDiff::from_lines(code, &formatted)
.unified_diff()
.header("Unformatted", "Formatted")
.to_string()
);
}
} else {
@ -86,7 +85,6 @@ fn do_fuzz(case: &[u8]) -> Corpus {
TextDiff::from_lines(code, &formatted)
.unified_diff()
.header("Unformatted", "Formatted")
.to_string()
);
}
}