Introduce Diagnostic trait (#14130)

Micha Reiser 2024-11-07 13:26:21 +01:00 committed by GitHub
parent b8188b2262
commit 59c0dacea0
20 changed files with 639 additions and 255 deletions

Cargo.lock generated

@ -2173,7 +2173,6 @@ dependencies = [
"regex",
"ruff_db",
"ruff_index",
"ruff_python_parser",
"ruff_python_trivia",
"ruff_source_file",
"ruff_text_size",


@ -5,8 +5,6 @@ use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::plumbing::ZalsaDatabase;
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
@ -14,7 +12,9 @@ use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase;
use target_version::TargetVersion;
use crate::logging::{setup_tracing, Verbosity};
@ -318,8 +318,9 @@ impl MainLoop {
} => {
let has_diagnostics = !result.is_empty();
if check_revision == revision {
#[allow(clippy::print_stdout)]
for diagnostic in result {
tracing::error!("{}", diagnostic);
println!("{}", diagnostic.display(db));
}
} else {
tracing::debug!(
@ -378,7 +379,10 @@ impl MainLoopCancellationToken {
#[derive(Debug)]
enum MainLoopMessage {
CheckWorkspace,
CheckCompleted { result: Vec<String>, revision: u64 },
CheckCompleted {
result: Vec<Box<dyn Diagnostic>>,
revision: u64,
},
ApplyChanges(Vec<watch::ChangeEvent>),
Exit,
}
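
The check result now carries boxed `Diagnostic` trait objects instead of pre-rendered strings, so formatting is deferred to `Diagnostic::display(db)` at the point where the main loop prints. A minimal sketch of how such a result is rendered (illustrative only; `db` is assumed to implement `ruff_db::Db`, names mirror the diff above):

// Illustrative sketch (not part of the commit): rendering boxed diagnostics.
fn print_diagnostics(db: &dyn ruff_db::Db, result: Vec<Box<dyn ruff_db::diagnostic::Diagnostic>>) {
    for diagnostic in result {
        // `display` borrows the database so the file path and line/column
        // are only resolved when the diagnostic is actually printed.
        println!("{}", diagnostic.display(db));
    }
}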


@ -37,9 +37,12 @@ mod mro;
mod narrow;
mod unpacker;
#[salsa::tracked(return_ref)]
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();
tracing::debug!("Checking file '{path}'", path = file.path(db));
let index = semantic_index(db, file);
let mut diagnostics = TypeCheckDiagnostics::new();


@ -1,13 +1,14 @@
use crate::types::{ClassLiteralType, Type};
use crate::Db;
use ruff_db::diagnostic::{Diagnostic, Severity};
use ruff_db::files::File;
use ruff_python_ast::{self as ast, AnyNodeRef};
use ruff_text_size::{Ranged, TextRange};
use std::borrow::Cow;
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;
use crate::types::{ClassLiteralType, Type};
use crate::Db;
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct TypeCheckDiagnostic {
// TODO: Don't use string keys for rules
@ -31,6 +32,28 @@ impl TypeCheckDiagnostic {
}
}
impl Diagnostic for TypeCheckDiagnostic {
fn rule(&self) -> &str {
TypeCheckDiagnostic::rule(self)
}
fn message(&self) -> Cow<str> {
TypeCheckDiagnostic::message(self).into()
}
fn file(&self) -> File {
TypeCheckDiagnostic::file(self)
}
fn range(&self) -> Option<TextRange> {
Some(Ranged::range(self))
}
fn severity(&self) -> Severity {
Severity::Error
}
}
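
Because `TypeCheckDiagnostic` now implements the shared trait, call sites can erase it behind `Box<dyn Diagnostic>`, as the workspace check and the markdown test framework do later in this diff. A hedged sketch of that erasure:

// Sketch only: converting concrete type-check diagnostics into trait objects.
use red_knot_python_semantic::types::TypeCheckDiagnostic;
use ruff_db::diagnostic::Diagnostic;

fn boxed(diagnostics: Vec<TypeCheckDiagnostic>) -> Vec<Box<dyn Diagnostic>> {
    diagnostics
        .into_iter()
        .map(|diagnostic| Box::new(diagnostic) as Box<dyn Diagnostic>)
        .collect()
}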
impl Ranged for TypeCheckDiagnostic {
fn range(&self) -> TextRange {
self.range


@ -4533,7 +4533,7 @@ mod tests {
let file = system_path_to_file(db, filename).unwrap();
let diagnostics = check_types(db, file);
assert_diagnostic_messages(&diagnostics, expected);
assert_diagnostic_messages(diagnostics, expected);
}
#[test]


@ -6,7 +6,7 @@ mod text_document;
use lsp_types::{PositionEncodingKind, Url};
pub use notebook::NotebookDocument;
pub(crate) use range::RangeExt;
pub(crate) use range::{RangeExt, ToRangeExt};
pub(crate) use text_document::DocumentVersion;
pub use text_document::TextDocument;


@ -1,13 +1,32 @@
use super::notebook;
use super::PositionEncoding;
use ruff_source_file::LineIndex;
use lsp_types as types;
use ruff_notebook::NotebookIndex;
use ruff_source_file::OneIndexed;
use ruff_source_file::{LineIndex, SourceLocation};
use ruff_text_size::{TextRange, TextSize};
pub(crate) struct NotebookRange {
pub(crate) cell: notebook::CellId,
pub(crate) range: types::Range,
}
pub(crate) trait RangeExt {
fn to_text_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding)
-> TextRange;
}
pub(crate) trait ToRangeExt {
fn to_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> types::Range;
fn to_notebook_range(
&self,
text: &str,
source_index: &LineIndex,
notebook_index: &NotebookIndex,
encoding: PositionEncoding,
) -> NotebookRange;
}
fn u32_index_to_usize(index: u32) -> usize {
usize::try_from(index).expect("u32 fits in usize")
}
@ -75,6 +94,61 @@ impl RangeExt for lsp_types::Range {
}
}
impl ToRangeExt for TextRange {
fn to_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> types::Range {
types::Range {
start: source_location_to_position(&offset_to_source_location(
self.start(),
text,
index,
encoding,
)),
end: source_location_to_position(&offset_to_source_location(
self.end(),
text,
index,
encoding,
)),
}
}
fn to_notebook_range(
&self,
text: &str,
source_index: &LineIndex,
notebook_index: &NotebookIndex,
encoding: PositionEncoding,
) -> NotebookRange {
let start = offset_to_source_location(self.start(), text, source_index, encoding);
let mut end = offset_to_source_location(self.end(), text, source_index, encoding);
let starting_cell = notebook_index.cell(start.row);
// weird edge case here - if the end of the range is where the newline after the cell got added (making it 'out of bounds')
// we need to move it one character back (which should place it at the end of the last line).
// we test this by checking if the ending offset is in a different (or nonexistent) cell compared to the cell of the starting offset.
if notebook_index.cell(end.row) != starting_cell {
end.row = end.row.saturating_sub(1);
end.column = offset_to_source_location(
self.end().checked_sub(1.into()).unwrap_or_default(),
text,
source_index,
encoding,
)
.column;
}
let start = source_location_to_position(&notebook_index.translate_location(&start));
let end = source_location_to_position(&notebook_index.translate_location(&end));
NotebookRange {
cell: starting_cell
.map(OneIndexed::to_zero_indexed)
.unwrap_or_default(),
range: types::Range { start, end },
}
}
}
/// Converts a UTF-16 code unit offset for a given line into a UTF-8 column number.
fn utf8_column_offset(utf16_code_unit_offset: u32, line: &str) -> TextSize {
let mut utf8_code_unit_offset = TextSize::new(0);
@ -96,3 +170,46 @@ fn utf8_column_offset(utf16_code_unit_offset: u32, line: &str) -> TextSize {
utf8_code_unit_offset
}
fn offset_to_source_location(
offset: TextSize,
text: &str,
index: &LineIndex,
encoding: PositionEncoding,
) -> SourceLocation {
match encoding {
PositionEncoding::UTF8 => {
let row = index.line_index(offset);
let column = offset - index.line_start(row, text);
SourceLocation {
column: OneIndexed::from_zero_indexed(column.to_usize()),
row,
}
}
PositionEncoding::UTF16 => {
let row = index.line_index(offset);
let column = if index.is_ascii() {
(offset - index.line_start(row, text)).to_usize()
} else {
let up_to_line = &text[TextRange::new(index.line_start(row, text), offset)];
up_to_line.encode_utf16().count()
};
SourceLocation {
column: OneIndexed::from_zero_indexed(column),
row,
}
}
PositionEncoding::UTF32 => index.source_location(offset, text),
}
}
fn source_location_to_position(location: &SourceLocation) -> types::Position {
types::Position {
line: u32::try_from(location.row.to_zero_indexed()).expect("row usize fits in u32"),
character: u32::try_from(location.column.to_zero_indexed())
.expect("character usize fits in u32"),
}
}
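
The new `ToRangeExt` trait is the bridge the diagnostic handler below uses to turn a `TextRange` into an LSP range under the client's negotiated position encoding. A rough usage sketch (illustrative only; it assumes the code lives inside the server crate, as the handler does, and that a source text and its `LineIndex` are already at hand):

// Illustrative only: converting a text range to an LSP range.
use crate::edit::ToRangeExt;
use crate::PositionEncoding;
use ruff_source_file::LineIndex;
use ruff_text_size::{TextRange, TextSize};

fn to_lsp_range(source: &str, index: &LineIndex, encoding: PositionEncoding) -> lsp_types::Range {
    // A hypothetical five-character range at the start of the file.
    let range = TextRange::new(TextSize::new(0), TextSize::new(5));
    // Offsets are resolved through the line index, then re-encoded as
    // UTF-8, UTF-16, or UTF-32 columns depending on `encoding`.
    range.to_range(source, index, encoding)
}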


@ -3,15 +3,17 @@ use std::borrow::Cow;
use lsp_types::request::DocumentDiagnosticRequest;
use lsp_types::{
Diagnostic, DiagnosticSeverity, DocumentDiagnosticParams, DocumentDiagnosticReport,
DocumentDiagnosticReportResult, FullDocumentDiagnosticReport, Position, Range,
DocumentDiagnosticReportResult, FullDocumentDiagnosticReport, NumberOrString, Range,
RelatedFullDocumentDiagnosticReport, Url,
};
use red_knot_workspace::db::RootDatabase;
use crate::edit::ToRangeExt;
use crate::server::api::traits::{BackgroundDocumentRequestHandler, RequestHandler};
use crate::server::{client::Notifier, Result};
use crate::session::DocumentSnapshot;
use red_knot_workspace::db::{Db, RootDatabase};
use ruff_db::diagnostic::Severity;
use ruff_db::source::{line_index, source_text};
pub(crate) struct DocumentDiagnosticRequestHandler;
@ -64,36 +66,37 @@ fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Di
diagnostics
.as_slice()
.iter()
.map(|message| to_lsp_diagnostic(message))
.map(|message| to_lsp_diagnostic(db, message, snapshot.encoding()))
.collect()
}
fn to_lsp_diagnostic(message: &str) -> Diagnostic {
let words = message.split(':').collect::<Vec<_>>();
fn to_lsp_diagnostic(
db: &dyn Db,
diagnostic: &dyn ruff_db::diagnostic::Diagnostic,
encoding: crate::PositionEncoding,
) -> Diagnostic {
let range = if let Some(range) = diagnostic.range() {
let index = line_index(db.upcast(), diagnostic.file());
let source = source_text(db.upcast(), diagnostic.file());
let (range, message) = match words.as_slice() {
[_, _, line, column, message] | [_, line, column, message] => {
let line = line.parse::<u32>().unwrap_or_default().saturating_sub(1);
let column = column.parse::<u32>().unwrap_or_default();
(
Range::new(
Position::new(line, column.saturating_sub(1)),
Position::new(line, column),
),
message.trim(),
)
}
_ => (Range::default(), message),
range.to_range(&source, &index, encoding)
} else {
Range::default()
};
let severity = match diagnostic.severity() {
Severity::Info => DiagnosticSeverity::INFORMATION,
Severity::Error => DiagnosticSeverity::ERROR,
};
Diagnostic {
range,
severity: Some(DiagnosticSeverity::ERROR),
severity: Some(severity),
tags: None,
code: None,
code: Some(NumberOrString::String(diagnostic.rule().to_string())),
code_description: None,
source: Some("red-knot".into()),
message: message.to_string(),
message: diagnostic.message().into_owned(),
related_information: None,
data: None,
}


@ -15,7 +15,6 @@ red_knot_python_semantic = { workspace = true }
red_knot_vendored = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }


@ -2,63 +2,10 @@
//!
//! We don't assume that we will get the diagnostics in source order.
use red_knot_python_semantic::types::TypeCheckDiagnostic;
use ruff_python_parser::ParseError;
use ruff_db::diagnostic::Diagnostic;
use ruff_source_file::{LineIndex, OneIndexed};
use ruff_text_size::{Ranged, TextRange};
use std::borrow::Cow;
use std::ops::{Deref, Range};
pub(super) trait Diagnostic: std::fmt::Debug {
fn rule(&self) -> &str;
fn message(&self) -> Cow<str>;
fn range(&self) -> TextRange;
}
impl Diagnostic for TypeCheckDiagnostic {
fn rule(&self) -> &str {
TypeCheckDiagnostic::rule(self)
}
fn message(&self) -> Cow<str> {
TypeCheckDiagnostic::message(self).into()
}
fn range(&self) -> TextRange {
Ranged::range(self)
}
}
impl Diagnostic for ParseError {
fn rule(&self) -> &str {
"invalid-syntax"
}
fn message(&self) -> Cow<str> {
self.error.to_string().into()
}
fn range(&self) -> TextRange {
self.location
}
}
impl Diagnostic for Box<dyn Diagnostic> {
fn rule(&self) -> &str {
(**self).rule()
}
fn message(&self) -> Cow<str> {
(**self).message()
}
fn range(&self) -> TextRange {
(**self).range()
}
}
/// All diagnostics for one embedded Python file, sorted and grouped by start line number.
///
/// The diagnostics are kept in a flat vector, sorted by line number. A separate vector of
@ -78,7 +25,11 @@ where
let mut diagnostics: Vec<_> = diagnostics
.into_iter()
.map(|diagnostic| DiagnosticWithLine {
line_number: line_index.line_index(diagnostic.range().start()),
line_number: diagnostic
.range()
.map_or(OneIndexed::from_zero_indexed(0), |range| {
line_index.line_index(range.start())
}),
diagnostic,
})
.collect();
@ -193,7 +144,8 @@ struct DiagnosticWithLine<T> {
mod tests {
use crate::db::Db;
use crate::diagnostic::Diagnostic;
use ruff_db::files::system_path_to_file;
use ruff_db::diagnostic::Severity;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::source::line_index;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_source_file::OneIndexed;
@ -215,7 +167,7 @@ mod tests {
let diagnostics: Vec<_> = ranges
.into_iter()
.map(|range| DummyDiagnostic { range })
.map(|range| DummyDiagnostic { range, file })
.collect();
let sorted = super::SortedDiagnostics::new(diagnostics, &lines);
@ -234,6 +186,7 @@ mod tests {
#[derive(Debug)]
struct DummyDiagnostic {
range: TextRange,
file: File,
}
impl Diagnostic for DummyDiagnostic {
@ -245,8 +198,16 @@ mod tests {
"dummy".into()
}
fn range(&self) -> TextRange {
self.range
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {
Some(self.range)
}
fn severity(&self) -> Severity {
Severity::Error
}
}
}


@ -1,14 +1,13 @@
use crate::diagnostic::Diagnostic;
use colored::Colorize;
use parser as test_parser;
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::{Diagnostic, ParseDiagnostic};
use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::parsed::parsed_module;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_source_file::LineIndex;
use ruff_text_size::TextSize;
use std::path::Path;
use std::sync::Arc;
mod assertion;
mod db;
@ -94,14 +93,15 @@ fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures
.iter()
.cloned()
.map(|error| {
let diagnostic: Box<dyn Diagnostic> = Box::new(error);
let diagnostic: Box<dyn Diagnostic> =
Box::new(ParseDiagnostic::new(test_file.file, error));
diagnostic
})
.collect();
let type_diagnostics = check_types(db, test_file.file);
diagnostics.extend(type_diagnostics.into_iter().map(|diagnostic| {
let diagnostic: Box<dyn Diagnostic> = Box::new(Arc::unwrap_or_clone(diagnostic));
let diagnostic: Box<dyn Diagnostic> = Box::new((*diagnostic).clone());
diagnostic
}));


@ -2,8 +2,9 @@
//! mismatches.
use crate::assertion::{Assertion, ErrorAssertion, InlineFileAssertions};
use crate::db::Db;
use crate::diagnostic::{Diagnostic, SortedDiagnostics};
use crate::diagnostic::SortedDiagnostics;
use colored::Colorize;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::File;
use ruff_db::source::{line_index, source_text, SourceText};
use ruff_source_file::{LineIndex, OneIndexed};
@ -235,9 +236,14 @@ impl Matcher {
}
fn column<T: Diagnostic>(&self, diagnostic: &T) -> OneIndexed {
diagnostic
.range()
.map(|range| {
self.line_index
.source_location(diagnostic.range().start(), &self.source)
.source_location(range.start(), &self.source)
.column
})
.unwrap_or(OneIndexed::from_zero_indexed(0))
}
/// Check if `assertion` matches any [`Diagnostic`]s in `unmatched`.
@ -304,22 +310,21 @@ impl Matcher {
#[cfg(test)]
mod tests {
use super::FailuresByLine;
use crate::diagnostic::Diagnostic;
use ruff_db::files::system_path_to_file;
use ruff_db::diagnostic::{Diagnostic, Severity};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_python_trivia::textwrap::dedent;
use ruff_source_file::OneIndexed;
use ruff_text_size::TextRange;
use std::borrow::Cow;
#[derive(Clone, Debug)]
struct TestDiagnostic {
struct ExpectedDiagnostic {
rule: &'static str,
message: &'static str,
range: TextRange,
}
impl TestDiagnostic {
impl ExpectedDiagnostic {
fn new(rule: &'static str, message: &'static str, offset: usize) -> Self {
let offset: u32 = offset.try_into().unwrap();
Self {
@ -328,6 +333,23 @@ mod tests {
range: TextRange::new(offset.into(), (offset + 1).into()),
}
}
fn into_diagnostic(self, file: File) -> TestDiagnostic {
TestDiagnostic {
rule: self.rule,
message: self.message,
range: self.range,
file,
}
}
}
#[derive(Debug)]
struct TestDiagnostic {
rule: &'static str,
message: &'static str,
range: TextRange,
file: File,
}
impl Diagnostic for TestDiagnostic {
@ -339,19 +361,36 @@ mod tests {
self.message.into()
}
fn range(&self) -> TextRange {
self.range
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {
Some(self.range)
}
fn severity(&self) -> Severity {
Severity::Error
}
}
fn get_result(source: &str, diagnostics: Vec<TestDiagnostic>) -> Result<(), FailuresByLine> {
fn get_result(
source: &str,
diagnostics: Vec<ExpectedDiagnostic>,
) -> Result<(), FailuresByLine> {
colored::control::set_override(false);
let mut db = crate::db::Db::setup(SystemPathBuf::from("/src"));
db.write_file("/src/test.py", source).unwrap();
let file = system_path_to_file(&db, "/src/test.py").unwrap();
super::match_file(&db, file, diagnostics)
super::match_file(
&db,
file,
diagnostics
.into_iter()
.map(|diagnostic| diagnostic.into_diagnostic(file)),
)
}
fn assert_fail(result: Result<(), FailuresByLine>, messages: &[(usize, &[&str])]) {
@ -384,7 +423,7 @@ mod tests {
fn revealed_match() {
let result = get_result(
"x # revealed: Foo",
vec![TestDiagnostic::new(
vec![ExpectedDiagnostic::new(
"revealed-type",
"Revealed type is `Foo`",
0,
@ -398,7 +437,7 @@ mod tests {
fn revealed_wrong_rule() {
let result = get_result(
"x # revealed: Foo",
vec![TestDiagnostic::new(
vec![ExpectedDiagnostic::new(
"not-revealed-type",
"Revealed type is `Foo`",
0,
@ -421,7 +460,11 @@ mod tests {
fn revealed_wrong_message() {
let result = get_result(
"x # revealed: Foo",
vec![TestDiagnostic::new("revealed-type", "Something else", 0)],
vec![ExpectedDiagnostic::new(
"revealed-type",
"Something else",
0,
)],
);
assert_fail(
@ -448,8 +491,8 @@ mod tests {
let result = get_result(
"x # revealed: Foo",
vec![
TestDiagnostic::new("revealed-type", "Revealed type is `Foo`", 0),
TestDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Foo`", 0),
ExpectedDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
],
);
@ -460,7 +503,11 @@ mod tests {
fn revealed_match_with_only_undefined() {
let result = get_result(
"x # revealed: Foo",
vec![TestDiagnostic::new("undefined-reveal", "Doesn't matter", 0)],
vec![ExpectedDiagnostic::new(
"undefined-reveal",
"Doesn't matter",
0,
)],
);
assert_fail(result, &[(0, &["unmatched assertion: revealed: Foo"])]);
@ -471,8 +518,8 @@ mod tests {
let result = get_result(
"x # revealed: Foo",
vec![
TestDiagnostic::new("revealed-type", "Revealed type is `Bar`", 0),
TestDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Bar`", 0),
ExpectedDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
],
);
@ -493,8 +540,8 @@ mod tests {
let result = get_result(
"reveal_type(1)",
vec![
TestDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
TestDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
ExpectedDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
],
);
@ -516,8 +563,8 @@ mod tests {
let result = get_result(
"reveal_type(1) # error: [something-else]",
vec![
TestDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
TestDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
ExpectedDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
],
);
@ -546,7 +593,7 @@ mod tests {
fn error_match_rule() {
let result = get_result(
"x # error: [some-rule]",
vec![TestDiagnostic::new("some-rule", "Any message", 0)],
vec![ExpectedDiagnostic::new("some-rule", "Any message", 0)],
);
assert_ok(&result);
@ -556,7 +603,7 @@ mod tests {
fn error_wrong_rule() {
let result = get_result(
"x # error: [some-rule]",
vec![TestDiagnostic::new("anything", "Any message", 0)],
vec![ExpectedDiagnostic::new("anything", "Any message", 0)],
);
assert_fail(
@ -575,7 +622,11 @@ mod tests {
fn error_match_message() {
let result = get_result(
r#"x # error: "contains this""#,
vec![TestDiagnostic::new("anything", "message contains this", 0)],
vec![ExpectedDiagnostic::new(
"anything",
"message contains this",
0,
)],
);
assert_ok(&result);
@ -585,7 +636,7 @@ mod tests {
fn error_wrong_message() {
let result = get_result(
r#"x # error: "contains this""#,
vec![TestDiagnostic::new("anything", "Any message", 0)],
vec![ExpectedDiagnostic::new("anything", "Any message", 0)],
);
assert_fail(
@ -604,7 +655,7 @@ mod tests {
fn error_match_column_and_rule() {
let result = get_result(
"x # error: 1 [some-rule]",
vec![TestDiagnostic::new("some-rule", "Any message", 0)],
vec![ExpectedDiagnostic::new("some-rule", "Any message", 0)],
);
assert_ok(&result);
@ -614,7 +665,7 @@ mod tests {
fn error_wrong_column() {
let result = get_result(
"x # error: 2 [rule]",
vec![TestDiagnostic::new("rule", "Any message", 0)],
vec![ExpectedDiagnostic::new("rule", "Any message", 0)],
);
assert_fail(
@ -633,7 +684,11 @@ mod tests {
fn error_match_column_and_message() {
let result = get_result(
r#"x # error: 1 "contains this""#,
vec![TestDiagnostic::new("anything", "message contains this", 0)],
vec![ExpectedDiagnostic::new(
"anything",
"message contains this",
0,
)],
);
assert_ok(&result);
@ -643,7 +698,11 @@ mod tests {
fn error_match_rule_and_message() {
let result = get_result(
r#"x # error: [a-rule] "contains this""#,
vec![TestDiagnostic::new("a-rule", "message contains this", 0)],
vec![ExpectedDiagnostic::new(
"a-rule",
"message contains this",
0,
)],
);
assert_ok(&result);
@ -653,7 +712,11 @@ mod tests {
fn error_match_all() {
let result = get_result(
r#"x # error: 1 [a-rule] "contains this""#,
vec![TestDiagnostic::new("a-rule", "message contains this", 0)],
vec![ExpectedDiagnostic::new(
"a-rule",
"message contains this",
0,
)],
);
assert_ok(&result);
@ -663,7 +726,11 @@ mod tests {
fn error_match_all_wrong_column() {
let result = get_result(
r#"x # error: 2 [some-rule] "contains this""#,
vec![TestDiagnostic::new("some-rule", "message contains this", 0)],
vec![ExpectedDiagnostic::new(
"some-rule",
"message contains this",
0,
)],
);
assert_fail(
@ -682,7 +749,7 @@ mod tests {
fn error_match_all_wrong_rule() {
let result = get_result(
r#"x # error: 1 [some-rule] "contains this""#,
vec![TestDiagnostic::new(
vec![ExpectedDiagnostic::new(
"other-rule",
"message contains this",
0,
@ -705,7 +772,7 @@ mod tests {
fn error_match_all_wrong_message() {
let result = get_result(
r#"x # error: 1 [some-rule] "contains this""#,
vec![TestDiagnostic::new("some-rule", "Any message", 0)],
vec![ExpectedDiagnostic::new("some-rule", "Any message", 0)],
);
assert_fail(
@ -738,9 +805,9 @@ mod tests {
let result = get_result(
&source,
vec![
TestDiagnostic::new("line-two", "msg", two),
TestDiagnostic::new("line-three", "msg", three),
TestDiagnostic::new("line-five", "msg", five),
ExpectedDiagnostic::new("line-two", "msg", two),
ExpectedDiagnostic::new("line-three", "msg", three),
ExpectedDiagnostic::new("line-five", "msg", five),
],
);
@ -769,8 +836,8 @@ mod tests {
let result = get_result(
&source,
vec![
TestDiagnostic::new("line-one", "msg", one),
TestDiagnostic::new("line-two", "msg", two),
ExpectedDiagnostic::new("line-one", "msg", one),
ExpectedDiagnostic::new("line-two", "msg", two),
],
);
@ -790,8 +857,8 @@ mod tests {
let result = get_result(
&source,
vec![
TestDiagnostic::new("one-rule", "msg", x),
TestDiagnostic::new("other-rule", "msg", x),
ExpectedDiagnostic::new("one-rule", "msg", x),
ExpectedDiagnostic::new("other-rule", "msg", x),
],
);
@ -811,8 +878,8 @@ mod tests {
let result = get_result(
&source,
vec![
TestDiagnostic::new("one-rule", "msg", x),
TestDiagnostic::new("one-rule", "msg", x),
ExpectedDiagnostic::new("one-rule", "msg", x),
ExpectedDiagnostic::new("one-rule", "msg", x),
],
);
@ -832,9 +899,9 @@ mod tests {
let result = get_result(
&source,
vec![
TestDiagnostic::new("one-rule", "msg", x),
TestDiagnostic::new("other-rule", "msg", x),
TestDiagnostic::new("third-rule", "msg", x),
ExpectedDiagnostic::new("one-rule", "msg", x),
ExpectedDiagnostic::new("other-rule", "msg", x),
ExpectedDiagnostic::new("third-rule", "msg", x),
],
);
@ -858,8 +925,8 @@ mod tests {
let result = get_result(
&source,
vec![
TestDiagnostic::new("undefined-reveal", "msg", reveal),
TestDiagnostic::new("revealed-type", "Revealed type is `Literal[5]`", reveal),
ExpectedDiagnostic::new("undefined-reveal", "msg", reveal),
ExpectedDiagnostic::new("revealed-type", "Revealed type is `Literal[5]`", reveal),
],
);
@ -872,7 +939,7 @@ mod tests {
let x = source.find('x').unwrap();
let result = get_result(
source,
vec![TestDiagnostic::new("some-rule", "some message", x)],
vec![ExpectedDiagnostic::new("some-rule", "some message", x)],
);
assert_fail(
@ -893,7 +960,7 @@ mod tests {
let x = source.find('x').unwrap();
let result = get_result(
source,
vec![TestDiagnostic::new("some-rule", "some message", x)],
vec![ExpectedDiagnostic::new("some-rule", "some message", x)],
);
assert_fail(


@ -6,6 +6,7 @@ use wasm_bindgen::prelude::*;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
use ruff_db::system::{
@ -110,14 +111,20 @@ impl Workspace {
pub fn check_file(&self, file_id: &FileHandle) -> Result<Vec<String>, Error> {
let result = self.db.check_file(file_id.file).map_err(into_error)?;
Ok(result)
Ok(result
.into_iter()
.map(|diagnostic| diagnostic.display(&self.db).to_string())
.collect())
}
/// Checks all open files
pub fn check(&self) -> Result<Vec<String>, Error> {
let result = self.db.check().map_err(into_error)?;
Ok(result)
Ok(result
.into_iter()
.map(|diagnostic| diagnostic.display(&self.db).to_string())
.collect())
}
/// Returns the parsed AST for `path`


@ -19,6 +19,6 @@ fn check() {
assert_eq!(
result,
vec!["/test.py:1:8: Cannot resolve import `random22`"]
vec!["error[unresolved-import] /test.py:1:8 Cannot resolve import `random22`"]
);
}


@ -4,14 +4,14 @@ use std::sync::Arc;
use salsa::plumbing::ZalsaDatabase;
use salsa::{Cancelled, Event};
use crate::workspace::{check_file, Workspace, WorkspaceMetadata};
use red_knot_python_semantic::{Db as SemanticDb, Program};
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::{File, Files};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use crate::workspace::{check_file, Workspace, WorkspaceMetadata};
mod changes;
#[salsa::db]
@ -51,11 +51,11 @@ impl RootDatabase {
}
/// Checks all open files in the workspace and its dependencies.
pub fn check(&self) -> Result<Vec<String>, Cancelled> {
pub fn check(&self) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
self.with_db(|db| db.workspace().check(db))
}
pub fn check_file(&self, file: File) -> Result<Vec<String>, Cancelled> {
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
self.with_db(|db| check_file(db, file))


@ -1,23 +1,23 @@
use std::{collections::BTreeMap, sync::Arc};
use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::{Durability, Setter as _};
use std::borrow::Cow;
use std::{collections::BTreeMap, sync::Arc};
use crate::db::Db;
use crate::db::RootDatabase;
use crate::workspace::files::{Index, Indexed, IndexedIter, PackageFiles};
pub use metadata::{PackageMetadata, WorkspaceMetadata};
use red_knot_python_semantic::types::check_types;
use red_knot_python_semantic::SearchPathSettings;
use ruff_db::diagnostic::{Diagnostic, ParseDiagnostic, Severity};
use ruff_db::parsed::parsed_module;
use ruff_db::source::{line_index, source_text, SourceDiagnostic};
use ruff_db::source::{source_text, SourceTextError};
use ruff_db::{
files::{system_path_to_file, File},
system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};
use ruff_text_size::Ranged;
use crate::db::Db;
use crate::db::RootDatabase;
use crate::workspace::files::{Index, Indexed, IndexedIter, PackageFiles};
use ruff_text_size::TextRange;
mod files;
mod metadata;
@ -188,7 +188,7 @@ impl Workspace {
}
/// Checks all open files in the workspace and its dependencies.
pub fn check(self, db: &RootDatabase) -> Vec<String> {
pub fn check(self, db: &RootDatabase) -> Vec<Box<dyn Diagnostic>> {
let workspace_span = tracing::debug_span!("check_workspace");
let _span = workspace_span.enter();
@ -378,47 +378,31 @@ impl Package {
}
}
#[salsa::tracked]
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<String> {
tracing::debug!("Checking file '{path}'", path = file.path(db));
let mut diagnostics = Vec::new();
let source_diagnostics = source_text::accumulated::<SourceDiagnostic>(db.upcast(), file);
// TODO(micha): Consider using a single accumulator for all diagnostics
diagnostics.extend(
source_diagnostics
.iter()
.map(std::string::ToString::to_string),
);
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
if source.has_read_error() {
if let Some(read_error) = source.read_error() {
diagnostics.push(Box::new(IOErrorDiagnostic {
file,
error: read_error.clone(),
}));
return diagnostics;
}
let parsed = parsed_module(db.upcast(), file);
if !parsed.errors().is_empty() {
let path = file.path(db);
let line_index = line_index(db.upcast(), file);
diagnostics.extend(parsed.errors().iter().map(|err| {
let source_location = line_index.source_location(err.location.start(), source.as_str());
format!("{path}:{source_location}: {message}", message = err.error)
diagnostics.extend(parsed.errors().iter().map(|error| {
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
diagnostic
}));
}
for diagnostic in check_types(db.upcast(), file) {
let index = line_index(db.upcast(), diagnostic.file());
let location = index.source_location(diagnostic.start(), source.as_str());
diagnostics.push(format!(
"{path}:{location}: {message}",
path = file.path(db),
message = diagnostic.message()
));
}
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
boxed
}));
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
diagnostics
}
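
Note the ordering at the end of `check_file`: diagnostics without a range (such as `IOErrorDiagnostic`) fall back to `TextRange::default()` and therefore sort before anything with a real location. A minimal sketch of that sort key, under the assumption that the default range is the empty range at offset 0:

// Minimal sketch of the sort key used above. Assumption: `TextRange::default()`
// is the empty range at offset 0, so range-less diagnostics (e.g. IO errors)
// sort before anything with a concrete source location.
use ruff_text_size::{TextRange, TextSize};

fn sort_key(range: Option<TextRange>) -> TextSize {
    range.unwrap_or_default().start()
}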
@ -533,17 +517,45 @@ impl Iterator for WorkspaceFilesIter<'_> {
}
}
#[derive(Debug)]
pub struct IOErrorDiagnostic {
file: File,
error: SourceTextError,
}
impl Diagnostic for IOErrorDiagnostic {
fn rule(&self) -> &str {
"io"
}
fn message(&self) -> Cow<str> {
self.error.to_string().into()
}
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {
None
}
fn severity(&self) -> Severity {
Severity::Error
}
}
#[cfg(test)]
mod tests {
use crate::db::tests::TestDb;
use crate::workspace::check_file;
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::system_path_to_file;
use ruff_db::source::source_text;
use ruff_db::system::{DbWithTestSystem, SystemPath};
use ruff_db::testing::assert_function_query_was_not_run;
use crate::db::tests::TestDb;
use crate::workspace::check_file;
#[test]
fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
let mut db = TestDb::new();
@ -558,7 +570,10 @@ mod tests {
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(
check_file(&db, file),
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),
vec!["Failed to read file: No such file or directory".to_string()]
);
@ -570,7 +585,13 @@ mod tests {
db.write_file(path, "").unwrap();
assert_eq!(source_text(&db, file).as_str(), "");
assert_eq!(check_file(&db, file), vec![] as Vec<String>);
assert_eq!(
check_file(&db, file)
.into_iter()
.map(|diagnostic| diagnostic.message().into_owned())
.collect::<Vec<_>>(),
vec![] as Vec<String>
);
Ok(())
}


@ -8,6 +8,7 @@ use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use ruff_benchmark::TestFile;
use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::source::source_text;
use ruff_db::system::{MemoryFileSystem, SystemPath, SystemPathBuf, TestSystem};
@ -24,32 +25,32 @@ const TOMLLIB_312_URL: &str = "https://raw.githubusercontent.com/python/cpython/
static EXPECTED_DIAGNOSTICS: &[&str] = &[
// We don't support `*` imports yet:
"/src/tomllib/_parser.py:7:29: Module `collections.abc` has no member `Iterable`",
"error[unresolved-import] /src/tomllib/_parser.py:7:29 Module `collections.abc` has no member `Iterable`",
// We don't support terminal statements in control flow yet:
"/src/tomllib/_parser.py:246:15: Method `__class_getitem__` of type `Literal[frozenset]` is possibly unbound",
"/src/tomllib/_parser.py:692:8354: Invalid class base with type `GenericAlias` (all bases must be a class, `Any`, `Unknown` or `Todo`)",
"/src/tomllib/_parser.py:66:18: Name `s` used when possibly not defined",
"/src/tomllib/_parser.py:98:12: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:101:12: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:104:14: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:108:17: Conflicting declared types for `second_char`: Unknown, str | None",
"/src/tomllib/_parser.py:115:14: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:126:12: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:267:9: Conflicting declared types for `char`: Unknown, str | None",
"/src/tomllib/_parser.py:348:20: Name `nest` used when possibly not defined",
"/src/tomllib/_parser.py:353:5: Name `nest` used when possibly not defined",
"/src/tomllib/_parser.py:364:9: Conflicting declared types for `char`: Unknown, str | None",
"/src/tomllib/_parser.py:381:13: Conflicting declared types for `char`: Unknown, str | None",
"/src/tomllib/_parser.py:395:9: Conflicting declared types for `char`: Unknown, str | None",
"/src/tomllib/_parser.py:453:24: Name `nest` used when possibly not defined",
"/src/tomllib/_parser.py:455:9: Name `nest` used when possibly not defined",
"/src/tomllib/_parser.py:482:16: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:566:12: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:573:12: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:579:12: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:580:63: Name `char` used when possibly not defined",
"/src/tomllib/_parser.py:590:9: Conflicting declared types for `char`: Unknown, str | None",
"/src/tomllib/_parser.py:629:38: Name `datetime_obj` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:66:18 Name `s` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:98:12 Name `char` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:101:12 Name `char` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:104:14 Name `char` used when possibly not defined",
"error[conflicting-declarations] /src/tomllib/_parser.py:108:17 Conflicting declared types for `second_char`: Unknown, str | None",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:115:14 Name `char` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:126:12 Name `char` used when possibly not defined",
"error[call-possibly-unbound-method] /src/tomllib/_parser.py:246:15 Method `__class_getitem__` of type `Literal[frozenset]` is possibly unbound",
"error[conflicting-declarations] /src/tomllib/_parser.py:267:9 Conflicting declared types for `char`: Unknown, str | None",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:348:20 Name `nest` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:353:5 Name `nest` used when possibly not defined",
"error[conflicting-declarations] /src/tomllib/_parser.py:364:9 Conflicting declared types for `char`: Unknown, str | None",
"error[conflicting-declarations] /src/tomllib/_parser.py:381:13 Conflicting declared types for `char`: Unknown, str | None",
"error[conflicting-declarations] /src/tomllib/_parser.py:395:9 Conflicting declared types for `char`: Unknown, str | None",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:453:24 Name `nest` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:455:9 Name `nest` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:482:16 Name `char` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:566:12 Name `char` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:573:12 Name `char` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:579:12 Name `char` used when possibly not defined",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:580:63 Name `char` used when possibly not defined",
"error[conflicting-declarations] /src/tomllib/_parser.py:590:9 Conflicting declared types for `char`: Unknown, str | None",
"error[possibly-unresolved-reference] /src/tomllib/_parser.py:629:38 Name `datetime_obj` used when possibly not defined",
"error[invalid-base] /src/tomllib/_parser.py:692:8354 Invalid class base with type `GenericAlias` (all bases must be a class, `Any`, `Unknown` or `Todo`)",
];
fn get_test_file(name: &str) -> TestFile {
@ -123,7 +124,14 @@ fn setup_rayon() {
fn benchmark_incremental(criterion: &mut Criterion) {
fn setup() -> Case {
let case = setup_case();
let result = case.db.check().unwrap();
let result: Vec<_> = case
.db
.check()
.unwrap()
.into_iter()
.map(|diagnostic| diagnostic.display(&case.db).to_string())
.collect();
assert_eq!(result, EXPECTED_DIAGNOSTICS);
@ -150,7 +158,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
let result = db.check().unwrap();
assert_eq!(result, EXPECTED_DIAGNOSTICS);
assert_eq!(result.len(), EXPECTED_DIAGNOSTICS.len());
}
setup_rayon();
@ -168,7 +176,12 @@ fn benchmark_cold(criterion: &mut Criterion) {
setup_case,
|case| {
let Case { db, .. } = case;
let result = db.check().unwrap();
let result: Vec<_> = db
.check()
.unwrap()
.into_iter()
.map(|diagnostic| diagnostic.display(db).to_string())
.collect();
assert_eq!(result, EXPECTED_DIAGNOSTICS);
},


@ -0,0 +1,180 @@
use crate::{
files::File,
source::{line_index, source_text},
Db,
};
use ruff_python_parser::ParseError;
use ruff_text_size::TextRange;
use std::borrow::Cow;
pub trait Diagnostic: Send + Sync + std::fmt::Debug {
fn rule(&self) -> &str;
fn message(&self) -> std::borrow::Cow<str>;
fn file(&self) -> File;
fn range(&self) -> Option<TextRange>;
fn severity(&self) -> Severity;
fn display<'a>(&'a self, db: &'a dyn Db) -> DisplayDiagnostic<'a>
where
Self: Sized,
{
DisplayDiagnostic {
db,
diagnostic: self,
}
}
}
#[derive(Debug, Clone, Copy)]
pub enum Severity {
Info,
Error,
}
pub struct DisplayDiagnostic<'db> {
db: &'db dyn Db,
diagnostic: &'db dyn Diagnostic,
}
impl<'db> DisplayDiagnostic<'db> {
pub fn new(db: &'db dyn Db, diagnostic: &'db dyn Diagnostic) -> Self {
Self { db, diagnostic }
}
}
impl std::fmt::Display for DisplayDiagnostic<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.diagnostic.severity() {
Severity::Info => f.write_str("info")?,
Severity::Error => f.write_str("error")?,
}
write!(
f,
"[{rule}] {path}",
rule = self.diagnostic.rule(),
path = self.diagnostic.file().path(self.db)
)?;
if let Some(range) = self.diagnostic.range() {
let index = line_index(self.db, self.diagnostic.file());
let source = source_text(self.db, self.diagnostic.file());
let start = index.source_location(range.start(), &source);
write!(f, ":{line}:{col}", line = start.row, col = start.column)?;
}
write!(f, " {message}", message = self.diagnostic.message())
}
}
impl<T> Diagnostic for Box<T>
where
T: Diagnostic,
{
fn rule(&self) -> &str {
(**self).rule()
}
fn message(&self) -> Cow<str> {
(**self).message()
}
fn file(&self) -> File {
(**self).file()
}
fn range(&self) -> Option<TextRange> {
(**self).range()
}
fn severity(&self) -> Severity {
(**self).severity()
}
}
impl<T> Diagnostic for std::sync::Arc<T>
where
T: Diagnostic,
{
fn rule(&self) -> &str {
(**self).rule()
}
fn message(&self) -> std::borrow::Cow<str> {
(**self).message()
}
fn file(&self) -> File {
(**self).file()
}
fn range(&self) -> Option<TextRange> {
(**self).range()
}
fn severity(&self) -> Severity {
(**self).severity()
}
}
impl Diagnostic for Box<dyn Diagnostic> {
fn rule(&self) -> &str {
(**self).rule()
}
fn message(&self) -> Cow<str> {
(**self).message()
}
fn file(&self) -> File {
(**self).file()
}
fn range(&self) -> Option<TextRange> {
(**self).range()
}
fn severity(&self) -> Severity {
(**self).severity()
}
}
#[derive(Debug)]
pub struct ParseDiagnostic {
file: File,
error: ParseError,
}
impl ParseDiagnostic {
pub fn new(file: File, error: ParseError) -> Self {
Self { file, error }
}
}
impl Diagnostic for ParseDiagnostic {
fn rule(&self) -> &str {
"invalid-syntax"
}
fn message(&self) -> Cow<str> {
self.error.error.to_string().into()
}
fn file(&self) -> File {
self.file
}
fn range(&self) -> Option<TextRange> {
Some(self.error.location)
}
fn severity(&self) -> Severity {
Severity::Error
}
}
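
The new module gives downstream crates one object-safe surface for diagnostics plus blanket impls for `Box<T>`, `Arc<T>`, and `Box<dyn Diagnostic>`. Note that `display` carries a `Self: Sized` bound, so bare `&dyn Diagnostic` values go through `DisplayDiagnostic::new` instead. A hedged sketch of a downstream implementation (`MyLintDiagnostic` and its fields are hypothetical):

// Hypothetical downstream diagnostic implementing the new trait.
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnostic, Severity};
use ruff_db::files::File;
use ruff_text_size::TextRange;
use std::borrow::Cow;

#[derive(Debug)]
struct MyLintDiagnostic {
    file: File,
    range: TextRange,
    message: String,
}

impl Diagnostic for MyLintDiagnostic {
    fn rule(&self) -> &str {
        "my-lint"
    }
    fn message(&self) -> Cow<str> {
        Cow::Borrowed(&self.message)
    }
    fn file(&self) -> File {
        self.file
    }
    fn range(&self) -> Option<TextRange> {
        Some(self.range)
    }
    fn severity(&self) -> Severity {
        Severity::Error
    }
}

// Rendering via the trait-object path, e.g. "error[my-lint] <path>:<line>:<column> <message>":
fn render(db: &dyn ruff_db::Db, diagnostic: &dyn Diagnostic) -> String {
    DisplayDiagnostic::new(db, diagnostic).to_string()
}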


@ -6,6 +6,7 @@ use crate::files::Files;
use crate::system::System;
use crate::vendored::VendoredFileSystem;
pub mod diagnostic;
pub mod display;
pub mod file_revision;
pub mod files;


@ -1,9 +1,7 @@
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;
use countme::Count;
use salsa::Accumulator;
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
@ -17,16 +15,14 @@ use crate::Db;
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
let path = file.path(db);
let _span = tracing::trace_span!("source_text", file = %path).entered();
let mut has_read_error = false;
let mut read_error = None;
let kind = if is_notebook(file.path(db)) {
file.read_to_notebook(db)
.unwrap_or_else(|error| {
tracing::debug!("Failed to read notebook '{path}': {error}");
has_read_error = true;
SourceDiagnostic(Arc::new(SourceTextError::FailedToReadNotebook(error)))
.accumulate(db);
read_error = Some(SourceTextError::FailedToReadNotebook(error.to_string()));
Notebook::empty()
})
.into()
@ -35,8 +31,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
.unwrap_or_else(|error| {
tracing::debug!("Failed to read file '{path}': {error}");
has_read_error = true;
SourceDiagnostic(Arc::new(SourceTextError::FailedToReadFile(error))).accumulate(db);
read_error = Some(SourceTextError::FailedToReadFile(error.to_string()));
String::new()
})
.into()
@ -45,7 +40,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
SourceText {
inner: Arc::new(SourceTextInner {
kind,
has_read_error,
read_error,
count: Count::new(),
}),
}
@ -98,8 +93,8 @@ impl SourceText {
}
/// Returns `true` if there was an error when reading the content of the file.
pub fn has_read_error(&self) -> bool {
self.inner.has_read_error
pub fn read_error(&self) -> Option<&SourceTextError> {
self.inner.read_error.as_ref()
}
}
@ -132,7 +127,7 @@ impl std::fmt::Debug for SourceText {
struct SourceTextInner {
count: Count<SourceText>,
kind: SourceTextKind,
has_read_error: bool,
read_error: Option<SourceTextError>,
}
#[derive(Eq, PartialEq)]
@ -153,21 +148,12 @@ impl From<Notebook> for SourceTextKind {
}
}
#[salsa::accumulator]
pub struct SourceDiagnostic(Arc<SourceTextError>);
impl std::fmt::Display for SourceDiagnostic {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.0, f)
}
}
#[derive(Debug, thiserror::Error)]
#[derive(Debug, thiserror::Error, PartialEq, Eq, Clone)]
pub enum SourceTextError {
#[error("Failed to read notebook: {0}`")]
FailedToReadNotebook(#[from] ruff_notebook::NotebookError),
FailedToReadNotebook(String),
#[error("Failed to read file: {0}")]
FailedToReadFile(#[from] std::io::Error),
FailedToReadFile(String),
}
/// Computes the [`LineIndex`] for `file`.
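
With this change, read failures are no longer pushed to a salsa accumulator; the error is stored on `SourceText` and surfaced by callers, e.g. as an `IOErrorDiagnostic` in `check_file`. An illustrative sketch of the new accessor (`db` and `file` are assumed inputs):

// Illustrative sketch: checking for read failures with the new accessor.
use ruff_db::files::File;
use ruff_db::source::source_text;
use ruff_db::Db;

fn report_read_error(db: &dyn Db, file: File) {
    let source = source_text(db, file);
    if let Some(error) = source.read_error() {
        // In `check_file` this becomes an `IOErrorDiagnostic` and type checking is skipped.
        eprintln!("{error}");
    }
}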