Convert Message::SyntaxError to use Diagnostic internally (#17784)

## Summary

This PR is a first step toward integration of the new `Diagnostic` type
into ruff. There are two main changes:
- A new `UnifiedFile` enum wrapping a `File` for red-knot and a
`SourceFile` for ruff
- ruff's `Message::SyntaxError` variant is now a `Diagnostic` instead of
a `SyntaxErrorMessage`

The second of these changes was mostly just a proof of concept for the
first, and it went pretty smoothly. Converting `DiagnosticMessage`s will
be most of the work in replacing `Message` entirely.
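
For orientation, here is a condensed sketch of the two changes, abridged from the diff below (not a standalone, compilable example; surrounding items and imports are elided):

```rust
// ruff_db: a diagnostic `Span` can now point at either kind of file.
pub enum UnifiedFile {
    Ty(File),         // salsa-interned `File` used by red-knot/ty
    Ruff(SourceFile), // heavier-weight `SourceFile` used by ruff
}

// ruff_linter: the syntax-error variant now carries a `ruff_db` Diagnostic,
// built through a small constructor instead of a dedicated struct.
pub enum Message {
    Diagnostic(DiagnosticMessage),
    SyntaxError(db::Diagnostic),
}

impl Message {
    pub fn syntax_error(
        message: impl std::fmt::Display,
        range: TextRange,
        file: SourceFile,
    ) -> Message {
        let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
        let span = Span::from(file).with_range(range);
        diag.annotate(Annotation::primary(span).message(message));
        Self::SyntaxError(diag)
    }
}
```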

## Test Plan

Existing tests, which show no changes.

---------

Co-authored-by: Carl Meyer <carl@astral.sh>
Co-authored-by: Micha Reiser <micha@reiser.io>
Brent Westbrook 2025-05-08 12:45:51 -04:00 committed by GitHub
parent 0763331f7f
commit 981bd70d39
31 changed files with 327 additions and 175 deletions

Cargo.lock generated
@@ -2814,6 +2814,7 @@ dependencies = [
  "regex",
  "ruff_annotate_snippets",
  "ruff_cache",
+ "ruff_db",
  "ruff_diagnostics",
  "ruff_macros",
  "ruff_notebook",

@@ -439,7 +439,7 @@ impl LintCacheData {
             .map(|msg| {
                 // Make sure that all message use the same source file.
                 assert_eq!(
-                    &msg.file,
+                    msg.file,
                     messages.first().unwrap().source_file(),
                     "message uses a different source file"
                 );

@@ -15,7 +15,7 @@ use rustc_hash::FxHashMap;
 use ruff_diagnostics::Diagnostic;
 use ruff_linter::codes::Rule;
 use ruff_linter::linter::{lint_fix, lint_only, FixTable, FixerResult, LinterResult, ParseSource};
-use ruff_linter::message::{Message, SyntaxErrorMessage};
+use ruff_linter::message::Message;
 use ruff_linter::package::PackageRoot;
 use ruff_linter::pyproject_toml::lint_pyproject_toml;
 use ruff_linter::settings::types::UnsafeFixes;
@@ -102,11 +102,7 @@ impl Diagnostics {
         let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
         let dummy = SourceFileBuilder::new(name, "").finish();
         Self::new(
-            vec![Message::SyntaxError(SyntaxErrorMessage {
-                message: err.to_string(),
-                range: TextRange::default(),
-                file: dummy,
-            })],
+            vec![Message::syntax_error(err, TextRange::default(), dummy)],
             FxHashMap::default(),
         )
     }

@@ -197,7 +197,7 @@ fn assert_diagnostics(db: &dyn Db, diagnostics: &[Diagnostic], expected: &[KeyDi
             diagnostic.id(),
             diagnostic
                 .primary_span()
-                .map(|span| span.file())
+                .map(|span| span.expect_ty_file())
                 .map(|file| file.path(db).as_str()),
             diagnostic
                 .primary_span()

@@ -1,15 +1,15 @@
 use std::{fmt::Formatter, sync::Arc};

+use render::{FileResolver, Input};
+use ruff_source_file::{SourceCode, SourceFile};
 use thiserror::Error;

 use ruff_annotate_snippets::Level as AnnotateLevel;
 use ruff_text_size::{Ranged, TextRange};

 pub use self::render::DisplayDiagnostic;
-use crate::files::File;
-use crate::Db;
-
-use self::render::FileResolver;
+use crate::{files::File, Db};

 mod render;
 mod stylesheet;
@@ -115,10 +115,9 @@ impl Diagnostic {
     /// callers should prefer using this with `write!` instead of `writeln!`.
     pub fn display<'a>(
         &'a self,
-        db: &'a dyn Db,
+        resolver: &'a dyn FileResolver,
         config: &'a DisplayDiagnosticConfig,
     ) -> DisplayDiagnostic<'a> {
-        let resolver = FileResolver::new(db);
         DisplayDiagnostic::new(resolver, config, self)
     }
@@ -233,6 +232,16 @@ impl Diagnostic {
         self.primary_annotation().map(|ann| ann.tags.as_slice())
     }

+    /// Returns the "primary" span of this diagnostic, panicking if it does not exist.
+    ///
+    /// This should typically only be used when working with diagnostics in Ruff, where diagnostics
+    /// are currently required to have a primary span.
+    ///
+    /// See [`Diagnostic::primary_span`] for more details.
+    pub fn expect_primary_span(&self) -> Span {
+        self.primary_span().expect("Expected a primary span")
+    }
+
     /// Returns a key that can be used to sort two diagnostics into the canonical order
     /// in which they should appear when rendered.
     pub fn rendering_sort_key<'a>(&'a self, db: &'a dyn Db) -> impl Ord + 'a {
@@ -267,11 +276,7 @@ impl Ord for RenderingSortKey<'_> {
             self.diagnostic.primary_span(),
             other.diagnostic.primary_span(),
         ) {
-            let order = span1
-                .file()
-                .path(self.db)
-                .as_str()
-                .cmp(span2.file().path(self.db).as_str());
+            let order = span1.file().path(&self.db).cmp(span2.file().path(&self.db));
             if order.is_ne() {
                 return order;
             }
@@ -643,6 +648,10 @@ impl DiagnosticId {
             DiagnosticId::UnknownRule => "unknown-rule",
         })
     }
+
+    pub fn is_invalid_syntax(&self) -> bool {
+        matches!(self, Self::InvalidSyntax)
+    }
 }

 #[derive(Copy, Clone, Debug, Eq, PartialEq, Error)]
@@ -668,6 +677,62 @@ impl std::fmt::Display for DiagnosticId {
     }
 }

+/// A unified file representation for both ruff and ty.
+///
+/// Such a representation is needed for rendering [`Diagnostic`]s that can optionally contain
+/// [`Annotation`]s with [`Span`]s that need to refer to the text of a file. However, ty and ruff
+/// use very different file types: a `Copy`-able salsa-interned [`File`], and a heavier-weight
+/// [`SourceFile`], respectively.
+///
+/// This enum presents a unified interface to these two types for the sake of creating [`Span`]s and
+/// emitting diagnostics from both ty and ruff.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum UnifiedFile {
+    Ty(File),
+    Ruff(SourceFile),
+}
+
+impl UnifiedFile {
+    pub fn path<'a>(&'a self, resolver: &'a dyn FileResolver) -> &'a str {
+        match self {
+            UnifiedFile::Ty(file) => resolver.path(*file),
+            UnifiedFile::Ruff(file) => file.name(),
+        }
+    }
+
+    fn diagnostic_source(&self, resolver: &dyn FileResolver) -> DiagnosticSource {
+        match self {
+            UnifiedFile::Ty(file) => DiagnosticSource::Ty(resolver.input(*file)),
+            UnifiedFile::Ruff(file) => DiagnosticSource::Ruff(file.clone()),
+        }
+    }
+}
+
+/// A unified wrapper for types that can be converted to a [`SourceCode`].
+///
+/// As with [`UnifiedFile`], ruff and ty use slightly different representations for source code.
+/// [`DiagnosticSource`] wraps both of these and provides the single
+/// [`DiagnosticSource::as_source_code`] method to produce a [`SourceCode`] with the appropriate
+/// lifetimes.
+///
+/// See [`UnifiedFile::diagnostic_source`] for a way to obtain a [`DiagnosticSource`] from a file
+/// and [`FileResolver`].
+#[derive(Clone, Debug)]
+enum DiagnosticSource {
+    Ty(Input),
+    Ruff(SourceFile),
+}
+
+impl DiagnosticSource {
+    /// Returns this input as a `SourceCode` for convenient querying.
+    fn as_source_code(&self) -> SourceCode {
+        match self {
+            DiagnosticSource::Ty(input) => SourceCode::new(input.text.as_str(), &input.line_index),
+            DiagnosticSource::Ruff(source) => SourceCode::new(source.source_text(), source.index()),
+        }
+    }
+}
+
 /// A span represents the source of a diagnostic.
 ///
 /// It consists of a `File` and an optional range into that file. When the
@@ -675,14 +740,14 @@ impl std::fmt::Display for DiagnosticId {
 /// the entire file. For example, when the file should be executable but isn't.
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Span {
-    file: File,
+    file: UnifiedFile,
     range: Option<TextRange>,
 }

 impl Span {
-    /// Returns the `File` attached to this `Span`.
-    pub fn file(&self) -> File {
-        self.file
+    /// Returns the `UnifiedFile` attached to this `Span`.
+    pub fn file(&self) -> &UnifiedFile {
+        &self.file
     }

     /// Returns the range, if available, attached to this `Span`.
@@ -703,10 +768,38 @@ impl Span {
     pub fn with_optional_range(self, range: Option<TextRange>) -> Span {
         Span { range, ..self }
     }
+
+    /// Returns the [`File`] attached to this [`Span`].
+    ///
+    /// Panics if the file is a [`UnifiedFile::Ruff`] instead of a [`UnifiedFile::Ty`].
+    pub fn expect_ty_file(&self) -> File {
+        match self.file {
+            UnifiedFile::Ty(file) => file,
+            UnifiedFile::Ruff(_) => panic!("Expected a ty `File`, found a ruff `SourceFile`"),
+        }
+    }
+
+    /// Returns the [`SourceFile`] attached to this [`Span`].
+    ///
+    /// Panics if the file is a [`UnifiedFile::Ty`] instead of a [`UnifiedFile::Ruff`].
+    pub fn expect_ruff_file(&self) -> &SourceFile {
+        match &self.file {
+            UnifiedFile::Ty(_) => panic!("Expected a ruff `SourceFile`, found a ty `File`"),
+            UnifiedFile::Ruff(file) => file,
+        }
+    }
 }

 impl From<File> for Span {
     fn from(file: File) -> Span {
+        let file = UnifiedFile::Ty(file);
+        Span { file, range: None }
+    }
+}
+
+impl From<SourceFile> for Span {
+    fn from(file: SourceFile) -> Self {
+        let file = UnifiedFile::Ruff(file);
         Span { file, range: None }
     }
 }

@@ -16,7 +16,8 @@ use crate::{
 };

 use super::{
-    Annotation, Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig, Severity, SubDiagnostic,
+    Annotation, Diagnostic, DiagnosticFormat, DiagnosticSource, DisplayDiagnosticConfig, Severity,
+    SubDiagnostic,
 };

 /// A type that implements `std::fmt::Display` for diagnostic rendering.
@@ -30,17 +31,16 @@ use super::{
 /// values. When using Salsa, this most commonly corresponds to the lifetime
 /// of a Salsa `Db`.
 /// * The lifetime of the diagnostic being rendered.
-#[derive(Debug)]
 pub struct DisplayDiagnostic<'a> {
     config: &'a DisplayDiagnosticConfig,
-    resolver: FileResolver<'a>,
+    resolver: &'a dyn FileResolver,
     annotate_renderer: AnnotateRenderer,
     diag: &'a Diagnostic,
 }

 impl<'a> DisplayDiagnostic<'a> {
     pub(crate) fn new(
-        resolver: FileResolver<'a>,
+        resolver: &'a dyn FileResolver,
         config: &'a DisplayDiagnosticConfig,
         diag: &'a Diagnostic,
     ) -> DisplayDiagnostic<'a> {
@@ -86,11 +86,13 @@ impl std::fmt::Display for DisplayDiagnostic<'_> {
                 write!(
                     f,
                     " {path}",
-                    path = fmt_styled(self.resolver.path(span.file()), stylesheet.emphasis)
+                    path = fmt_styled(span.file().path(self.resolver), stylesheet.emphasis)
                 )?;
                 if let Some(range) = span.range() {
-                    let input = self.resolver.input(span.file());
-                    let start = input.as_source_code().line_column(range.start());
+                    let diagnostic_source = span.file().diagnostic_source(self.resolver);
+                    let start = diagnostic_source
+                        .as_source_code()
+                        .line_column(range.start());

                     write!(
                         f,
@@ -115,7 +117,7 @@ impl std::fmt::Display for DisplayDiagnostic<'_> {
             .emphasis(stylesheet.emphasis)
             .none(stylesheet.none);

-        let resolved = Resolved::new(&self.resolver, self.diag);
+        let resolved = Resolved::new(self.resolver, self.diag);
         let renderable = resolved.to_renderable(self.config.context);
         for diag in renderable.diagnostics.iter() {
             writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
@@ -144,7 +146,7 @@ struct Resolved<'a> {
 impl<'a> Resolved<'a> {
     /// Creates a new resolved set of diagnostics.
-    fn new(resolver: &FileResolver<'a>, diag: &'a Diagnostic) -> Resolved<'a> {
+    fn new(resolver: &'a dyn FileResolver, diag: &'a Diagnostic) -> Resolved<'a> {
         let mut diagnostics = vec![];
         diagnostics.push(ResolvedDiagnostic::from_diagnostic(resolver, diag));
         for sub in &diag.inner.subs {
@@ -182,7 +184,7 @@ struct ResolvedDiagnostic<'a> {
 impl<'a> ResolvedDiagnostic<'a> {
     /// Resolve a single diagnostic.
     fn from_diagnostic(
-        resolver: &FileResolver<'a>,
+        resolver: &'a dyn FileResolver,
         diag: &'a Diagnostic,
     ) -> ResolvedDiagnostic<'a> {
         let annotations: Vec<_> = diag
@@ -190,9 +192,9 @@ impl<'a> ResolvedDiagnostic<'a> {
             .annotations
             .iter()
             .filter_map(|ann| {
-                let path = resolver.path(ann.span.file);
-                let input = resolver.input(ann.span.file);
-                ResolvedAnnotation::new(path, &input, ann)
+                let path = ann.span.file.path(resolver);
+                let diagnostic_source = ann.span.file.diagnostic_source(resolver);
+                ResolvedAnnotation::new(path, &diagnostic_source, ann)
             })
             .collect();
         let message = if diag.inner.message.as_str().is_empty() {
@@ -216,7 +218,7 @@ impl<'a> ResolvedDiagnostic<'a> {
     /// Resolve a single sub-diagnostic.
     fn from_sub_diagnostic(
-        resolver: &FileResolver<'a>,
+        resolver: &'a dyn FileResolver,
         diag: &'a SubDiagnostic,
     ) -> ResolvedDiagnostic<'a> {
         let annotations: Vec<_> = diag
@@ -224,9 +226,9 @@ impl<'a> ResolvedDiagnostic<'a> {
             .annotations
             .iter()
             .filter_map(|ann| {
-                let path = resolver.path(ann.span.file);
-                let input = resolver.input(ann.span.file);
-                ResolvedAnnotation::new(path, &input, ann)
+                let path = ann.span.file.path(resolver);
+                let diagnostic_source = ann.span.file.diagnostic_source(resolver);
+                ResolvedAnnotation::new(path, &diagnostic_source, ann)
             })
             .collect();
         ResolvedDiagnostic {
@@ -259,10 +261,18 @@ impl<'a> ResolvedDiagnostic<'a> {
                 continue;
             };
-            let prev_context_ends =
-                context_after(&prev.input.as_source_code(), context, prev.line_end).get();
-            let this_context_begins =
-                context_before(&ann.input.as_source_code(), context, ann.line_start).get();
+            let prev_context_ends = context_after(
+                &prev.diagnostic_source.as_source_code(),
+                context,
+                prev.line_end,
+            )
+            .get();
+            let this_context_begins = context_before(
+                &ann.diagnostic_source.as_source_code(),
+                context,
+                ann.line_start,
+            )
+            .get();
             // The boundary case here is when `prev_context_ends`
             // is exactly one less than `this_context_begins`. In
             // that case, the context windows are adajcent and we
@@ -304,7 +314,7 @@ impl<'a> ResolvedDiagnostic<'a> {
 #[derive(Debug)]
 struct ResolvedAnnotation<'a> {
     path: &'a str,
-    input: Input,
+    diagnostic_source: DiagnosticSource,
     range: TextRange,
     line_start: OneIndexed,
     line_end: OneIndexed,
@@ -318,8 +328,12 @@ impl<'a> ResolvedAnnotation<'a> {
     /// `path` is the path of the file that this annotation points to.
     ///
     /// `input` is the contents of the file that this annotation points to.
-    fn new(path: &'a str, input: &Input, ann: &'a Annotation) -> Option<ResolvedAnnotation<'a>> {
-        let source = input.as_source_code();
+    fn new(
+        path: &'a str,
+        diagnostic_source: &DiagnosticSource,
+        ann: &'a Annotation,
+    ) -> Option<ResolvedAnnotation<'a>> {
+        let source = diagnostic_source.as_source_code();
         let (range, line_start, line_end) = match (ann.span.range(), ann.message.is_some()) {
             // An annotation with no range AND no message is probably(?)
             // meaningless, but we should try to render it anyway.
@@ -345,7 +359,7 @@ impl<'a> ResolvedAnnotation<'a> {
         };
         Some(ResolvedAnnotation {
             path,
-            input: input.clone(),
+            diagnostic_source: diagnostic_source.clone(),
             range,
             line_start,
             line_end,
@@ -510,8 +524,8 @@ impl<'r> RenderableSnippet<'r> {
             !anns.is_empty(),
             "creating a renderable snippet requires a non-zero number of annotations",
         );
-        let input = &anns[0].input;
-        let source = input.as_source_code();
+        let diagnostic_source = &anns[0].diagnostic_source;
+        let source = diagnostic_source.as_source_code();
         let has_primary = anns.iter().any(|ann| ann.is_primary);

         let line_start = context_before(
@@ -527,7 +541,7 @@ impl<'r> RenderableSnippet<'r> {
         let snippet_start = source.line_start(line_start);
         let snippet_end = source.line_end(line_end);

-        let snippet = input
+        let snippet = diagnostic_source
             .as_source_code()
             .slice(TextRange::new(snippet_start, snippet_end));
@@ -613,7 +627,7 @@ impl<'r> RenderableAnnotation<'r> {
     }
 }

-/// A type that facilitates the retrieval of source code from a `Span`.
+/// A trait that facilitates the retrieval of source code from a `Span`.
 ///
 /// At present, this is tightly coupled with a Salsa database. In the future,
 /// it is intended for this resolver to become an abstraction providing a
@@ -628,36 +642,24 @@ impl<'r> RenderableAnnotation<'r> {
 /// callers will need to pass in a different "resolver" for turning `Span`s
 /// into actual file paths/contents. The infrastructure for this isn't fully in
 /// place, but this type serves to demarcate the intended abstraction boundary.
-pub(crate) struct FileResolver<'a> {
-    db: &'a dyn Db,
-}
-
-impl<'a> FileResolver<'a> {
-    /// Creates a new resolver from a Salsa database.
-    pub(crate) fn new(db: &'a dyn Db) -> FileResolver<'a> {
-        FileResolver { db }
-    }
-
+pub trait FileResolver {
     /// Returns the path associated with the file given.
-    fn path(&self, file: File) -> &'a str {
-        relativize_path(
-            self.db.system().current_directory(),
-            file.path(self.db).as_str(),
-        )
-    }
+    fn path(&self, file: File) -> &str;

     /// Returns the input contents associated with the file given.
-    fn input(&self, file: File) -> Input {
-        Input {
-            text: source_text(self.db, file),
-            line_index: line_index(self.db, file),
-        }
-    }
+    fn input(&self, file: File) -> Input;
 }

-impl std::fmt::Debug for FileResolver<'_> {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
-        write!(f, "<salsa based file resolver>")
+impl FileResolver for &dyn Db {
+    fn path(&self, file: File) -> &str {
+        relativize_path(self.system().current_directory(), file.path(*self).as_str())
+    }
+
+    fn input(&self, file: File) -> Input {
+        Input {
+            text: source_text(*self, file),
+            line_index: line_index(*self, file),
+        }
     }
 }
@@ -667,16 +669,9 @@ impl std::fmt::Debug for FileResolver<'_> {
 /// This contains the actual content of that input as well as a
 /// line index for efficiently querying its contents.
 #[derive(Clone, Debug)]
-struct Input {
-    text: SourceText,
-    line_index: LineIndex,
-}
-
-impl Input {
-    /// Returns this input as a `SourceCode` for convenient querying.
-    fn as_source_code(&self) -> SourceCode<'_, '_> {
-        SourceCode::new(self.text.as_str(), &self.line_index)
-    }
+pub struct Input {
+    pub(crate) text: SourceText,
+    pub(crate) line_index: LineIndex,
 }

 /// Returns the line number accounting for the given `len`
@@ -730,6 +725,7 @@ mod tests {
     use crate::files::system_path_to_file;
     use crate::system::{DbWithWritableSystem, SystemPath};
     use crate::tests::TestDb;
+    use crate::Upcast;

     use super::*;
@@ -2174,8 +2170,9 @@ watermelon
     fn span(&self, path: &str, line_offset_start: &str, line_offset_end: &str) -> Span {
         let span = self.path(path);
-        let text = source_text(&self.db, span.file());
-        let line_index = line_index(&self.db, span.file());
+        let file = span.expect_ty_file();
+        let text = source_text(&self.db, file);
+        let line_index = line_index(&self.db, file);
         let source = SourceCode::new(text.as_str(), &line_index);

         let (line_start, offset_start) = parse_line_offset(line_offset_start);
@@ -2237,7 +2234,7 @@ watermelon
     ///
     /// (This will set the "printed" flag on `Diagnostic`.)
     fn render(&self, diag: &Diagnostic) -> String {
-        diag.display(&self.db, &self.config).to_string()
+        diag.display(&self.db.upcast(), &self.config).to_string()
     }
 }

@@ -67,7 +67,7 @@ mod tests {
     use crate::system::TestSystem;
     use crate::system::{DbWithTestSystem, System};
     use crate::vendored::VendoredFileSystem;
-    use crate::Db;
+    use crate::{Db, Upcast};

     type Events = Arc<Mutex<Vec<salsa::Event>>>;
@@ -140,6 +140,15 @@ mod tests {
         }
     }

+    impl Upcast<dyn Db> for TestDb {
+        fn upcast(&self) -> &(dyn Db + 'static) {
+            self
+        }
+
+        fn upcast_mut(&mut self) -> &mut (dyn Db + 'static) {
+            self
+        }
+    }
+
     impl DbWithTestSystem for TestDb {
         fn test_system(&self) -> &TestSystem {
             &self.system

@@ -15,6 +15,7 @@ license = { workspace = true }
 [dependencies]
 ruff_annotate_snippets = { workspace = true }
 ruff_cache = { workspace = true }
+ruff_db = { workspace = true }
 ruff_diagnostics = { workspace = true, features = ["serde"] }
 ruff_notebook = { workspace = true }
 ruff_macros = { workspace = true }

@@ -17,7 +17,7 @@ impl Emitter for AzureEmitter {
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         for message in messages {
-            let location = if context.is_notebook(message.filename()) {
+            let location = if context.is_notebook(&message.filename()) {
                 // We can't give a reasonable location for the structured formats,
                 // so we show one that's clearly a fallback
                 LineColumn::default()

@@ -22,7 +22,7 @@ use crate::text_helpers::ShowNonprinting;
 /// * Compute the diff from the [`Edit`] because diff calculation is expensive.
 pub(super) struct Diff<'a> {
     fix: &'a Fix,
-    source_code: &'a SourceFile,
+    source_code: SourceFile,
 }

 impl<'a> Diff<'a> {

@@ -19,7 +19,7 @@ impl Emitter for GithubEmitter {
     ) -> anyhow::Result<()> {
         for message in messages {
             let source_location = message.compute_start_location();
-            let location = if context.is_notebook(message.filename()) {
+            let location = if context.is_notebook(&message.filename()) {
                 // We can't give a reasonable location for the structured formats,
                 // so we show one that's clearly a fallback
                 LineColumn::default()
@@ -43,7 +43,7 @@ impl Emitter for GithubEmitter {
             write!(
                 writer,
                 "{path}:{row}:{column}:",
-                path = relativize_path(message.filename()),
+                path = relativize_path(&*message.filename()),
                 row = location.line,
                 column = location.column,
             )?;

@@ -62,7 +62,7 @@ impl Serialize for SerializedMessages<'_> {
             let start_location = message.compute_start_location();
             let end_location = message.compute_end_location();

-            let lines = if self.context.is_notebook(message.filename()) {
+            let lines = if self.context.is_notebook(&message.filename()) {
                 // We can't give a reasonable location for the structured formats,
                 // so we show one that's clearly a fallback
                 json!({
@@ -77,8 +77,8 @@ impl Serialize for SerializedMessages<'_> {
             };

             let path = self.project_dir.as_ref().map_or_else(
-                || relativize_path(message.filename()),
-                |project_dir| relativize_path_to(message.filename(), project_dir),
+                || relativize_path(&*message.filename()),
+                |project_dir| relativize_path_to(&*message.filename(), project_dir),
             );

             let mut message_fingerprint = fingerprint(message, &path, 0);

@@ -65,7 +65,7 @@ impl Emitter for GroupedEmitter {
             let column_length = calculate_print_width(max_column_length);

             // Print the filename.
-            writeln!(writer, "{}:", relativize_path(filename).underline())?;
+            writeln!(writer, "{}:", relativize_path(&*filename).underline())?;

             // Print each message.
             for message in messages {
@@ -73,7 +73,7 @@ impl Emitter for GroupedEmitter {
                     writer,
                     "{}",
                     DisplayGroupedMessage {
-                        notebook_index: context.notebook_index(message.filename()),
+                        notebook_index: context.notebook_index(&message.filename()),
                         message,
                         show_fix_status: self.show_fix_status,
                         unsafe_fixes: self.unsafe_fixes,

@@ -49,8 +49,9 @@ impl Serialize for ExpandedMessages<'_> {
 }

 pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext) -> Value {
-    let source_code = message.source_file().to_source_code();
-    let notebook_index = context.notebook_index(message.filename());
+    let source_file = message.source_file();
+    let source_code = source_file.to_source_code();
+    let notebook_index = context.notebook_index(&message.filename());

     let fix = message.fix().map(|fix| {
         json!({

@@ -32,7 +32,7 @@ impl Emitter for JunitEmitter {
             report.add_test_suite(test_suite);
         } else {
             for (filename, messages) in group_messages_by_filename(messages) {
-                let mut test_suite = TestSuite::new(filename);
+                let mut test_suite = TestSuite::new(&filename);
                 test_suite
                     .extra
                     .insert(XmlString::new("package"), XmlString::new("org.ruff"));
@@ -44,7 +44,7 @@ impl Emitter for JunitEmitter {
                     } = message;
                     let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure);
                     status.set_message(message.body());
-                    let location = if context.is_notebook(message.filename()) {
+                    let location = if context.is_notebook(&message.filename()) {
                         // We can't give a reasonable location for the structured formats,
                         // so we show one that's clearly a fallback
                         LineColumn::default()
@@ -66,7 +66,7 @@ impl Emitter for JunitEmitter {
                         },
                         status,
                     );
-                    let file_path = Path::new(filename);
+                    let file_path = Path::new(&*filename);
                    let file_stem = file_path.file_stem().unwrap().to_str().unwrap();
                     let classname = file_path.parent().unwrap().join(file_stem);
                     case.set_classname(classname.to_str().unwrap());

@@ -1,8 +1,10 @@
+use std::borrow::Cow;
 use std::cmp::Ordering;
 use std::collections::BTreeMap;
 use std::io::Write;
 use std::ops::Deref;

+use ruff_db::diagnostic::{self as db, Annotation, DiagnosticId, Severity, Span};
 use ruff_python_parser::semantic_errors::SemanticSyntaxError;
 use rustc_hash::FxHashMap;
@@ -45,7 +47,7 @@ mod text;
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Message {
     Diagnostic(DiagnosticMessage),
-    SyntaxError(SyntaxErrorMessage),
+    SyntaxError(db::Diagnostic),
 }

 /// A diagnostic message corresponding to a rule violation.
@@ -59,14 +61,6 @@ pub struct DiagnosticMessage {
     pub noqa_offset: TextSize,
 }

-/// A syntax error message raised by the parser.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct SyntaxErrorMessage {
-    pub message: String,
-    pub range: TextRange,
-    pub file: SourceFile,
-}
-
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum MessageKind {
     Diagnostic(Rule),
@@ -83,6 +77,17 @@ impl MessageKind {
 }

 impl Message {
+    pub fn syntax_error(
+        message: impl std::fmt::Display,
+        range: TextRange,
+        file: SourceFile,
+    ) -> Message {
+        let mut diag = db::Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
+        let span = Span::from(file).with_range(range);
+        diag.annotate(Annotation::primary(span).message(message));
+        Self::SyntaxError(diag)
+    }
+
     /// Create a [`Message`] from the given [`Diagnostic`] corresponding to a rule violation.
     pub fn from_diagnostic(
         diagnostic: Diagnostic,
@@ -114,14 +119,14 @@ impl Message {
             .next()
             .map_or(TextSize::new(0), TextLen::text_len);

-        Message::SyntaxError(SyntaxErrorMessage {
-            message: format!(
+        Message::syntax_error(
+            format_args!(
                 "SyntaxError: {}",
                 DisplayParseErrorType::new(&parse_error.error)
             ),
-            range: TextRange::at(parse_error.location.start(), len),
+            TextRange::at(parse_error.location.start(), len),
             file,
-        })
+        )
     }

     /// Create a [`Message`] from the given [`UnsupportedSyntaxError`].
@@ -129,11 +134,11 @@ impl Message {
         unsupported_syntax_error: &UnsupportedSyntaxError,
         file: SourceFile,
     ) -> Message {
-        Message::SyntaxError(SyntaxErrorMessage {
-            message: format!("SyntaxError: {unsupported_syntax_error}"),
-            range: unsupported_syntax_error.range,
+        Message::syntax_error(
+            format_args!("SyntaxError: {unsupported_syntax_error}"),
+            unsupported_syntax_error.range,
             file,
-        })
+        )
     }

     /// Create a [`Message`] from the given [`SemanticSyntaxError`].
@@ -141,11 +146,11 @@ impl Message {
         semantic_syntax_error: &SemanticSyntaxError,
         file: SourceFile,
     ) -> Message {
-        Message::SyntaxError(SyntaxErrorMessage {
-            message: format!("SyntaxError: {semantic_syntax_error}"),
-            range: semantic_syntax_error.range,
+        Message::syntax_error(
+            format_args!("SyntaxError: {semantic_syntax_error}"),
+            semantic_syntax_error.range,
             file,
-        })
+        )
     }

     pub const fn as_diagnostic_message(&self) -> Option<&DiagnosticMessage> {
@@ -168,8 +173,11 @@ impl Message {
     }

     /// Returns `true` if `self` is a syntax error message.
-    pub const fn is_syntax_error(&self) -> bool {
-        matches!(self, Message::SyntaxError(_))
+    pub fn is_syntax_error(&self) -> bool {
+        match self {
+            Message::Diagnostic(_) => false,
+            Message::SyntaxError(diag) => diag.id().is_invalid_syntax(),
+        }
     }

     /// Returns a message kind.
@@ -192,7 +200,11 @@ impl Message {
     pub fn body(&self) -> &str {
         match self {
             Message::Diagnostic(m) => &m.kind.body,
-            Message::SyntaxError(m) => &m.message,
+            Message::SyntaxError(m) => m
+                .primary_annotation()
+                .expect("Expected a primary annotation for a ruff diagnostic")
+                .get_message()
+                .expect("Expected a message for a ruff diagnostic"),
         }
     }
@@ -234,27 +246,47 @@ impl Message {
     }

     /// Returns the filename for the message.
-    pub fn filename(&self) -> &str {
-        self.source_file().name()
+    pub fn filename(&self) -> Cow<'_, str> {
+        match self {
+            Message::Diagnostic(m) => Cow::Borrowed(m.file.name()),
+            Message::SyntaxError(diag) => Cow::Owned(
+                diag.expect_primary_span()
+                    .expect_ruff_file()
+                    .name()
+                    .to_string(),
+            ),
+        }
     }

     /// Computes the start source location for the message.
     pub fn compute_start_location(&self) -> LineColumn {
-        self.source_file()
-            .to_source_code()
-            .line_column(self.start())
+        match self {
+            Message::Diagnostic(m) => m.file.to_source_code().line_column(m.range.start()),
+            Message::SyntaxError(diag) => diag
+                .expect_primary_span()
+                .expect_ruff_file()
+                .to_source_code()
+                .line_column(self.start()),
+        }
     }

     /// Computes the end source location for the message.
     pub fn compute_end_location(&self) -> LineColumn {
-        self.source_file().to_source_code().line_column(self.end())
+        match self {
+            Message::Diagnostic(m) => m.file.to_source_code().line_column(m.range.end()),
+            Message::SyntaxError(diag) => diag
+                .expect_primary_span()
+                .expect_ruff_file()
+                .to_source_code()
+                .line_column(self.end()),
+        }
     }

     /// Returns the [`SourceFile`] which the message belongs to.
-    pub fn source_file(&self) -> &SourceFile {
+    pub fn source_file(&self) -> SourceFile {
         match self {
-            Message::Diagnostic(m) => &m.file,
-            Message::SyntaxError(m) => &m.file,
+            Message::Diagnostic(m) => m.file.clone(),
+            Message::SyntaxError(m) => m.expect_primary_span().expect_ruff_file().clone(),
         }
     }
 }
@@ -275,7 +307,10 @@ impl Ranged for Message {
     fn range(&self) -> TextRange {
         match self {
             Message::Diagnostic(m) => m.range,
-            Message::SyntaxError(m) => m.range,
+            Message::SyntaxError(m) => m
+                .expect_primary_span()
+                .range()
+                .expect("Expected range for ruff span"),
         }
     }
 }
@@ -293,11 +328,11 @@ impl Deref for MessageWithLocation<'_> {
     }
 }

-fn group_messages_by_filename(messages: &[Message]) -> BTreeMap<&str, Vec<MessageWithLocation>> {
+fn group_messages_by_filename(messages: &[Message]) -> BTreeMap<String, Vec<MessageWithLocation>> {
     let mut grouped_messages = BTreeMap::default();
     for message in messages {
         grouped_messages
-            .entry(message.filename())
+            .entry(message.filename().to_string())
             .or_insert_with(Vec::new)
             .push(MessageWithLocation {
                 message,

@@ -18,7 +18,7 @@ impl Emitter for PylintEmitter {
         context: &EmitterContext,
     ) -> anyhow::Result<()> {
         for message in messages {
-            let row = if context.is_notebook(message.filename()) {
+            let row = if context.is_notebook(&message.filename()) {
                 // We can't give a reasonable location for the structured formats,
                 // so we show one that's clearly a fallback
                 OneIndexed::from_zero_indexed(0)
@@ -39,7 +39,7 @@ impl Emitter for PylintEmitter {
             writeln!(
                 writer,
                 "{path}:{row}: {body}",
-                path = relativize_path(message.filename()),
+                path = relativize_path(&*message.filename()),
             )?;
         }

@@ -57,7 +57,8 @@ impl Serialize for ExpandedMessages<'_> {
 }

 fn message_to_rdjson_value(message: &Message) -> Value {
-    let source_code = message.source_file().to_source_code();
+    let source_file = message.source_file();
+    let source_code = source_file.to_source_code();
     let start_location = source_code.line_column(message.start());
     let end_location = source_code.line_column(message.end());

@@ -121,7 +121,7 @@ impl SarifResult {
     fn from_message(message: &Message) -> Result<Self> {
         let start_location = message.compute_start_location();
         let end_location = message.compute_end_location();
-        let path = normalize_path(message.filename());
+        let path = normalize_path(&*message.filename());
         Ok(Self {
             rule: message.rule(),
             level: "error".to_string(),
@@ -141,7 +141,7 @@ impl SarifResult {
     fn from_message(message: &Message) -> Result<Self> {
         let start_location = message.compute_start_location();
         let end_location = message.compute_end_location();
-        let path = normalize_path(message.filename());
+        let path = normalize_path(&*message.filename());
         Ok(Self {
             rule: message.rule(),
             level: "error".to_string(),

@@ -73,12 +73,12 @@ impl Emitter for TextEmitter {
             write!(
                 writer,
                 "{path}{sep}",
-                path = relativize_path(message.filename()).bold(),
+                path = relativize_path(&*message.filename()).bold(),
                 sep = ":".cyan(),
             )?;

             let start_location = message.compute_start_location();
-            let notebook_index = context.notebook_index(message.filename());
+            let notebook_index = context.notebook_index(&message.filename());

             // Check if we're working on a jupyter notebook and translate positions with cell accordingly
             let diagnostic_location = if let Some(notebook_index) = notebook_index {
@@ -191,7 +191,8 @@ impl Display for MessageCodeFrame<'_> {
             Vec::new()
         };

-        let source_code = self.message.source_file().to_source_code();
+        let source_file = self.message.source_file();
+        let source_code = source_file.to_source_code();
         let content_start_index = source_code.line_index(self.message.start());
         let mut start_index = content_start_index.saturating_sub(2);

@@ -14,7 +14,7 @@ use ruff_linter::{
     directives::{extract_directives, Flags},
     generate_noqa_edits,
     linter::check_path,
-    message::{DiagnosticMessage, Message, SyntaxErrorMessage},
+    message::{DiagnosticMessage, Message},
     package::PackageRoot,
     packaging::detect_package_root,
     registry::AsRule,
@@ -173,10 +173,10 @@ pub(crate) fn check(
                 locator.to_index(),
                 encoding,
             )),
-            Message::SyntaxError(syntax_error_message) => {
+            Message::SyntaxError(_) => {
                 if show_syntax_errors {
                     Some(syntax_error_to_lsp_diagnostic(
-                        syntax_error_message,
+                        &message,
                         &source_kind,
                         locator.to_index(),
                         encoding,
@@ -322,7 +322,7 @@ fn to_lsp_diagnostic(
 }

 fn syntax_error_to_lsp_diagnostic(
-    syntax_error: SyntaxErrorMessage,
+    syntax_error: &Message,
     source_kind: &SourceKind,
     index: &LineIndex,
     encoding: PositionEncoding,
@@ -331,7 +331,7 @@ fn syntax_error_to_lsp_diagnostic(
     let cell: usize;

     if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
-        NotebookRange { cell, range } = syntax_error.range.to_notebook_range(
+        NotebookRange { cell, range } = syntax_error.range().to_notebook_range(
             source_kind.source_code(),
             index,
             notebook_index,
@@ -340,7 +340,7 @@ fn syntax_error_to_lsp_diagnostic(
     } else {
         cell = usize::default();
         range = syntax_error
-            .range
+            .range()
            .to_range(source_kind.source_code(), index, encoding);
     }
@@ -353,7 +353,7 @@ fn syntax_error_to_lsp_diagnostic(
             code: None,
             code_description: None,
             source: Some(DIAGNOSTIC_NAME.into()),
-            message: syntax_error.message,
+            message: syntax_error.body().to_string(),
             related_information: None,
             data: None,
         },

@@ -195,7 +195,7 @@ impl SourceFile {
         }
     }

-    fn index(&self) -> &LineIndex {
+    pub fn index(&self) -> &LineIndex {
         self.inner
             .line_index
             .get_or_init(|| LineIndex::from_source_text(self.source_text()))

@@ -1,7 +1,7 @@
 use std::path::Path;

 use js_sys::Error;
-use ruff_linter::message::{DiagnosticMessage, Message, SyntaxErrorMessage};
+use ruff_linter::message::{DiagnosticMessage, Message};
 use ruff_linter::settings::types::PythonVersion;
 use serde::{Deserialize, Serialize};
 use wasm_bindgen::prelude::*;
@@ -230,15 +230,13 @@ impl Workspace {
                     .collect(),
                 }),
             },
-            Message::SyntaxError(SyntaxErrorMessage { message, range, .. }) => {
-                ExpandedMessage {
-                    code: None,
-                    message,
-                    start_location: source_code.line_column(range.start()).into(),
-                    end_location: source_code.line_column(range.end()).into(),
-                    fix: None,
-                }
-            }
+            Message::SyntaxError(_) => ExpandedMessage {
+                code: None,
+                message: message.body().to_string(),
+                start_location: source_code.line_column(message.range().start()).into(),
+                end_location: source_code.line_column(message.range().end()).into(),
+                fix: None,
+            },
         })
         .collect();

@@ -14,6 +14,7 @@ use rayon::ThreadPoolBuilder;
 use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, Severity};
 use ruff_db::max_parallelism;
 use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
+use ruff_db::Upcast;
 use salsa::plumbing::ZalsaDatabase;
 use ty_project::metadata::options::Options;
 use ty_project::watch::ProjectWatcher;
@@ -298,7 +299,11 @@ impl MainLoop {
                 let diagnostics_count = result.len();

                 for diagnostic in result {
-                    write!(stdout, "{}", diagnostic.display(db, &display_config))?;
+                    write!(
+                        stdout,
+                        "{}",
+                        diagnostic.display(&db.upcast(), &display_config)
+                    )?;

                     max_severity = max_severity.max(diagnostic.severity());
                 }

@@ -136,6 +136,7 @@ mod tests {
         Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig, LintName,
         Severity, Span,
     };
+    use ruff_db::Upcast;
     use ruff_text_size::{Ranged, TextRange};

     #[test]
@@ -773,7 +774,7 @@ mod tests {
                     .message("Cursor offset"),
             );

-            write!(buf, "{}", diagnostic.display(&self.db, &config)).unwrap();
+            write!(buf, "{}", diagnostic.display(&self.db.upcast(), &config)).unwrap();

             buf
         }

@@ -204,6 +204,7 @@ mod tests {
     use ruff_db::diagnostic::{Diagnostic, DiagnosticFormat, DisplayDiagnosticConfig};
     use ruff_db::files::{system_path_to_file, File};
     use ruff_db::system::{DbWithWritableSystem, SystemPath, SystemPathBuf};
+    use ruff_db::Upcast;
     use ruff_python_ast::PythonVersion;
     use ruff_text_size::TextSize;
     use ty_python_semantic::{
@@ -285,7 +286,7 @@ mod tests {
             .format(DiagnosticFormat::Full);
         for diagnostic in diagnostics {
             let diag = diagnostic.into_diagnostic();
-            write!(buf, "{}", diag.display(&self.db, &config)).unwrap();
+            write!(buf, "{}", diag.display(&self.db.upcast(), &config)).unwrap();
         }

         buf

@@ -126,6 +126,16 @@ impl Upcast<dyn IdeDb> for ProjectDatabase {
     }
 }

+impl Upcast<dyn Db> for ProjectDatabase {
+    fn upcast(&self) -> &(dyn Db + 'static) {
+        self
+    }
+
+    fn upcast_mut(&mut self) -> &mut (dyn Db + 'static) {
+        self
+    }
+}
+
 #[salsa::db]
 impl IdeDb for ProjectDatabase {}

@@ -521,7 +521,7 @@ impl Drop for DiagnosticGuard<'_, '_> {
         };

         let expected_file = self.ctx.file();
-        let got_file = ann.get_span().file();
+        let got_file = ann.get_span().expect_ty_file();
         assert_eq!(
             expected_file,
             got_file,

@@ -76,8 +76,9 @@ fn to_lsp_diagnostic(
     encoding: crate::PositionEncoding,
 ) -> Diagnostic {
     let range = if let Some(span) = diagnostic.primary_span() {
-        let index = line_index(db.upcast(), span.file());
-        let source = source_text(db.upcast(), span.file());
+        let file = span.expect_ty_file();
+        let index = line_index(db.upcast(), file);
+        let source = source_text(db.upcast(), file);

         span.range()
             .map(|range| range.to_lsp_range(&source, &index, encoding))

@@ -13,6 +13,7 @@ use ruff_db::panic::catch_unwind;
 use ruff_db::parsed::parsed_module;
 use ruff_db::system::{DbWithWritableSystem as _, SystemPath, SystemPathBuf};
 use ruff_db::testing::{setup_logging, setup_logging_with_filter};
+use ruff_db::Upcast;
 use ruff_source_file::{LineIndex, OneIndexed};
 use std::backtrace::BacktraceStatus;
 use std::fmt::Write;
@@ -464,7 +465,7 @@ fn create_diagnostic_snapshot(
             writeln!(snapshot).unwrap();
         }
         writeln!(snapshot, "```").unwrap();
-        write!(snapshot, "{}", diag.display(db, &display_config)).unwrap();
+        write!(snapshot, "{}", diag.display(&db.upcast(), &display_config)).unwrap();
         writeln!(snapshot, "```").unwrap();
     }
     snapshot

@@ -373,7 +373,7 @@ impl Diagnostic {
         self.inner.primary_span().and_then(|span| {
             Some(Range::from_file_range(
                 &workspace.db,
-                FileRange::new(span.file(), span.range()?),
+                FileRange::new(span.expect_ty_file(), span.range()?),
                 workspace.position_encoding,
             ))
         })
@@ -383,7 +383,7 @@ impl Diagnostic {
     pub fn display(&self, workspace: &Workspace) -> JsString {
         let config = DisplayDiagnosticConfig::default().color(false);
         self.inner
-            .display(workspace.db.upcast(), &config)
+            .display(&workspace.db.upcast(), &config)
             .to_string()
             .into()
     }