refactor: prepare for linting on syntaxes (#1640)

* refactor: for query

* refactor: for diag

* feat: lazy hash expr info

* feat: hash resolves

* fix: update snapshot
Myriad-Dreamin 2025-04-09 04:10:47 +08:00 committed by GitHub
parent 01afa463f4
commit 054d3aecc0
18 changed files with 261 additions and 241 deletions

View file

@@ -1,7 +1,6 @@
 //! Semantic static and dynamic analysis of the source code.

 mod bib;
-use std::path::Path;
 pub(crate) use bib::*;
 pub mod call;
@@ -24,11 +23,6 @@ pub mod signature;
 pub use signature::*;
 pub mod semantic_tokens;
 pub use semantic_tokens::*;
-use tinymist_std::ImmutPath;
-use tinymist_world::vfs::WorkspaceResolver;
-use tinymist_world::WorldDeps;
-use typst::syntax::Source;
-use typst::World;
 mod post_tyck;
 mod tyck;
 pub(crate) use crate::ty::*;
@@ -39,13 +33,21 @@ mod prelude;
 mod global;
 pub use global::*;

+use std::path::Path;
+use std::sync::Arc;
+
 use ecow::{eco_format, EcoVec};
 use lsp_types::Url;
+use tinymist_project::LspComputeGraph;
+use tinymist_std::{bail, ImmutPath, Result};
+use tinymist_world::vfs::WorkspaceResolver;
+use tinymist_world::{EntryReader, TaskInputs, WorldDeps};
 use typst::diag::{FileError, FileResult};
 use typst::foundations::{Func, Value};
-use typst::syntax::FileId;
+use typst::syntax::{FileId, Source};
+use typst::World;

-use crate::path_res_to_url;
+use crate::{path_res_to_url, CompilerQueryResponse, SemanticRequest, StatefulRequest};

 pub(crate) trait ToFunc {
     fn to_func(&self) -> Option<Func>;
@@ -124,6 +126,64 @@ impl LspWorldExt for tinymist_project::LspWorld {
     }
 }
+
+/// A snapshot for LSP queries.
+pub struct LspQuerySnapshot {
+    /// The using snapshot.
+    pub snap: LspComputeGraph,
+    /// The global shared analysis data.
+    analysis: Arc<Analysis>,
+    /// The revision lock for the analysis (cache).
+    rev_lock: AnalysisRevLock,
+}
+
+impl std::ops::Deref for LspQuerySnapshot {
+    type Target = LspComputeGraph;
+
+    fn deref(&self) -> &Self::Target {
+        &self.snap
+    }
+}
+
+impl LspQuerySnapshot {
+    /// Runs a query for another task.
+    pub fn task(mut self, inputs: TaskInputs) -> Self {
+        self.snap = self.snap.task(inputs);
+        self
+    }
+
+    /// Runs a stateful query.
+    pub fn run_stateful<T: StatefulRequest>(
+        self,
+        query: T,
+        wrapper: fn(Option<T::Response>) -> CompilerQueryResponse,
+    ) -> Result<CompilerQueryResponse> {
+        let graph = self.snap.clone();
+        self.run_analysis(|ctx| query.request(ctx, graph))
+            .map(wrapper)
+    }
+
+    /// Runs a semantic query.
+    pub fn run_semantic<T: SemanticRequest>(
+        self,
+        query: T,
+        wrapper: fn(Option<T::Response>) -> CompilerQueryResponse,
+    ) -> Result<CompilerQueryResponse> {
+        self.run_analysis(|ctx| query.request(ctx)).map(wrapper)
+    }
+
+    /// Runs a query.
+    pub fn run_analysis<T>(self, f: impl FnOnce(&mut LocalContextGuard) -> T) -> Result<T> {
+        let world = self.snap.world().clone();
+        let Some(..) = world.main_id() else {
+            log::error!("Project: main file is not set");
+            bail!("main file is not set");
+        };
+        let mut ctx = self.analysis.enter_(world, self.rev_lock);
+        Ok(f(&mut ctx))
+    }
+}
+
 #[cfg(test)]
 mod matcher_tests {
@@ -611,9 +671,7 @@ mod lint_tests {
         let source = ctx.source_by_path(&path).unwrap();
         let result = tinymist_lint::lint_source(&source);
-        let result =
-            crate::diagnostics::CheckDocWorker::new(&ctx.world, ctx.position_encoding())
-                .convert_all(result.iter());
+        let result = crate::diagnostics::DiagWorker::new(ctx).convert_all(result.iter());
         let result = result
             .into_iter()
             .map(|(k, v)| (file_path_(&k), v))

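Note (not part of the commit, just a sketch): the hunks above move `LspQuerySnapshot` into tinymist-query and give `Analysis` a `query_snapshot` constructor for it. Using only items shown in this diff, a caller would drive the relocated API roughly like this, where `wrap` stands for whichever `CompilerQueryResponse` constructor fits the request:

    use std::sync::Arc;

    use tinymist_project::LspComputeGraph;
    use tinymist_query::analysis::{Analysis, LspQuerySnapshot};
    use tinymist_query::{CompilerQueryResponse, SemanticRequest};
    use tinymist_std::Result;

    fn run_query<T: SemanticRequest>(
        analysis: Arc<Analysis>,
        graph: LspComputeGraph,
        query: T,
        wrap: fn(Option<T::Response>) -> CompilerQueryResponse,
    ) -> Result<CompilerQueryResponse> {
        // Locks the analysis revision and pairs it with the compute graph.
        let snap: LspQuerySnapshot = analysis.query_snapshot(graph, None);
        // Enters a LocalContextGuard internally and runs the query there.
        snap.run_semantic(query, wrap)
    }
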
View file

@@ -354,7 +354,7 @@ fn value_to_def(value: Value, name: impl FnOnce() -> Option<Interned<str>>) -> O
 }

 struct DefResolver {
-    ei: Arc<ExprInfo>,
+    ei: ExprInfo,
 }

 impl DefResolver {

View file

@@ -11,7 +11,7 @@ use rustc_hash::FxHashMap;
 use tinymist_analysis::stats::AllocStats;
 use tinymist_analysis::ty::term_value;
 use tinymist_analysis::{analyze_expr_, analyze_import_};
-use tinymist_project::LspWorld;
+use tinymist_project::{LspComputeGraph, LspWorld};
 use tinymist_std::hash::{hash128, FxDashMap};
 use tinymist_std::typst::TypstDocument;
 use tinymist_world::debug_loc::DataSource;
@@ -24,6 +24,7 @@ use typst::syntax::package::{PackageManifest, PackageSpec};
 use typst::syntax::{Span, VirtualPath};
 use typst_shim::eval::{eval_compat, Eval};

+use super::{LspQuerySnapshot, TypeEnv};
 use crate::adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot};
 use crate::analysis::prelude::*;
 use crate::analysis::{
@@ -42,8 +43,6 @@ use crate::{
     ColorTheme, CompilerQueryRequest, LspPosition, LspRange, LspWorldExt, PositionEncoding,
 };

-use super::TypeEnv;
-
 macro_rules! interned_str {
     ($name:ident, $value:expr) => {
         static $name: LazyLock<Interned<str>> = LazyLock::new(|| $value.into());
@@ -80,13 +79,13 @@ pub struct Analysis {
 }

 impl Analysis {
-    /// Get a snapshot of the analysis data.
-    pub fn snapshot(&self, world: LspWorld) -> LocalContextGuard {
-        self.snapshot_(world, self.lock_revision(None))
+    /// Enters the analysis context.
+    pub fn enter(&self, world: LspWorld) -> LocalContextGuard {
+        self.enter_(world, self.lock_revision(None))
     }

-    /// Get a snapshot of the analysis data.
-    pub fn snapshot_(&self, world: LspWorld, mut lg: AnalysisRevLock) -> LocalContextGuard {
+    /// Enters the analysis context.
+    pub(crate) fn enter_(&self, world: LspWorld, mut lg: AnalysisRevLock) -> LocalContextGuard {
         let lifetime = self.caches.lifetime.fetch_add(1, Ordering::SeqCst);
         let slot = self
             .analysis_rev_cache
@@ -94,7 +93,7 @@ impl Analysis {
             .find_revision(world.revision(), &lg);
         let tokens = lg.tokens.take();
         LocalContextGuard {
-            rev_lock: lg,
+            _rev_lock: lg,
             local: LocalContext {
                 tokens,
                 caches: AnalysisLocalCaches::default(),
@@ -108,7 +107,21 @@ impl Analysis {
         }
     }

-    /// Lock the revision in *main thread*.
+    /// Gets a snapshot for language queries.
+    pub fn query_snapshot(
+        self: Arc<Self>,
+        snap: LspComputeGraph,
+        req: Option<&CompilerQueryRequest>,
+    ) -> LspQuerySnapshot {
+        let rev_lock = self.lock_revision(req);
+        LspQuerySnapshot {
+            snap,
+            analysis: self,
+            rev_lock,
+        }
+    }
+
+    /// Locks the revision in *main thread*.
     #[must_use]
     pub fn lock_revision(&self, req: Option<&CompilerQueryRequest>) -> AnalysisRevLock {
         let mut grid = self.analysis_rev_cache.lock();
@@ -209,7 +222,7 @@ pub struct LocalContextGuard {
     /// The guarded local context
     pub local: LocalContext,
     /// The revision lock
-    pub rev_lock: AnalysisRevLock,
+    _rev_lock: AnalysisRevLock,
 }

 impl Deref for LocalContextGuard {
@@ -430,12 +443,12 @@ impl LocalContext {
     }

     /// Get the expression information of a source file.
-    pub(crate) fn expr_stage_by_id(&mut self, fid: TypstFileId) -> Option<Arc<ExprInfo>> {
+    pub(crate) fn expr_stage_by_id(&mut self, fid: TypstFileId) -> Option<ExprInfo> {
         Some(self.expr_stage(&self.source_by_id(fid).ok()?))
     }

     /// Get the expression information of a source file.
-    pub(crate) fn expr_stage(&mut self, source: &Source) -> Arc<ExprInfo> {
+    pub(crate) fn expr_stage(&mut self, source: &Source) -> ExprInfo {
         let id = source.id();
         let cache = &self.caches.modules.entry(id).or_default().expr_stage;
         cache.get_or_init(|| self.shared.expr_stage(source)).clone()
@@ -692,12 +705,12 @@ impl SharedContext {
     }

     /// Get the expression information of a source file.
-    pub(crate) fn expr_stage_by_id(self: &Arc<Self>, fid: TypstFileId) -> Option<Arc<ExprInfo>> {
+    pub(crate) fn expr_stage_by_id(self: &Arc<Self>, fid: TypstFileId) -> Option<ExprInfo> {
         Some(self.expr_stage(&self.source_by_id(fid).ok()?))
     }

     /// Get the expression information of a source file.
-    pub(crate) fn expr_stage(self: &Arc<Self>, source: &Source) -> Arc<ExprInfo> {
+    pub(crate) fn expr_stage(self: &Arc<Self>, source: &Source) -> ExprInfo {
         let mut route = ExprRoute::default();
         self.expr_stage_(source, &mut route)
     }
@@ -707,7 +720,7 @@ impl SharedContext {
         self: &Arc<Self>,
         source: &Source,
         route: &mut ExprRoute,
-    ) -> Arc<ExprInfo> {
+    ) -> ExprInfo {
         use crate::syntax::expr_of;
         let guard = self.query_stat(source.id(), "expr_stage");
         self.slot.expr_stage.compute(hash128(&source), |prev| {
@@ -1155,7 +1168,7 @@ pub struct AnalysisLocalCaches {
 /// change.
 #[derive(Default)]
 pub struct ModuleAnalysisLocalCache {
-    expr_stage: OnceLock<Arc<ExprInfo>>,
+    expr_stage: OnceLock<ExprInfo>,
     type_check: OnceLock<Arc<TypeInfo>>,
 }
@@ -1243,7 +1256,7 @@ impl Drop for AnalysisRevLock {
 #[derive(Default, Clone)]
 struct AnalysisRevSlot {
     revision: usize,
-    expr_stage: IncrCacheMap<u128, Arc<ExprInfo>>,
+    expr_stage: IncrCacheMap<u128, ExprInfo>,
     type_check: IncrCacheMap<u128, Arc<TypeInfo>>,
 }

View file

@@ -309,7 +309,7 @@ pub(crate) struct Tokenizer {
     pos_offset: usize,
     output: Vec<SemanticToken>,
     source: Source,
-    ei: Arc<ExprInfo>,
+    ei: ExprInfo,
     encoding: PositionEncoding,
     allow_multiline_token: bool,
@@ -320,7 +320,7 @@ pub(crate) struct Tokenizer {
 impl Tokenizer {
     pub fn new(
         source: Source,
-        ei: Arc<ExprInfo>,
+        ei: ExprInfo,
         allow_multiline_token: bool,
         encoding: PositionEncoding,
     ) -> Self {

View file

@@ -25,13 +25,13 @@ pub(crate) use select::*;
 #[derive(Default)]
 pub struct TypeEnv {
     visiting: FxHashMap<TypstFileId, Arc<TypeInfo>>,
-    exprs: FxHashMap<TypstFileId, Option<Arc<ExprInfo>>>,
+    exprs: FxHashMap<TypstFileId, Option<ExprInfo>>,
 }

 /// Type checking at the source unit level.
 pub(crate) fn type_check(
     ctx: Arc<SharedContext>,
-    ei: Arc<ExprInfo>,
+    ei: ExprInfo,
     env: &mut TypeEnv,
 ) -> Arc<TypeInfo> {
     let mut info = TypeInfo::default();
@@ -82,7 +82,7 @@ type CallCacheDesc = (
 pub(crate) struct TypeChecker<'a> {
     ctx: Arc<SharedContext>,
-    ei: Arc<ExprInfo>,
+    ei: ExprInfo,
     info: TypeInfo,
     module_exports: FxHashMap<(TypstFileId, Interned<str>), OnceLock<Option<Ty>>>,

View file

@@ -0,0 +1,19 @@
+use tinymist_project::LspCompiledArtifact;
+
+use crate::{prelude::*, DiagWorker, DiagnosticsMap, SemanticRequest};
+
+/// A request to check the document for errors and lints.
+#[derive(Clone)]
+pub struct CheckRequest {
+    /// The compilation result of the document.
+    pub snap: LspCompiledArtifact,
+}
+
+impl SemanticRequest for CheckRequest {
+    type Response = DiagnosticsMap;
+
+    fn request(self, ctx: &mut LocalContext) -> Option<Self::Response> {
+        let worker = DiagWorker::new(ctx);
+        Some(worker.check().convert_all(self.snap.diagnostics()))
+    }
+}

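Aside (sketch, not in the commit): `CheckRequest` packages lint plus compile diagnostics behind the ordinary `SemanticRequest` interface, so any holder of a `LocalContext` can run it like other queries. Assuming a compiled artifact `snap` and a context `ctx` as in the compile-handler hunk later in this diff:

    let Some(diagnostics) = CheckRequest { snap }.request(&mut ctx) else {
        return;
    };
    // `diagnostics` is a DiagnosticsMap, roughly a map from file URL to the
    // LSP diagnostics of that file, ready to be published to the editor.
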
View file

@@ -4,7 +4,7 @@ use tinymist_project::LspWorld;
 use tinymist_world::vfs::WorkspaceResolver;
 use typst::{diag::SourceDiagnostic, syntax::Span};

-use crate::{prelude::*, LspWorldExt};
+use crate::{analysis::Analysis, prelude::*, LspWorldExt};

 use regex::RegexSet;
@@ -16,52 +16,44 @@ type TypstSeverity = typst::diag::Severity;
 /// Converts a list of Typst diagnostics to LSP diagnostics,
 /// with potential refinements on the error messages.
-pub fn check_doc<'a>(
+pub fn convert_diagnostics<'a>(
     world: &LspWorld,
     errors: impl IntoIterator<Item = &'a TypstDiagnostic>,
     position_encoding: PositionEncoding,
 ) -> DiagnosticsMap {
-    CheckDocWorker::new(world, position_encoding)
-        .check()
-        .convert_all(errors)
+    let analysis = Analysis {
+        position_encoding,
+        ..Analysis::default()
+    };
+    let mut ctx = analysis.enter(world.clone());
+    DiagWorker::new(&mut ctx).convert_all(errors)
 }

-/// Context for converting Typst diagnostics to LSP diagnostics.
-pub(crate) struct CheckDocWorker<'a> {
+/// The worker for collecting diagnostics.
+pub(crate) struct DiagWorker<'a> {
     /// The world surface for Typst compiler.
-    pub world: &'a LspWorld,
-    /// The position encoding for the source.
-    pub position_encoding: PositionEncoding,
+    pub ctx: &'a mut LocalContext,
     /// Results
     pub results: DiagnosticsMap,
 }

-impl std::ops::Deref for CheckDocWorker<'_> {
-    type Target = LspWorld;
-
-    fn deref(&self) -> &Self::Target {
-        self.world
-    }
-}
-
-impl<'w> CheckDocWorker<'w> {
+impl<'w> DiagWorker<'w> {
     /// Creates a new `CheckDocWorker` instance.
-    pub fn new(world: &'w LspWorld, position_encoding: PositionEncoding) -> Self {
+    pub fn new(ctx: &'w mut LocalContext) -> Self {
         Self {
-            world,
-            position_encoding,
+            ctx,
             results: DiagnosticsMap::default(),
         }
     }

     /// Runs code check on the document.
     pub fn check(mut self) -> Self {
-        for dep in self.world.depended_files() {
+        for dep in self.ctx.world.depended_files() {
             if WorkspaceResolver::is_package_file(dep) {
                 continue;
             }
-            let Ok(source) = self.world.source(dep) else {
+            let Ok(source) = self.ctx.world.source(dep) else {
                 continue;
             };
             let res = lint_source(&source);
@@ -89,7 +81,7 @@ impl<'w> CheckDocWorker<'w> {
     /// Converts a list of Typst diagnostics to LSP diagnostics.
     pub fn handle(&mut self, diag: &TypstDiagnostic) {
-        match convert_diagnostic(self, diag) {
+        match self.convert_diagnostic(diag) {
             Ok((uri, diagnostic)) => {
                 self.results.entry(uri).or_default().push(diagnostic);
             }
@@ -98,121 +90,92 @@ impl<'w> CheckDocWorker<'w> {
                 }
             }
         }
     }
-}
-
-fn convert_diagnostic(
-    ctx: &CheckDocWorker,
-    typst_diagnostic: &TypstDiagnostic,
-) -> anyhow::Result<(Url, Diagnostic)> {
-    let typst_diagnostic = {
-        let mut diag = Cow::Borrowed(typst_diagnostic);
-        // Extend more refiners here by adding their instances.
-        let refiners: &[&dyn DiagnosticRefiner] =
-            &[&DeprecationRefiner::<13> {}, &OutOfRootHintRefiner {}];
-        // NOTE: It would be nice to have caching here.
-        for refiner in refiners {
-            if refiner.matches(&diag) {
-                diag = Cow::Owned(refiner.refine(diag.into_owned()));
-            }
-        }
-        diag
-    };
-
-    let (id, span) = diagnostic_span_id(ctx, &typst_diagnostic);
-
-    let uri = ctx.uri_for_id(id)?;
-    let source = ctx.source(id)?;
-    let lsp_range = diagnostic_range(&source, span, ctx.position_encoding);
-    let lsp_severity = diagnostic_severity(typst_diagnostic.severity);
-    let lsp_message = diagnostic_message(&typst_diagnostic);
-    let tracepoints =
-        diagnostic_related_information(ctx, &typst_diagnostic, ctx.position_encoding)?;
-
-    let diagnostic = Diagnostic {
-        range: lsp_range,
-        severity: Some(lsp_severity),
-        message: lsp_message,
-        source: Some("typst".to_owned()),
-        related_information: Some(tracepoints),
-        ..Default::default()
-    };
-
-    Ok((uri, diagnostic))
-}
-
-fn tracepoint_to_relatedinformation(
-    ctx: &CheckDocWorker,
-    tracepoint: &Spanned<Tracepoint>,
-    position_encoding: PositionEncoding,
-) -> anyhow::Result<Option<DiagnosticRelatedInformation>> {
-    if let Some(id) = tracepoint.span.id() {
-        let uri = ctx.uri_for_id(id)?;
-        let source = ctx.source(id)?;
-
-        if let Some(typst_range) = source.range(tracepoint.span) {
-            let lsp_range = to_lsp_range(typst_range, &source, position_encoding);
-
-            return Ok(Some(DiagnosticRelatedInformation {
-                location: LspLocation {
-                    uri,
-                    range: lsp_range,
-                },
-                message: tracepoint.v.to_string(),
-            }));
-        }
-    }
-
-    Ok(None)
-}
-
-fn diagnostic_related_information(
-    project: &CheckDocWorker,
-    typst_diagnostic: &TypstDiagnostic,
-    position_encoding: PositionEncoding,
-) -> anyhow::Result<Vec<DiagnosticRelatedInformation>> {
-    let mut tracepoints = vec![];
-
-    for tracepoint in &typst_diagnostic.trace {
-        if let Some(info) =
-            tracepoint_to_relatedinformation(project, tracepoint, position_encoding)?
-        {
-            tracepoints.push(info);
-        }
-    }
-
-    Ok(tracepoints)
-}
-
-fn diagnostic_span_id(
-    ctx: &CheckDocWorker,
-    typst_diagnostic: &TypstDiagnostic,
-) -> (TypstFileId, Span) {
-    iter::once(typst_diagnostic.span)
-        .chain(typst_diagnostic.trace.iter().map(|trace| trace.span))
-        .find_map(|span| Some((span.id()?, span)))
-        .unwrap_or_else(|| (ctx.main(), Span::detached()))
-}
-
-fn diagnostic_range(
-    source: &Source,
-    typst_span: Span,
-    position_encoding: PositionEncoding,
-) -> LspRange {
-    // Due to nvaner/typst-lsp#241 and maybe typst/typst#2035, we sometimes fail to
-    // find the span. In that case, we use a default span as a better
-    // alternative to panicking.
-    //
-    // This may have been fixed after Typst 0.7.0, but it's still nice to avoid
-    // panics in case something similar reappears.
-    match source.find(typst_span) {
-        Some(node) => {
-            let typst_range = node.range();
-            to_lsp_range(typst_range, source, position_encoding)
-        }
-        None => LspRange::new(LspPosition::new(0, 0), LspPosition::new(0, 0)),
-    }
-}
+
+    fn convert_diagnostic(
+        &self,
+        typst_diagnostic: &TypstDiagnostic,
+    ) -> anyhow::Result<(Url, Diagnostic)> {
+        let typst_diagnostic = {
+            let mut diag = Cow::Borrowed(typst_diagnostic);
+            // Extend more refiners here by adding their instances.
+            let refiners: &[&dyn DiagnosticRefiner] =
+                &[&DeprecationRefiner::<13> {}, &OutOfRootHintRefiner {}];
+            // NOTE: It would be nice to have caching here.
+            for refiner in refiners {
+                if refiner.matches(&diag) {
+                    diag = Cow::Owned(refiner.refine(diag.into_owned()));
+                }
+            }
+            diag
+        };
+
+        let (id, span) = self.diagnostic_span_id(&typst_diagnostic);
+
+        let uri = self.ctx.uri_for_id(id)?;
+        let source = self.ctx.source_by_id(id)?;
+        let lsp_range = self.diagnostic_range(&source, span);
+        let lsp_severity = diagnostic_severity(typst_diagnostic.severity);
+        let lsp_message = diagnostic_message(&typst_diagnostic);
+
+        let diagnostic = Diagnostic {
+            range: lsp_range,
+            severity: Some(lsp_severity),
+            message: lsp_message,
+            source: Some("typst".to_owned()),
+            related_information: (!typst_diagnostic.trace.is_empty()).then(|| {
+                typst_diagnostic
+                    .trace
+                    .iter()
+                    .flat_map(|tracepoint| self.to_related_info(tracepoint))
+                    .collect()
+            }),
+            ..Default::default()
+        };
+
+        Ok((uri, diagnostic))
+    }
+
+    fn to_related_info(
+        &self,
+        tracepoint: &Spanned<Tracepoint>,
+    ) -> Option<DiagnosticRelatedInformation> {
+        let id = tracepoint.span.id()?;
+        // todo: expensive uri_for_id
+        let uri = self.ctx.uri_for_id(id).ok()?;
+        let source = self.ctx.source_by_id(id).ok()?;
+
+        let typst_range = source.range(tracepoint.span)?;
+        let lsp_range = self.ctx.to_lsp_range(typst_range, &source);
+
+        Some(DiagnosticRelatedInformation {
+            location: LspLocation {
+                uri,
+                range: lsp_range,
+            },
+            message: tracepoint.v.to_string(),
+        })
+    }
+
+    fn diagnostic_span_id(&self, typst_diagnostic: &TypstDiagnostic) -> (TypstFileId, Span) {
+        iter::once(typst_diagnostic.span)
+            .chain(typst_diagnostic.trace.iter().map(|trace| trace.span))
+            .find_map(|span| Some((span.id()?, span)))
+            .unwrap_or_else(|| (self.ctx.world.main(), Span::detached()))
+    }
+
+    fn diagnostic_range(&self, source: &Source, typst_span: Span) -> LspRange {
+        // Due to nvaner/typst-lsp#241 and maybe typst/typst#2035, we sometimes fail to
+        // find the span. In that case, we use a default span as a better
+        // alternative to panicking.
+        //
+        // This may have been fixed after Typst 0.7.0, but it's still nice to avoid
+        // panics in case something similar reappears.
+        match source.find(typst_span) {
+            Some(node) => self.ctx.to_lsp_range(node.range(), source),
+            None => LspRange::new(LspPosition::new(0, 0), LspPosition::new(0, 0)),
+        }
+    }
+}

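Aside (sketch, not in the commit, and assuming the usual crate-root re-exports of `convert_diagnostics`, `DiagnosticsMap`, and `PositionEncoding`): the renamed `convert_diagnostics` keeps a free-function signature for callers without a `LocalContext`; per the hunk above it builds a throwaway `Analysis`, enters it, and delegates to `DiagWorker::convert_all`:

    use tinymist_project::LspWorld;
    use tinymist_query::{convert_diagnostics, DiagnosticsMap, PositionEncoding};
    use typst::diag::SourceDiagnostic;

    fn to_publishable(
        world: &LspWorld,
        diags: &[SourceDiagnostic],
        encoding: PositionEncoding,
    ) -> DiagnosticsMap {
        // Internally: Analysis { position_encoding: encoding, ..Default::default() }
        // entered over a clone of `world`, then DiagWorker::convert_all(diags).
        convert_diagnostics(world, diags.iter(), encoding)
    }
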
View file

@@ -1,7 +1,6 @@
 //! Module documentation.

 use std::collections::HashMap;
-use std::sync::Arc;

 use ecow::{eco_vec, EcoString, EcoVec};
 use itertools::Itertools;
@@ -113,7 +112,7 @@ struct ScanDefCtx<'a> {
 }

 impl ScanDefCtx<'_> {
-    fn defs(&mut self, paths: EcoVec<&str>, ei: Arc<ExprInfo>) -> DefInfo {
+    fn defs(&mut self, paths: EcoVec<&str>, ei: ExprInfo) -> DefInfo {
         let name = {
             let stem = ei.fid.vpath().as_rooted_path().file_stem();
             stem.and_then(|s| Some(Interned::new_str(s.to_str()?)))

View file

@@ -8,7 +8,6 @@ input_file: crates/tinymist-query/src/fixtures/lint/if_set.typ
 {
   "message": "This set statement doesn't take effect.\nHint: consider changing parent to `set text(red) if (false)`",
   "range": "1:2:1:15",
-  "relatedInformation": [],
   "severity": 2,
   "source": "typst"
 }

View file

@@ -8,7 +8,6 @@ input_file: crates/tinymist-query/src/fixtures/lint/if_show.typ
 {
   "message": "This show statement doesn't take effect.\nHint: consider changing parent to `show : if (false) { .. }`",
   "range": "1:2:1:17",
-  "relatedInformation": [],
   "severity": 2,
   "source": "typst"
 }

View file

@@ -8,7 +8,6 @@ input_file: crates/tinymist-query/src/fixtures/lint/show_set.typ
 {
   "message": "This set statement doesn't take effect.\nHint: consider changing parent to `show : set text(red)`",
   "range": "1:2:1:15",
-  "relatedInformation": [],
   "severity": 2,
   "source": "typst"
 }

View file

@@ -8,7 +8,6 @@ input_file: crates/tinymist-query/src/fixtures/lint/show_set2.typ
 {
   "message": "This set statement doesn't take effect.\nHint: consider changing parent to `show raw: set text(red)`",
   "range": "1:2:1:15",
-  "relatedInformation": [],
   "severity": 2,
   "source": "typst"
 }

View file

@@ -12,6 +12,7 @@ pub use completion::{CompletionRequest, PostfixSnippet};
 pub use typlite::ColorTheme;
 pub use upstream::with_vm;

+pub use check::*;
 pub use code_action::*;
 pub use code_context::*;
 pub use code_lens::*;
@@ -55,6 +56,7 @@ mod adt;
 mod lsp_typst_boundary;
 mod prelude;

+mod check;
 mod code_action;
 mod code_context;
 mod code_lens;

View file

@@ -32,8 +32,8 @@ pub(crate) fn expr_of(
     source: Source,
     route: &mut ExprRoute,
     guard: QueryStatGuard,
-    prev: Option<Arc<ExprInfo>>,
-) -> Arc<ExprInfo> {
+    prev: Option<ExprInfo>,
+) -> ExprInfo {
     crate::log_debug_ct!("expr_of: {:?}", source.id());
     route.insert(source.id(), None);
@@ -76,8 +76,18 @@ pub(crate) fn expr_of(
     let docstrings_base = Arc::new(Mutex::new(FxHashMap::default()));
     let docstrings = docstrings_base.clone();
-    let exprs_base = Arc::new(Mutex::new(FxHashMap::default()));
-    let exprs = exprs_base.clone();
+    let exprs_base: Arc<
+        parking_lot::lock_api::Mutex<
+            parking_lot::RawMutex,
+            HashMap<Span, Expr, rustc_hash::FxBuildHasher>,
+        >,
+    > = Arc::new(Mutex::new(FxHashMap::default()));
+    let exprs: Arc<
+        parking_lot::lock_api::Mutex<
+            parking_lot::RawMutex,
+            HashMap<Span, Expr, rustc_hash::FxBuildHasher>,
+        >,
+    > = exprs_base.clone();
     let imports_base = Arc::new(Mutex::new(FxHashMap::default()));
     let imports = imports_base.clone();
@@ -120,7 +130,7 @@ pub(crate) fn expr_of(
         (root_scope, root)
     };

-    let info = ExprInfo {
+    let info = ExprInfoRepr {
         fid: source.id(),
         revision,
         source: source.clone(),
@@ -135,11 +145,22 @@
     crate::log_debug_ct!("expr_of end {:?}", source.id());
     route.remove(&info.fid);
-    Arc::new(info)
+    ExprInfo(Arc::new(LazyHash::new(info)))
+}
+
+#[derive(Debug, Clone, Hash)]
+pub struct ExprInfo(Arc<LazyHash<ExprInfoRepr>>);
+
+impl Deref for ExprInfo {
+    type Target = Arc<LazyHash<ExprInfoRepr>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
 }

 #[derive(Debug)]
-pub struct ExprInfo {
+pub struct ExprInfoRepr {
     pub fid: TypstFileId,
     pub revision: usize,
     pub source: Source,
@@ -152,19 +173,22 @@ pub struct ExprInfo {
     pub root: Expr,
 }

-impl std::hash::Hash for ExprInfo {
+impl std::hash::Hash for ExprInfoRepr {
     fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
         self.revision.hash(state);
         self.source.hash(state);
         self.exports.hash(state);
         self.root.hash(state);
+        let mut resolves = self.resolves.iter().collect::<Vec<_>>();
+        resolves.sort_by_key(|(fid, _)| fid.into_raw());
+        resolves.hash(state);
         let mut imports = self.imports.iter().collect::<Vec<_>>();
         imports.sort_by_key(|(fid, _)| *fid);
         imports.hash(state);
     }
 }

-impl ExprInfo {
+impl ExprInfoRepr {
     pub fn get_def(&self, decl: &Interned<Decl>) -> Option<Expr> {
         if decl.is_def() {
             return Some(Expr::Decl(decl.clone()));

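Why the `ExprInfo(Arc<LazyHash<ExprInfoRepr>>)` wrapper (a sketch, not from the commit; it assumes `LazyHash` is the typst utils re-export and uses the `hash128` helper already imported in the analysis crate): hashing `ExprInfoRepr` walks the exports, imports, and the newly hashed `resolves`, so the wrapper caches that work after the first use and makes clones a cheap `Arc` bump:

    use std::sync::Arc;

    use tinymist_std::hash::hash128;
    use typst::utils::LazyHash;

    // Toy repr standing in for ExprInfoRepr.
    #[derive(Debug, Hash)]
    struct Repr {
        revision: usize,
        exports: Vec<String>,
    }

    #[derive(Debug, Clone, Hash)]
    struct Info(Arc<LazyHash<Repr>>);

    fn main() {
        let info = Info(Arc::new(LazyHash::new(Repr {
            revision: 1,
            exports: vec!["main".into()],
        })));
        let copy = info.clone(); // shares both the data and the cached hash
        assert_eq!(hash128(&info), hash128(&copy));
    }
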
View file

@@ -90,7 +90,7 @@ pub fn run_with_ctx<T>(
         },
         ..Analysis::default()
     })
-    .snapshot(world);
+    .enter(world);
     ctx.test_package_list(|| {
         vec![(

View file

@@ -28,10 +28,9 @@ use parking_lot::Mutex;
 use reflexo::{hash::FxHashMap, path::unix_slash};
 use sync_ls::{LspClient, TypedLspClient};
 use tinymist_project::vfs::{FileChangeSet, MemoryEvent};
+use tinymist_query::analysis::{Analysis, LspQuerySnapshot, PeriscopeProvider};
 use tinymist_query::{
-    analysis::{Analysis, AnalysisRevLock, LocalContextGuard, PeriscopeProvider},
-    CompilerQueryRequest, CompilerQueryResponse, DiagnosticsMap, LocalContext, SemanticRequest,
-    StatefulRequest,
+    CheckRequest, CompilerQueryRequest, DiagnosticsMap, LocalContext, SemanticRequest,
 };
 use tinymist_render::PeriscopeRenderer;
 use tinymist_std::{error::prelude::*, ImmutPath};
@@ -303,14 +302,7 @@ impl ProjectState {
     /// Snapshot the compiler thread for language queries
     pub fn query_snapshot(&mut self, q: Option<&CompilerQueryRequest>) -> Result<LspQuerySnapshot> {
         let snap = self.snapshot()?;
-        let analysis = self.analysis.clone();
-        let rev_lock = analysis.lock_revision(q);
-
-        Ok(LspQuerySnapshot {
-            snap,
-            analysis,
-            rev_lock,
-        })
+        Ok(self.analysis.clone().query_snapshot(snap, q))
     }

     pub fn do_interrupt(compiler: &mut LspProjectCompiler, intr: Interrupt<LspCompilerFeat>) {
@@ -451,12 +443,15 @@ impl CompileHandlerImpl {
         let snap = snap.clone();
         let editor_tx = self.editor_tx.clone();
-        let enc = self.analysis.position_encoding;
+        let analysis = self.analysis.clone();

         rayon::spawn(move || {
-            let world = snap.world();
+            let world = snap.world().clone();
+            let mut ctx = analysis.enter(world);

             // todo: check all errors in this file
-            let diagnostics = tinymist_query::check_doc(world, snap.diagnostics(), enc);
+            let Some(diagnostics) = CheckRequest { snap }.request(&mut ctx) else {
+                return;
+            };

             log::trace!("notify diagnostics({dv:?}): {diagnostics:#?}");
@@ -640,53 +635,3 @@ impl CompileHandler<LspCompilerFeat, ProjectInsStateExt> for CompileHandlerImpl
 }

 pub type QuerySnapWithStat = (LspQuerySnapshot, QueryStatGuard);
-
-pub struct LspQuerySnapshot {
-    pub snap: LspComputeGraph,
-    analysis: Arc<Analysis>,
-    rev_lock: AnalysisRevLock,
-}
-
-impl std::ops::Deref for LspQuerySnapshot {
-    type Target = LspComputeGraph;
-
-    fn deref(&self) -> &Self::Target {
-        &self.snap
-    }
-}
-
-impl LspQuerySnapshot {
-    pub fn task(mut self, inputs: TaskInputs) -> Self {
-        self.snap = self.snap.task(inputs);
-        self
-    }
-
-    pub fn run_stateful<T: StatefulRequest>(
-        self,
-        query: T,
-        wrapper: fn(Option<T::Response>) -> CompilerQueryResponse,
-    ) -> Result<CompilerQueryResponse> {
-        let graph = self.snap.clone();
-        self.run_analysis(|ctx| query.request(ctx, graph))
-            .map(wrapper)
-    }
-
-    pub fn run_semantic<T: SemanticRequest>(
-        self,
-        query: T,
-        wrapper: fn(Option<T::Response>) -> CompilerQueryResponse,
-    ) -> Result<CompilerQueryResponse> {
-        self.run_analysis(|ctx| query.request(ctx)).map(wrapper)
-    }
-
-    pub fn run_analysis<T>(self, f: impl FnOnce(&mut LocalContextGuard) -> T) -> Result<T> {
-        let world = self.snap.world().clone();
-        let Some(..) = world.main_id() else {
-            log::error!("Project: main file is not set");
-            bail!("main file is not set");
-        };
-        let mut analysis = self.analysis.snapshot_(world, self.rev_lock);
-        Ok(f(&mut analysis))
-    }
-}

View file

@@ -177,7 +177,8 @@ async fn trace_main(
     let timings = writer.into_inner().unwrap();

     let handle = &state.project;
-    let diagnostics = tinymist_query::check_doc(w, diags.iter(), handle.analysis.position_encoding);
+    let diagnostics =
+        tinymist_query::convert_diagnostics(w, diags.iter(), handle.analysis.position_encoding);

     let rpc_kind = rpc_kind.as_str();

View file

@@ -240,7 +240,7 @@ pub async fn test_main(args: TestArgs) -> Result<()> {
 }

 fn test_once(world: &LspWorld, ctx: &TestContext) -> Result<bool> {
-    let mut actx = ctx.analysis.snapshot(world.clone());
+    let mut actx = ctx.analysis.enter(world.clone());
     let doc = typst::compile::<TypstPagedDocument>(&actx.world).output?;

     let suites =