From fc3099a27caed9724436c1259af8346b25145276 Mon Sep 17 00:00:00 2001 From: Myriad-Dreamin <35292584+Myriad-Dreamin@users.noreply.github.com> Date: Thu, 14 Nov 2024 01:40:27 +0800 Subject: [PATCH] feat: lock and snapshot {analysis,token} caches on main thread (#806) * refactor: hide lock in `query_snapshot` * refactor: hide lock in `query_snapshot` * test: update snapshot * dev: update comments * dev: update snapshot --- crates/tinymist-query/src/adt/mod.rs | 1 + crates/tinymist-query/src/adt/revision.rs | 136 ++++++++++++++ crates/tinymist-query/src/analysis/global.rs | 174 ++++++++++-------- .../src/semantic_tokens/delta.rs | 74 -------- .../tinymist-query/src/semantic_tokens/mod.rs | 155 ++++++++++------ .../src/semantic_tokens_delta.rs | 63 +++++-- .../src/semantic_tokens_full.rs | 8 +- crates/tinymist/src/actor/mod.rs | 13 +- crates/tinymist/src/actor/typ_client.rs | 164 +++++++++++------ crates/tinymist/src/cmd.rs | 31 +--- crates/tinymist/src/resource/symbols.rs | 6 +- crates/tinymist/src/server.rs | 56 +++--- crates/tinymist/src/task/export.rs | 4 +- tests/e2e/main.rs | 4 +- 14 files changed, 543 insertions(+), 346 deletions(-) create mode 100644 crates/tinymist-query/src/adt/revision.rs diff --git a/crates/tinymist-query/src/adt/mod.rs b/crates/tinymist-query/src/adt/mod.rs index 56b3275a..74dfb870 100644 --- a/crates/tinymist-query/src/adt/mod.rs +++ b/crates/tinymist-query/src/adt/mod.rs @@ -1,2 +1,3 @@ pub mod interner; +pub mod revision; pub mod snapshot_map; diff --git a/crates/tinymist-query/src/adt/revision.rs b/crates/tinymist-query/src/adt/revision.rs new file mode 100644 index 00000000..a7279075 --- /dev/null +++ b/crates/tinymist-query/src/adt/revision.rs @@ -0,0 +1,136 @@ +use std::{ + collections::HashMap, + num::NonZeroUsize, + sync::{Arc, OnceLock}, +}; + +pub struct RevisionLock { + estimated: usize, + used: OnceLock, +} + +impl RevisionLock { + pub fn access(&self, revision: NonZeroUsize) { + self.used + .set(revision.get()) + .unwrap_or_else(|_| panic!("revision {revision} is determined")) + } +} + +pub struct RevisionSlot { + pub revision: usize, + pub data: T, +} + +impl std::ops::Deref for RevisionSlot { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.data + } +} + +impl std::ops::DerefMut for RevisionSlot { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.data + } +} + +pub struct RevisionManager { + estimated: usize, + locked: HashMap, + slots: Vec>>, +} + +impl Default for RevisionManager { + fn default() -> Self { + Self { + estimated: 0, + locked: Default::default(), + slots: Default::default(), + } + } +} + +impl RevisionManager { + pub fn clear(&mut self) { + self.slots.clear(); + } + + /// Lock the revision in *main thread*. + #[must_use] + pub fn lock(&mut self, used: NonZeroUsize) -> RevisionLock { + let l = self.lock_estimated(); + l.access(used); + l + } + + /// Lock the revision in *main thread*. + #[must_use] + pub fn lock_estimated(&mut self) -> RevisionLock { + let estimated = self.estimated; + *self.locked.entry(estimated).or_default() += 1; + RevisionLock { + estimated, + used: OnceLock::new(), + } + } + + /// Find the last revision slot by revision number. 
+ pub fn find_revision( + &mut self, + revision: NonZeroUsize, + f: impl FnOnce(Option<&Arc>>) -> T, + ) -> Arc> { + let slot_base = self + .slots + .iter() + .filter(|e| e.revision <= revision.get()) + .reduce(|a, b| if a.revision > b.revision { a } else { b }); + + if let Some(slot) = slot_base { + if slot.revision == revision.get() { + return slot.clone(); + } + } + + let slot = Arc::new(RevisionSlot { + revision: revision.get(), + data: f(slot_base), + }); + self.slots.push(slot.clone()); + self.estimated = revision.get().max(self.estimated); + slot + } + + pub fn unlock(&mut self, rev: &mut RevisionLock) -> Option { + let rev = rev.estimated; + let revision_cnt = self + .locked + .entry(rev) + .or_insert_with(|| panic!("revision {rev} is not locked")); + *revision_cnt -= 1; + if *revision_cnt != 0 { + return None; + } + + self.locked.remove(&rev); + let existing = self.locked.keys().min().copied(); + existing.or_else(|| + // if there is no locked revision, we only keep the latest revision + self.slots + .iter() + .map(|e| e.revision) + .max()) + } +} + +pub trait RevisionManagerLike { + fn gc(&mut self, min_rev: usize); +} + +impl RevisionManagerLike for RevisionManager { + fn gc(&mut self, min_rev: usize) { + self.slots.retain(|r| r.revision >= min_rev); + } +} diff --git a/crates/tinymist-query/src/analysis/global.rs b/crates/tinymist-query/src/analysis/global.rs index cbc27e04..5ab0caa5 100644 --- a/crates/tinymist-query/src/analysis/global.rs +++ b/crates/tinymist-query/src/analysis/global.rs @@ -22,6 +22,7 @@ use typst::model::Document; use typst::syntax::package::PackageManifest; use typst::syntax::{package::PackageSpec, Span, VirtualPath}; +use crate::adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot}; use crate::analysis::prelude::*; use crate::analysis::{ analyze_bib, analyze_expr_, analyze_import_, analyze_signature, definition, post_type_check, @@ -36,8 +37,9 @@ use crate::syntax::{ }; use crate::upstream::{tooltip_, Tooltip}; use crate::{ - lsp_to_typst, typst_to_lsp, ColorTheme, LspPosition, LspRange, LspWorldExt, PositionEncoding, - SemanticTokenContext, TypstRange, VersionedDocument, + lsp_to_typst, typst_to_lsp, ColorTheme, CompilerQueryRequest, LspPosition, LspRange, + LspWorldExt, PositionEncoding, SemanticTokenCache, SemanticTokenContext, TypstRange, + VersionedDocument, }; use super::TypeEnv; @@ -47,18 +49,22 @@ use super::TypeEnv; pub struct Analysis { /// The position encoding for the workspace. pub position_encoding: PositionEncoding, + /// Whether to allow overlapping semantic tokens. + pub allow_overlapping_token: bool, + /// Whether to allow multiline semantic tokens. + pub allow_multiline_token: bool, /// The editor's color theme. pub color_theme: ColorTheme, /// The periscope provider. pub periscope: Option>, - /// The semantic token context. - pub tokens_ctx: Arc, /// The global worker resources for analysis. pub workers: Arc, + /// The semantic token cache. + pub tokens_caches: Arc>, /// The global caches for analysis. pub caches: AnalysisGlobalCaches, - /// The global cache grid for analysis. - pub cache_grid: Arc>, + /// The revisioned cache for analysis. + pub analysis_rev_cache: Arc>, /// The statistics about the analyzers. pub stats: Arc, } @@ -66,10 +72,21 @@ pub struct Analysis { impl Analysis { /// Get a snapshot of the analysis data. pub fn snapshot(&self, world: LspWorld) -> LocalContextGuard { + self.snapshot_(world, self.lock_revision(None)) + } + + /// Get a snapshot of the analysis data. 
+ pub fn snapshot_(&self, world: LspWorld, mut lg: AnalysisRevLock) -> LocalContextGuard { let lifetime = self.caches.lifetime.fetch_add(1, Ordering::SeqCst); - let slot = self.cache_grid.lock().find_revision(world.revision()); + let slot = self + .analysis_rev_cache + .lock() + .find_revision(world.revision(), &lg); + let tokens = lg.tokens.take(); LocalContextGuard { + rev_lock: lg, local: LocalContext { + tokens, caches: AnalysisCaches::default(), shared: Arc::new(SharedContext { slot, @@ -83,22 +100,36 @@ impl Analysis { /// Lock the revision in *main thread*. #[must_use] - pub fn lock_revision(&self) -> RevisionLock { - let mut grid = self.cache_grid.lock(); - let revision = grid.revision; - *grid.locked_revisions.entry(revision).or_default() += 1; - RevisionLock { - grid: self.cache_grid.clone(), - revision, + pub fn lock_revision(&self, q: Option<&CompilerQueryRequest>) -> AnalysisRevLock { + let mut grid = self.analysis_rev_cache.lock(); + + AnalysisRevLock { + tokens: match q { + Some(CompilerQueryRequest::SemanticTokensFull(f)) => Some( + SemanticTokenCache::acquire(self.tokens_caches.clone(), &f.path, None), + ), + Some(CompilerQueryRequest::SemanticTokensDelta(f)) => { + Some(SemanticTokenCache::acquire( + self.tokens_caches.clone(), + &f.path, + Some(&f.previous_result_id), + )) + } + _ => None, + }, + inner: grid.manager.lock_estimated(), + grid: self.analysis_rev_cache.clone(), } } /// Clear all cached resources. pub fn clear_cache(&self) { self.caches.signatures.clear(); + self.caches.def_signatures.clear(); self.caches.static_signatures.clear(); self.caches.terms.clear(); - self.cache_grid.lock().clear(); + self.tokens_caches.lock().clear(); + self.analysis_rev_cache.lock().clear(); } /// Report the statistics of the analysis. @@ -140,6 +171,8 @@ pub struct AnalysisGlobalWorkers { pub struct LocalContextGuard { /// Constructed local context pub local: LocalContext, + /// The revision lock + pub rev_lock: AnalysisRevLock, } impl Deref for LocalContextGuard { @@ -206,6 +239,8 @@ impl LocalContextGuard { /// The local context for analyzers. pub struct LocalContext { + /// The created semantic token context. + pub(crate) tokens: Option, /// Local caches for analysis. pub caches: AnalysisCaches, /// The shared context @@ -387,8 +422,8 @@ pub struct SharedContext { pub world: LspWorld, /// The analysis data pub analysis: Analysis, - /// The using revision slot - slot: Arc, + /// The using analysis revision slot + slot: Arc>, } impl SharedContext { @@ -1047,20 +1082,14 @@ pub struct ModuleAnalysisCache { /// The grid cache for all level of analysis results of a module. #[derive(Default)] -pub struct AnalysisGlobalCacheGrid { - revision: usize, - default_slot: RevisionSlot, - revisions: Vec>, - locked_revisions: HashMap, +pub struct AnalysisRevCache { + default_slot: AnalysisRevSlot, + manager: RevisionManager, } -impl AnalysisGlobalCacheGrid { - fn clear(&mut self) { - self.revisions.clear(); - } - +impl RevisionManagerLike for AnalysisRevCache { fn gc(&mut self, rev: usize) { - self.revisions.retain(|r| r.revision >= rev); + self.manager.gc(rev); self.default_slot .expr_stage .global @@ -1072,79 +1101,64 @@ impl AnalysisGlobalCacheGrid { .lock() .retain(|_, r| r.0 + 60 >= rev); } +} + +impl AnalysisRevCache { + fn clear(&mut self) { + self.manager.clear(); + self.default_slot = Default::default(); + } /// Find the last revision slot by revision number. 
- fn find_revision(&mut self, revision: NonZeroUsize) -> Arc { - let slot_base = self - .revisions - .iter() - .filter(|e| e.revision <= revision.get()) - .reduce(|a, b| if a.revision > b.revision { a } else { b }); - - if let Some(slot) = slot_base { - if slot.revision == revision.get() { - return slot.clone(); - } - } - - let mut slot = slot_base - .map(|e| RevisionSlot { - revision: e.revision, - expr_stage: e.expr_stage.crawl(revision.get()), - type_check: e.type_check.crawl(revision.get()), - }) - .unwrap_or_else(|| self.default_slot.clone()); - - slot.revision = revision.get(); - let slot = Arc::new(slot); - self.revisions.push(slot.clone()); - self.revision = revision.get().max(self.revision); - slot + fn find_revision( + &mut self, + revision: NonZeroUsize, + lg: &AnalysisRevLock, + ) -> Arc> { + lg.inner.access(revision); + self.manager.find_revision(revision, |slot_base| { + slot_base + .map(|e| AnalysisRevSlot { + revision: e.revision, + expr_stage: e.data.expr_stage.crawl(revision.get()), + type_check: e.data.type_check.crawl(revision.get()), + }) + .unwrap_or_else(|| self.default_slot.clone()) + }) } } /// A lock for revision. -pub struct RevisionLock { - grid: Arc>, - revision: usize, +pub struct AnalysisRevLock { + inner: RevisionLock, + tokens: Option, + grid: Arc>, } -impl Drop for RevisionLock { +impl Drop for AnalysisRevLock { fn drop(&mut self) { - let mut grid = self.grid.lock(); - let revision_cnt = grid - .locked_revisions - .entry(self.revision) - .or_insert_with(|| panic!("revision {} is not locked", self.revision)); - *revision_cnt -= 1; - if *revision_cnt != 0 { - return; - } + let mut mu = self.grid.lock(); + let gc_revision = mu.manager.unlock(&mut self.inner); - grid.locked_revisions.remove(&self.revision); - if grid.revision <= self.revision { - return; + if let Some(gc_revision) = gc_revision { + let grid = self.grid.clone(); + rayon::spawn(move || { + grid.lock().gc(gc_revision); + }); } - let existing = grid.locked_revisions.keys().min().copied(); - let gc_revision = existing.unwrap_or(self.revision); - let grid = self.grid.clone(); - - rayon::spawn(move || { - grid.lock().gc(gc_revision); - }); } } #[derive(Default, Clone)] -struct RevisionSlot { +struct AnalysisRevSlot { revision: usize, expr_stage: IncrCacheMap>, type_check: IncrCacheMap>, } -impl Drop for RevisionSlot { +impl Drop for AnalysisRevSlot { fn drop(&mut self) { - log::info!("revision {} is dropped", self.revision) + log::info!("analysis revision {} is dropped", self.revision) } } diff --git a/crates/tinymist-query/src/semantic_tokens/delta.rs b/crates/tinymist-query/src/semantic_tokens/delta.rs index af24450b..8b137891 100644 --- a/crates/tinymist-query/src/semantic_tokens/delta.rs +++ b/crates/tinymist-query/src/semantic_tokens/delta.rs @@ -1,75 +1 @@ -use lsp_types::{SemanticToken, SemanticTokensEdit}; -#[derive(Debug)] -struct CachedTokens { - tokens: Vec, - id: u64, -} - -#[derive(Default, Debug)] -pub struct CacheInner { - last_sent: Option, - next_id: u64, -} - -impl CacheInner { - pub fn try_take_result(&mut self, id: &str) -> Option> { - let id = id.parse::().ok()?; - match self.last_sent.take() { - Some(cached) if cached.id == id => Some(cached.tokens), - Some(cached) => { - // replace after taking - self.last_sent = Some(cached); - None - } - None => None, - } - } - - pub fn cache_result(&mut self, tokens: Vec) -> String { - let id = self.get_next_id(); - let cached = CachedTokens { tokens, id }; - self.last_sent = Some(cached); - id.to_string() - } - - fn get_next_id(&mut self) 
-> u64 { - let id = self.next_id; - self.next_id += 1; - id - } -} - -pub fn token_delta(from: &[SemanticToken], to: &[SemanticToken]) -> Vec { - // Taken from `rust-analyzer`'s algorithm - // https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/semantic_tokens.rs#L219 - - let start = from - .iter() - .zip(to.iter()) - .take_while(|(x, y)| x == y) - .count(); - - let (_, from) = from.split_at(start); - let (_, to) = to.split_at(start); - - let dist_from_end = from - .iter() - .rev() - .zip(to.iter().rev()) - .take_while(|(x, y)| x == y) - .count(); - - let (from, _) = from.split_at(from.len() - dist_from_end); - let (to, _) = to.split_at(to.len() - dist_from_end); - - if from.is_empty() && to.is_empty() { - vec![] - } else { - vec![SemanticTokensEdit { - start: 5 * start as u32, - delete_count: 5 * from.len() as u32, - data: Some(to.into()), - }] - } -} diff --git a/crates/tinymist-query/src/semantic_tokens/mod.rs b/crates/tinymist-query/src/semantic_tokens/mod.rs index 671f07ed..cb4827d6 100644 --- a/crates/tinymist-query/src/semantic_tokens/mod.rs +++ b/crates/tinymist-query/src/semantic_tokens/mod.rs @@ -1,95 +1,146 @@ -use std::{ops::Range, sync::Arc}; +use std::{ + num::NonZeroUsize, + ops::Range, + path::Path, + sync::{Arc, OnceLock}, +}; -use lsp_types::{SemanticToken, SemanticTokensEdit}; -use parking_lot::RwLock; +use hashbrown::HashMap; +use lsp_types::SemanticToken; +use parking_lot::Mutex; +use reflexo::ImmutPath; use typst::syntax::{ast, LinkedNode, Source, SyntaxKind}; use crate::{ + adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot}, syntax::{Expr, ExprInfo}, ty::Ty, LocalContext, LspPosition, PositionEncoding, }; -use self::delta::token_delta; use self::modifier_set::ModifierSet; -use self::delta::CacheInner as TokenCacheInner; - mod delta; mod modifier_set; mod typst_tokens; pub use self::typst_tokens::{Modifier, TokenType}; -/// A semantic token context providing incremental semantic tokens rendering. +/// A shared semantic tokens object. +pub type SemanticTokens = Arc>; + +/// A shared semantic tokens cache. #[derive(Default)] -pub struct SemanticTokenContext { - cache: RwLock, - /// Whether to allow overlapping tokens. - pub allow_overlapping_token: bool, - /// Whether to allow multiline tokens. - pub allow_multiline_token: bool, +pub struct SemanticTokenCache { + next_id: usize, + // todo: clear cache after didClose + manager: HashMap>>, +} + +impl SemanticTokenCache { + pub(crate) fn clear(&mut self) { + self.next_id = 0; + self.manager.clear(); + } + + /// Lock the token cache with an optional previous id in *main thread*. + pub(crate) fn acquire( + cache: Arc>, + p: &Path, + prev: Option<&str>, + ) -> SemanticTokenContext { + let that = cache.clone(); + let mut that = that.lock(); + + that.next_id += 1; + let prev = prev.and_then(|id| { + id.parse::() + .inspect_err(|_| { + log::warn!("invalid previous id: {id}"); + }) + .ok() + }); + let next = NonZeroUsize::new(that.next_id).expect("id overflow"); + + let path = ImmutPath::from(p); + let manager = that.manager.entry(path.clone()).or_default(); + let _rev_lock = manager.lock(prev.unwrap_or(next)); + let prev = prev.and_then(|prev| { + manager + .find_revision(prev, |_| OnceLock::new()) + .data + .get() + .cloned() + }); + let next = manager.find_revision(next, |_| OnceLock::new()); + + SemanticTokenContext { + _rev_lock, + cache, + path, + prev, + next, + } + } +} + +/// A semantic token context providing incremental semantic tokens rendering. 
+pub(crate) struct SemanticTokenContext { + _rev_lock: RevisionLock, + cache: Arc>, + path: ImmutPath, + prev: Option, + next: Arc>>, } impl SemanticTokenContext { - /// Create a new semantic token context. - pub fn new(allow_overlapping_token: bool, allow_multiline_token: bool) -> Self { - Self { - cache: RwLock::new(TokenCacheInner::default()), - allow_overlapping_token, - allow_multiline_token, + pub fn previous(&self) -> Option<&[SemanticToken]> { + self.prev.as_ref().map(|cached| cached.as_slice()) + } + + pub fn cache_result(&self, cached: SemanticTokens) -> String { + let id = self.next.revision; + self.next + .data + .set(cached) + .unwrap_or_else(|_| panic!("unexpected slot overwrite {id}")); + id.to_string() + } +} + +impl Drop for SemanticTokenContext { + fn drop(&mut self) { + let mut cache = self.cache.lock(); + let manager = cache.manager.get_mut(&self.path); + if let Some(manager) = manager { + let min_rev = manager.unlock(&mut self._rev_lock); + if let Some(min_rev) = min_rev { + manager.gc(min_rev); + } } } } /// Get the semantic tokens for a source. -pub(crate) fn semantic_tokens_full( +pub(crate) fn get_semantic_tokens( ctx: &mut LocalContext, source: &Source, ei: Arc, -) -> (Vec, String) { +) -> (SemanticTokens, Option) { let root = LinkedNode::new(source.root()); let mut tokenizer = Tokenizer::new( source.clone(), ei, - ctx.analysis.tokens_ctx.allow_multiline_token, + ctx.analysis.allow_multiline_token, ctx.analysis.position_encoding, ); tokenizer.tokenize_tree(&root, ModifierSet::empty()); - let output = tokenizer.output; + let output = SemanticTokens::new(tokenizer.output); - let result_id = ctx - .analysis - .tokens_ctx - .cache - .write() - .cache_result(output.clone()); + let result_id = ctx.tokens.as_ref().map(|t| t.cache_result(output.clone())); (output, result_id) } -/// Get the semantic tokens delta for a source. 
-pub(crate) fn semantic_tokens_delta( - ctx: &mut LocalContext, - source: &Source, - ei: Arc, - result_id: &str, -) -> (Result, Vec>, String) { - let cached = ctx - .analysis - .tokens_ctx - .cache - .write() - .try_take_result(result_id); - - // this call will overwrite the cache, so need to read from cache first - let (tokens, result_id) = semantic_tokens_full(ctx, source, ei); - - match cached { - Some(cached) => (Ok(token_delta(&cached, &tokens)), result_id), - None => (Err(tokens), result_id), - } -} - pub(crate) struct Tokenizer { curr_pos: LspPosition, pos_offset: usize, diff --git a/crates/tinymist-query/src/semantic_tokens_delta.rs b/crates/tinymist-query/src/semantic_tokens_delta.rs index 73c0a1f5..08b55697 100644 --- a/crates/tinymist-query/src/semantic_tokens_delta.rs +++ b/crates/tinymist-query/src/semantic_tokens_delta.rs @@ -1,4 +1,6 @@ -use crate::{prelude::*, semantic_tokens_delta}; +use lsp_types::{SemanticToken, SemanticTokensEdit}; + +use crate::{get_semantic_tokens, prelude::*}; /// The [`textDocument/semanticTokens/full/delta`] request is sent from the /// client to the server to resolve the semantic tokens of a given file, @@ -28,24 +30,63 @@ impl SemanticRequest for SemanticTokensDeltaRequest { fn request(self, ctx: &mut LocalContext) -> Option { let source = ctx.source_by_path(&self.path).ok()?; let ei = ctx.expr_stage(&source); + let (tokens, result_id) = get_semantic_tokens(ctx, &source, ei); - let (tokens, result_id) = semantic_tokens_delta(ctx, &source, ei, &self.previous_result_id); + let (tokens, result_id) = match ctx.tokens.as_ref().and_then(|t| t.previous()) { + Some(cached) => (Ok(token_delta(cached, &tokens)), result_id), + None => { + log::warn!( + "No previous tokens found for delta computation in {}, prev_id: {:?}", + self.path.display(), + self.previous_result_id + ); + (Err(tokens), result_id) + } + }; match tokens { - Ok(edits) => Some( - SemanticTokensDelta { - result_id: Some(result_id), - edits, - } - .into(), - ), + Ok(edits) => Some(SemanticTokensDelta { result_id, edits }.into()), Err(tokens) => Some( SemanticTokens { - result_id: Some(result_id), - data: tokens, + result_id, + data: tokens.as_ref().clone(), } .into(), ), } } } + +fn token_delta(from: &[SemanticToken], to: &[SemanticToken]) -> Vec { + // Taken from `rust-analyzer`'s algorithm + // https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/semantic_tokens.rs#L219 + + let start = from + .iter() + .zip(to.iter()) + .take_while(|(x, y)| x == y) + .count(); + + let (_, from) = from.split_at(start); + let (_, to) = to.split_at(start); + + let dist_from_end = from + .iter() + .rev() + .zip(to.iter().rev()) + .take_while(|(x, y)| x == y) + .count(); + + let (from, _) = from.split_at(from.len() - dist_from_end); + let (to, _) = to.split_at(to.len() - dist_from_end); + + if from.is_empty() && to.is_empty() { + vec![] + } else { + vec![SemanticTokensEdit { + start: 5 * start as u32, + delete_count: 5 * from.len() as u32, + data: Some(to.into()), + }] + } +} diff --git a/crates/tinymist-query/src/semantic_tokens_full.rs b/crates/tinymist-query/src/semantic_tokens_full.rs index 4d14caa3..2c75709c 100644 --- a/crates/tinymist-query/src/semantic_tokens_full.rs +++ b/crates/tinymist-query/src/semantic_tokens_full.rs @@ -1,4 +1,4 @@ -use crate::{prelude::*, semantic_tokens_full}; +use crate::{get_semantic_tokens, prelude::*}; /// The [`textDocument/semanticTokens/full`] request is sent from the client to /// the server to resolve the semantic tokens of a given file. 
@@ -29,12 +29,12 @@ impl SemanticRequest for SemanticTokensFullRequest { fn request(self, ctx: &mut LocalContext) -> Option { let source = ctx.source_by_path(&self.path).ok()?; let ei = ctx.expr_stage(&source); - let (tokens, result_id) = semantic_tokens_full(ctx, &source, ei); + let (tokens, result_id) = get_semantic_tokens(ctx, &source, ei); Some( SemanticTokens { - result_id: Some(result_id), - data: tokens, + result_id, + data: tokens.as_ref().clone(), } .into(), ) diff --git a/crates/tinymist/src/actor/mod.rs b/crates/tinymist/src/actor/mod.rs index 9d7f2ed3..3608f373 100644 --- a/crates/tinymist/src/actor/mod.rs +++ b/crates/tinymist/src/actor/mod.rs @@ -12,7 +12,7 @@ use reflexo::ImmutPath; use reflexo_typst::vfs::notify::{FileChangeSet, MemoryEvent}; use reflexo_typst::world::EntryState; use tinymist_query::analysis::{Analysis, PeriscopeProvider}; -use tinymist_query::{ExportKind, LocalContext, SemanticTokenContext, VersionedDocument}; +use tinymist_query::{ExportKind, LocalContext, VersionedDocument}; use tinymist_render::PeriscopeRenderer; use tokio::sync::mpsc; use typst::layout::Position; @@ -107,6 +107,8 @@ impl LanguageState { stats: Default::default(), analysis: Arc::new(Analysis { position_encoding: const_config.position_encoding, + allow_overlapping_token: const_config.tokens_overlapping_token_support, + allow_multiline_token: const_config.tokens_multiline_token_support, color_theme: match self.compile_config().color_theme.as_deref() { Some("dark") => tinymist_query::ColorTheme::Dark, _ => tinymist_query::ColorTheme::Light, @@ -115,14 +117,11 @@ impl LanguageState { let r = TypstPeriscopeProvider(PeriscopeRenderer::new(args)); Arc::new(r) as Arc }), - tokens_ctx: Arc::new(SemanticTokenContext::new( - const_config.tokens_overlapping_token_support, - const_config.tokens_multiline_token_support, - )), + tokens_caches: Arc::default(), workers: Default::default(), caches: Default::default(), - cache_grid: Default::default(), - stats: Default::default(), + analysis_rev_cache: Arc::default(), + stats: Arc::default(), }), notified_revision: parking_lot::Mutex::new(0), diff --git a/crates/tinymist/src/actor/typ_client.rs b/crates/tinymist/src/actor/typ_client.rs index 59d610b2..9141566e 100644 --- a/crates/tinymist/src/actor/typ_client.rs +++ b/crates/tinymist/src/actor/typ_client.rs @@ -24,7 +24,7 @@ use std::{collections::HashMap, ops::Deref, path::PathBuf, sync::Arc}; -use anyhow::{anyhow, bail}; +use anyhow::bail; use log::{error, info, trace}; use reflexo_typst::{ error::prelude::*, typst::prelude::*, vfs::notify::MemoryEvent, world::EntryState, @@ -32,12 +32,12 @@ use reflexo_typst::{ }; use sync_lsp::{just_future, QueryFuture}; use tinymist_query::{ - analysis::{Analysis, LocalContextGuard}, + analysis::{Analysis, AnalysisRevLock, LocalContextGuard}, CompilerQueryRequest, CompilerQueryResponse, DiagnosticsMap, ExportKind, SemanticRequest, ServerInfoResponse, StatefulRequest, VersionedDocument, }; use tokio::sync::{mpsc, oneshot}; -use typst::{diag::SourceDiagnostic, World as TypstWorld}; +use typst::{diag::SourceDiagnostic, World}; use super::{ editor::{DocVersion, EditorRequest, TinymistCompileStatusEnum}, @@ -71,13 +71,26 @@ pub struct CompileHandler { impl CompileHandler { /// Snapshot the compiler thread for tasks - pub fn snapshot(&self) -> ZResult { + pub fn snapshot(&self) -> ZResult { let (tx, rx) = oneshot::channel(); self.intr_tx .send(Interrupt::SnapshotRead(tx)) .map_err(map_string_err("failed to send snapshot request"))?; - Ok(QuerySnap { rx }) + Ok(WorldSnapFut 
{ rx }) + } + + /// Snapshot the compiler thread for language queries + pub fn query_snapshot(&self, q: Option<&CompilerQueryRequest>) -> ZResult { + let fut = self.snapshot()?; + let analysis = self.analysis.clone(); + let rev_lock = analysis.lock_revision(q); + + Ok(QuerySnapFut { + fut, + analysis, + rev_lock, + }) } /// Get latest artifact the compiler thread for tasks @@ -144,49 +157,6 @@ impl CompileHandler { self.push_diagnostics(revision, valid.then_some(diagnostics)); } - pub fn run_stateful( - &self, - snap: CompileSnapshot, - query: T, - wrapper: fn(Option) -> CompilerQueryResponse, - ) -> anyhow::Result { - let w = &snap.world; - let doc = snap.success_doc.map(|doc| VersionedDocument { - version: w.revision().get(), - document: doc, - }); - self.run_analysis(w, |ctx| query.request(ctx, doc)) - .map(wrapper) - } - - pub fn run_semantic( - &self, - snap: CompileSnapshot, - query: T, - wrapper: fn(Option) -> CompilerQueryResponse, - ) -> anyhow::Result { - self.run_analysis(&snap.world, |ctx| query.request(ctx)) - .map(wrapper) - } - - pub fn run_analysis( - &self, - w: &LspWorld, - f: impl FnOnce(&mut LocalContextGuard) -> T, - ) -> anyhow::Result { - let Some(main) = w.main_id() else { - error!("TypstActor: main file is not set"); - bail!("main file is not set"); - }; - w.source(main).map_err(|err| { - info!("TypstActor: failed to prepare main file: {err:?}"); - anyhow!("failed to get source: {err}") - })?; - - let mut analysis = self.analysis.snapshot(w.clone()); - Ok(f(&mut analysis)) - } - // todo: multiple preview support #[cfg(feature = "preview")] #[must_use] @@ -313,17 +283,22 @@ impl CompileClientActor { } /// Snapshot the compiler thread for tasks - pub fn snapshot(&self) -> ZResult { + pub fn snapshot(&self) -> ZResult { self.handle.clone().snapshot() } - /// Snapshot the compiler thread for tasks - pub fn snapshot_with_stat(&self, q: &CompilerQueryRequest) -> ZResult { + /// Snapshot the compiler thread for language queries + pub fn query_snapshot(&self) -> ZResult { + self.handle.clone().query_snapshot(None) + } + + /// Snapshot the compiler thread for language queries + pub fn query_snapshot_with_stat(&self, q: &CompilerQueryRequest) -> ZResult { let name: &'static str = q.into(); let path = q.associated_path(); let stat = self.handle.stats.query_stat(path, name); - let snap = self.handle.clone().snapshot()?; - Ok(QuerySnapWithStat { snap, stat }) + let fut = self.handle.clone().query_snapshot(Some(q))?; + Ok(QuerySnapWithStat { fut, stat }) } pub fn add_memory_changes(&self, event: MemoryEvent) { @@ -428,16 +403,16 @@ impl CompileClientActor { } pub struct QuerySnapWithStat { - pub snap: QuerySnap, + pub fut: QuerySnapFut, pub(crate) stat: QueryStatGuard, } -pub struct QuerySnap { +pub struct WorldSnapFut { rx: oneshot::Receiver>, } -impl QuerySnap { - /// Snapshot the compiler thread for tasks +impl WorldSnapFut { + /// wait for the snapshot to be ready pub async fn receive(self) -> ZResult> { self.rx .await @@ -445,6 +420,81 @@ impl QuerySnap { } } +pub struct QuerySnapFut { + fut: WorldSnapFut, + analysis: Arc, + rev_lock: AnalysisRevLock, +} + +impl QuerySnapFut { + /// wait for the snapshot to be ready + pub async fn receive(self) -> ZResult { + let snap = self.fut.receive().await?; + Ok(QuerySnap { + snap, + analysis: self.analysis, + rev_lock: self.rev_lock, + }) + } +} + +pub struct QuerySnap { + pub snap: CompileSnapshot, + analysis: Arc, + rev_lock: AnalysisRevLock, +} + +impl std::ops::Deref for QuerySnap { + type Target = CompileSnapshot; + + fn 
deref(&self) -> &Self::Target { + &self.snap + } +} + +impl QuerySnap { + pub fn task(mut self, inputs: TaskInputs) -> Self { + self.snap = self.snap.task(inputs); + self + } + + pub fn run_stateful( + self, + query: T, + wrapper: fn(Option) -> CompilerQueryResponse, + ) -> anyhow::Result { + let doc = self.snap.success_doc.as_ref().map(|doc| VersionedDocument { + version: self.world.revision().get(), + document: doc.clone(), + }); + self.run_analysis(|ctx| query.request(ctx, doc)) + .map(wrapper) + } + + pub fn run_semantic( + self, + query: T, + wrapper: fn(Option) -> CompilerQueryResponse, + ) -> anyhow::Result { + self.run_analysis(|ctx| query.request(ctx)).map(wrapper) + } + + pub fn run_analysis(self, f: impl FnOnce(&mut LocalContextGuard) -> T) -> anyhow::Result { + let w = self.world.as_ref(); + let Some(main) = w.main_id() else { + error!("TypstActor: main file is not set"); + bail!("main file is not set"); + }; + w.source(main).map_err(|err| { + info!("TypstActor: failed to prepare main file: {err:?}"); + anyhow::anyhow!("failed to get source: {err}") + })?; + + let mut analysis = self.analysis.snapshot_(w.clone(), self.rev_lock); + Ok(f(&mut analysis)) + } +} + pub struct ArtifactSnap { rx: oneshot::Receiver>, } diff --git a/crates/tinymist/src/cmd.rs b/crates/tinymist/src/cmd.rs index 2e7901d4..ac9e558e 100644 --- a/crates/tinymist/src/cmd.rs +++ b/crates/tinymist/src/cmd.rs @@ -552,27 +552,20 @@ impl LanguageState { &mut self, mut arguments: Vec, ) -> AnySchedulableResponse { - let handle = self.primary().handle.clone(); + let fut = self.primary().query_snapshot().map_err(internal_error)?; let info = get_arg!(arguments[1] as PackageInfo); - // todo: do this in a common place - let rev_lock = handle.analysis.lock_revision(); - - let snap = handle.snapshot().map_err(z_internal_error)?; just_future(async move { - let snap = snap.receive().await.map_err(z_internal_error)?; - let w = snap.world.as_ref(); + let snap = fut.receive().await.map_err(z_internal_error)?; - let symbols = handle - .run_analysis(w, |a| { + let symbols = snap + .run_analysis(|a| { tinymist_query::docs::package_module_docs(a, &info) .map_err(map_string_err("failed to list symbols")) }) .map_err(internal_error)? .map_err(internal_error)?; - drop(rev_lock); - serde_json::to_value(symbols).map_err(internal_error) }) } @@ -619,16 +612,10 @@ impl LanguageState { info: PackageInfo, f: impl FnOnce(&mut LocalContextGuard) -> LspResult + Send + Sync, ) -> LspResult>> { - let handle: std::sync::Arc = - self.primary().handle.clone(); - - // todo: do this in a common place - let rev_lock = handle.analysis.lock_revision(); - - let snap = handle.snapshot().map_err(z_internal_error)?; + let fut = self.primary().query_snapshot().map_err(internal_error)?; Ok(async move { - let snap = snap.receive().await.map_err(z_internal_error)?; + let snap = fut.receive().await.map_err(z_internal_error)?; let w = snap.world.as_ref(); let entry: StrResult = Ok(()).and_then(|_| { @@ -645,14 +632,12 @@ impl LanguageState { }); let entry = entry.map_err(|e| internal_error(e.to_string()))?; - let w = snap.world.task(TaskInputs { + let snap = snap.task(TaskInputs { entry: Some(entry), inputs: None, }); - let res = handle.run_analysis(&w, f).map_err(internal_error)?; - drop(rev_lock); - res + snap.run_analysis(f).map_err(internal_error)? 
}) } } diff --git a/crates/tinymist/src/resource/symbols.rs b/crates/tinymist/src/resource/symbols.rs index 1babed1a..793f4069 100644 --- a/crates/tinymist/src/resource/symbols.rs +++ b/crates/tinymist/src/resource/symbols.rs @@ -6,7 +6,7 @@ use reflexo_typst::{ }; use sync_lsp::LspResult; -use crate::{actor::typ_client::QuerySnap, z_internal_error}; +use crate::{actor::typ_client::WorldSnapFut, z_internal_error}; pub use super::prelude::*; @@ -52,7 +52,7 @@ enum SymCategory { Harpoon, Tack, // Lowercase Greek and Uppercase Greek - Greek, + Greek, Hebrew, DoubleStruck, } @@ -947,7 +947,7 @@ static CAT_MAP: Lazy> = Lazy::new(|| { impl LanguageState { /// Get the all valid symbols - pub async fn get_symbol_resources(snap: QuerySnap) -> LspResult { + pub async fn get_symbol_resources(snap: WorldSnapFut) -> LspResult { let snap = snap.receive().await.map_err(z_internal_error)?; let mut symbols = ResourceSymbolMap::new(); diff --git a/crates/tinymist/src/server.rs b/crates/tinymist/src/server.rs index 779f674a..83dddc7a 100644 --- a/crates/tinymist/src/server.rs +++ b/crates/tinymist/src/server.rs @@ -1040,8 +1040,7 @@ impl LanguageState { type R = CompilerQueryResponse; assert!(query.fold_feature() != FoldRequestFeature::ContextFreeUnique); - let snap_stat = client.snapshot_with_stat(&query)?; - let handle = client.handle.clone(); + let fut_stat = client.query_snapshot_with_stat(&query)?; let entry = query .associated_path() .map(|path| client.config.determine_entry(Some(path.into()))) @@ -1050,10 +1049,8 @@ impl LanguageState { Some(EntryState::new_rooted(root, Some(*DETACHED_ENTRY))) }); - let rev_lock = handle.analysis.lock_revision(); - just_future(async move { - let mut snap = snap_stat.snap.receive().await?; + let mut snap = fut_stat.fut.receive().await?; // todo: whether it is safe to inherit success_doc with changed entry if !is_pinning { snap = snap.task(TaskInputs { @@ -1061,34 +1058,31 @@ impl LanguageState { ..Default::default() }); } - snap_stat.stat.snap(); + fut_stat.stat.snap(); - let resp = match query { - SemanticTokensFull(req) => handle.run_semantic(snap, req, R::SemanticTokensFull), - SemanticTokensDelta(req) => handle.run_semantic(snap, req, R::SemanticTokensDelta), - Hover(req) => handle.run_stateful(snap, req, R::Hover), - GotoDefinition(req) => handle.run_stateful(snap, req, R::GotoDefinition), - GotoDeclaration(req) => handle.run_semantic(snap, req, R::GotoDeclaration), - References(req) => handle.run_stateful(snap, req, R::References), - InlayHint(req) => handle.run_semantic(snap, req, R::InlayHint), - DocumentHighlight(req) => handle.run_semantic(snap, req, R::DocumentHighlight), - DocumentColor(req) => handle.run_semantic(snap, req, R::DocumentColor), - DocumentLink(req) => handle.run_semantic(snap, req, R::DocumentLink), - CodeAction(req) => handle.run_semantic(snap, req, R::CodeAction), - CodeLens(req) => handle.run_semantic(snap, req, R::CodeLens), - Completion(req) => handle.run_stateful(snap, req, R::Completion), - SignatureHelp(req) => handle.run_semantic(snap, req, R::SignatureHelp), - Rename(req) => handle.run_stateful(snap, req, R::Rename), - WillRenameFiles(req) => handle.run_stateful(snap, req, R::WillRenameFiles), - PrepareRename(req) => handle.run_stateful(snap, req, R::PrepareRename), - Symbol(req) => handle.run_semantic(snap, req, R::Symbol), - WorkspaceLabel(req) => handle.run_semantic(snap, req, R::WorkspaceLabel), - DocumentMetrics(req) => handle.run_stateful(snap, req, R::DocumentMetrics), + match query { + SemanticTokensFull(req) => 
snap.run_semantic(req, R::SemanticTokensFull), + SemanticTokensDelta(req) => snap.run_semantic(req, R::SemanticTokensDelta), + Hover(req) => snap.run_stateful(req, R::Hover), + GotoDefinition(req) => snap.run_stateful(req, R::GotoDefinition), + GotoDeclaration(req) => snap.run_semantic(req, R::GotoDeclaration), + References(req) => snap.run_stateful(req, R::References), + InlayHint(req) => snap.run_semantic(req, R::InlayHint), + DocumentHighlight(req) => snap.run_semantic(req, R::DocumentHighlight), + DocumentColor(req) => snap.run_semantic(req, R::DocumentColor), + DocumentLink(req) => snap.run_semantic(req, R::DocumentLink), + CodeAction(req) => snap.run_semantic(req, R::CodeAction), + CodeLens(req) => snap.run_semantic(req, R::CodeLens), + Completion(req) => snap.run_stateful(req, R::Completion), + SignatureHelp(req) => snap.run_semantic(req, R::SignatureHelp), + Rename(req) => snap.run_stateful(req, R::Rename), + WillRenameFiles(req) => snap.run_stateful(req, R::WillRenameFiles), + PrepareRename(req) => snap.run_stateful(req, R::PrepareRename), + Symbol(req) => snap.run_semantic(req, R::Symbol), + WorkspaceLabel(req) => snap.run_semantic(req, R::WorkspaceLabel), + DocumentMetrics(req) => snap.run_stateful(req, R::DocumentMetrics), _ => unreachable!(), - }; - - drop(rev_lock); - resp + } }) } } diff --git a/crates/tinymist/src/task/export.rs b/crates/tinymist/src/task/export.rs index 72655ad8..6aca7e8c 100644 --- a/crates/tinymist/src/task/export.rs +++ b/crates/tinymist/src/task/export.rs @@ -21,7 +21,7 @@ use crate::tool::text::FullTextDigest; use crate::{ actor::{ editor::EditorRequest, - typ_client::QuerySnap, + typ_client::WorldSnapFut, typ_server::{CompiledArtifact, ExportSignal}, }, tool::word_count, @@ -66,7 +66,7 @@ impl ExportTask { pub fn oneshot( &self, - snap: QuerySnap, + snap: WorldSnapFut, entry: Option, kind: ExportKind, ) -> impl Future>> { diff --git a/tests/e2e/main.rs b/tests/e2e/main.rs index a666e22f..81e7810f 100644 --- a/tests/e2e/main.rs +++ b/tests/e2e/main.rs @@ -374,7 +374,7 @@ fn e2e() { }); let hash = replay_log(&tinymist_binary, &root.join("neovim")); - insta::assert_snapshot!(hash, @"siphash128_13:baceda7fb09289fcbced2cc0ae70c5da"); + insta::assert_snapshot!(hash, @"siphash128_13:a75933498ed24db6977a56633c6ddc0c"); } { @@ -385,7 +385,7 @@ fn e2e() { }); let hash = replay_log(&tinymist_binary, &root.join("vscode")); - insta::assert_snapshot!(hash, @"siphash128_13:67d6465eafecd5f1c4dcc85872e714b8"); + insta::assert_snapshot!(hash, @"siphash128_13:e5cf7a1c5f8383af4bae8c757018fae1"); } }
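Usage sketches (illustrative only, not part of the applied diff):

The new crates/tinymist-query/src/adt/revision.rs module is the heart of this patch: a query locks an (estimated) revision on the main thread, binds the lock to a concrete revision once the world snapshot is known, resolves a RevisionSlot derived from the closest earlier one, and garbage-collects old slots when the last lock on them is released. A minimal sketch of that lifecycle, assuming the module is reachable as tinymist_query::adt::revision and restoring the generic parameters (RevisionManager<T>, Arc<RevisionSlot<T>>) that the diff as rendered above has dropped:

use std::num::NonZeroUsize;

use tinymist_query::adt::revision::{RevisionManager, RevisionManagerLike};

fn main() {
    // One manager per cached resource; T is whatever per-revision data is kept.
    let mut manager: RevisionManager<String> = RevisionManager::default();

    // Main thread: take a lock before the exact revision is known ...
    let mut lock = manager.lock_estimated();
    // ... and bind it once the compiler snapshot reports its revision.
    let rev = NonZeroUsize::new(3).unwrap();
    lock.access(rev);

    // Resolve (or create) the slot for that revision, deriving its data from
    // the closest earlier slot if one exists.
    let slot = manager.find_revision(rev, |prev| match prev {
        Some(prev) => format!("derived from revision {}", prev.revision),
        None => "fresh slot".to_owned(),
    });
    assert_eq!(slot.revision, 3);

    // When the query finishes: unlock, then GC below the oldest revision that
    // is still locked (or below the newest slot if nothing is locked).
    if let Some(min_rev) = manager.unlock(&mut lock) {
        manager.gc(min_rev);
    }
}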
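SemanticTokenCache replaces the old per-context CacheInner: every file path owns a RevisionManager<OnceLock<SemanticTokens>>, SemanticTokenCache::acquire is called on the main thread with the client's previous_result_id, and the resulting SemanticTokenContext exposes the previously cached token list plus a slot for the new one, both kept alive by revision locks until the context drops. A crate-internal sketch of one round trip, assuming SemanticTokenCache and SemanticTokens are re-exported at the crate root as the rest of the diff suggests; tokenization itself is elided:

use std::{path::Path, sync::Arc};

use lsp_types::SemanticToken;
use parking_lot::Mutex;

use crate::{SemanticTokenCache, SemanticTokens};

/// Produce tokens for `path` and return the previously cached list (if any)
/// together with the result id under which the fresh list was stored.
fn tokenize_round_trip(
    cache: Arc<Mutex<SemanticTokenCache>>,
    path: &Path,
    previous_result_id: Option<&str>,
    fresh_tokens: Vec<SemanticToken>,
) -> (Option<Vec<SemanticToken>>, String) {
    // Main thread: lock the previous revision (if the id parses) and the next
    // one, so neither slot is garbage-collected while the query runs.
    let ctx = SemanticTokenCache::acquire(cache, path, previous_result_id);

    // Worker thread: store the freshly computed tokens under the next
    // revision and report that revision as the new result id.
    let output: SemanticTokens = Arc::new(fresh_tokens);
    let result_id = ctx.cache_result(output.clone());

    // A delta request can now diff against the cached previous revision.
    let previous = ctx.previous().map(<[SemanticToken]>::to_vec);
    (previous, result_id)
    // Dropping `ctx` releases both revision locks and may trigger GC.
}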
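The delta algorithm itself moved unchanged from semantic_tokens/delta.rs into semantic_tokens_delta.rs: trim the common prefix and suffix, then emit a single SemanticTokensEdit covering the changed middle. Since LSP encodes each semantic token as five u32 values, start and delete_count are scaled by 5. A test-shaped sketch of that behaviour (it would have to sit next to the private token_delta function; the tok helper is hypothetical):

use lsp_types::SemanticToken;

/// Hypothetical helper: tokens that differ only in their type index.
fn tok(token_type: u32) -> SemanticToken {
    SemanticToken {
        delta_line: 1,
        delta_start: 0,
        length: 1,
        token_type,
        token_modifiers_bitset: 0,
    }
}

#[test]
fn single_edit_over_changed_middle() {
    let from = [tok(0), tok(1), tok(2)];
    let to = [tok(0), tok(7), tok(2)];

    let edits = token_delta(&from, &to);
    assert_eq!(edits.len(), 1);
    assert_eq!(edits[0].start, 5); // one shared leading token * 5 u32s
    assert_eq!(edits[0].delete_count, 5); // one replaced token * 5 u32s
    assert_eq!(edits[0].data.as_deref(), Some(&[tok(7)][..]));
}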
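On the server side, CompileHandler::query_snapshot now takes the AnalysisRevLock (and, for semantic-token requests, the token-cache slots) on the main thread, and that lock rides inside QuerySnapFut/QuerySnap until Analysis::snapshot_ consumes it, so cmd.rs and server.rs no longer juggle rev_lock by hand. A crate-internal sketch of the resulting call order, mirroring the rewritten query dispatch in server.rs (error plumbing simplified, only two request kinds shown):

use anyhow::anyhow;
use tinymist_query::{CompilerQueryRequest, CompilerQueryResponse};

use crate::actor::typ_client::CompileClientActor;

async fn run_one_query(
    client: &CompileClientActor,
    query: CompilerQueryRequest,
) -> anyhow::Result<CompilerQueryResponse> {
    // Main thread: requests a world snapshot and locks the analysis (and
    // token) caches before anything is awaited.
    let fut_stat = client
        .query_snapshot_with_stat(&query)
        .map_err(|err| anyhow!("{err:?}"))?;

    // Any thread: wait for the compiler snapshot; the lock travels with it.
    let snap = fut_stat
        .fut
        .receive()
        .await
        .map_err(|err| anyhow!("{err:?}"))?;
    fut_stat.stat.snap();

    // run_semantic/run_stateful hand the lock to Analysis::snapshot_, and it
    // is released (possibly triggering GC) when the LocalContextGuard drops.
    match query {
        CompilerQueryRequest::SemanticTokensFull(req) => {
            snap.run_semantic(req, CompilerQueryResponse::SemanticTokensFull)
        }
        CompilerQueryRequest::Hover(req) => {
            snap.run_stateful(req, CompilerQueryResponse::Hover)
        }
        _ => Err(anyhow!("only two request kinds handled in this sketch")),
    }
}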