dev: cache expression information correctly (#736)

* dev: cache expression information correctly

* rev

Authored by Myriad-Dreamin on 2024-10-27 20:20:59 +08:00; committed by GitHub
parent 2c38695b6f
commit 1d49e110e2
18 changed files with 430 additions and 187 deletions
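
In short, this change replaces the single global expr-stage and type-check caches with a revision-keyed cache grid: every analysis revision gets a RevisionSlot, each slot's IncrCacheMap may reuse the previous revision's result before recomputing, and a RevisionLock defers garbage collection of a revision's slots until every request pinned to that revision has been dropped. The following is a minimal, std-only sketch of that compute-with-previous pattern; the names (IncrCache, compute, crawl) mirror but simplify the IncrCacheMap in the diff below, which additionally consults a shared global map and defers computation through OnceCell.

use std::collections::HashMap;
use std::hash::Hash;
use std::sync::{Arc, Mutex};

/// A simplified revision-keyed cache slot: `prev` holds the results computed
/// for the previous revision, `next` collects the results for this one.
#[derive(Default, Clone)]
struct IncrCache<K, V> {
    revision: usize,
    prev: Arc<Mutex<HashMap<K, V>>>,
    next: Arc<Mutex<HashMap<K, V>>>,
}

impl<K: Clone + Eq + Hash, V: Clone> IncrCache<K, V> {
    /// Computes the value for `key`, handing the previous revision's value
    /// (if any) to `compute` so it can decide whether the old result is reusable.
    fn compute(&self, key: K, compute: impl FnOnce(Option<V>) -> V) -> V {
        if let Some(hit) = self.next.lock().unwrap().get(&key).cloned() {
            return hit;
        }
        let prev = self.prev.lock().unwrap().get(&key).cloned();
        let value = compute(prev);
        self.next.lock().unwrap().insert(key, value.clone());
        value
    }

    /// Derives the slot for a newer revision: its `prev` map is this slot's
    /// `next` map, so unchanged inputs are answered from the old results.
    fn crawl(&self, revision: usize) -> Self {
        Self {
            revision,
            prev: self.next.clone(),
            next: Arc::default(),
        }
    }
}

fn main() {
    let rev1: IncrCache<u128, String> = IncrCache::default();
    let a = rev1.compute(1, |_| "expr info at rev 1".to_string());
    // A later revision reuses rev 1's value when the input is unchanged.
    let rev2 = rev1.crawl(2);
    let b = rev2.compute(1, |prev| prev.expect("unchanged input hits the cache"));
    println!("rev {}: {}", rev2.revision, b);
    assert_eq!(a, b);
}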

@@ -40,6 +40,7 @@ ecow.workspace = true
siphasher.workspace = true
chrono.workspace = true
rpds.workspace = true
rayon.workspace = true
typst.workspace = true

@@ -250,10 +250,7 @@ mod type_check_tests {
let source = ctx.source_by_path(&path).unwrap();
let result = ctx.type_check(&source);
let result = result
.as_deref()
.map(|e| format!("{:#?}", TypeCheckSnapshot(&source, e)));
let result = result.as_deref().unwrap_or("<nil>");
let result = format!("{:#?}", TypeCheckSnapshot(&source, &result));
assert_snapshot!(result);
});
@@ -323,7 +320,7 @@ mod post_type_check_tests {
let text = node.get().clone().into_text();
let result = ctx.type_check(&source);
let literal_type = result.and_then(|info| post_type_check(ctx.shared_(), &info, node));
let literal_type = post_type_check(ctx.shared_(), &result, node);
with_settings!({
description => format!("Check on {text:?} ({pos:?})"),
@@ -359,7 +356,7 @@ mod type_describe_tests {
let text = node.get().clone().into_text();
let result = ctx.type_check(&source);
let literal_type = result.and_then(|info| post_type_check(ctx.shared_(), &info, node));
let literal_type = post_type_check(ctx.shared_(), &result, node);
with_settings!({
description => format!("Check on {text:?} ({pos:?})"),

@@ -1,3 +1,4 @@
use std::num::NonZeroUsize;
use std::ops::DerefMut;
use std::sync::atomic::{AtomicU64, Ordering};
use std::{collections::HashSet, ops::Deref};
@@ -6,9 +7,10 @@ use comemo::{Track, Tracked};
use lsp_types::Url;
use once_cell::sync::OnceCell;
use parking_lot::Mutex;
use reflexo::debug_loc::DataSource;
use reflexo::hash::{hash128, FxDashMap};
use reflexo::{debug_loc::DataSource, ImmutPath};
use reflexo_typst::WorldDeps;
use reflexo_typst::{EntryReader, WorldDeps};
use rustc_hash::FxHashMap;
use tinymist_world::LspWorld;
use tinymist_world::DETACHED_ENTRY;
use typst::diag::{eco_format, At, FileError, FileResult, SourceResult};
@@ -39,7 +41,7 @@ use crate::{
use super::{analyze_expr_, definition, Definition};
/// The analysis data holds globally.
#[derive(Default)]
#[derive(Default, Clone)]
pub struct Analysis {
/// The position encoding for the workspace.
pub position_encoding: PositionEncoding,
@@ -48,7 +50,9 @@ pub struct Analysis {
/// The global caches for analysis.
pub caches: AnalysisGlobalCaches,
/// The global caches for analysis.
pub workers: AnalysisGlobalWorkers,
pub workers: Arc<AnalysisGlobalWorkers>,
/// The global cache grid for analysis.
pub cache_grid: Arc<Mutex<AnalysisGlobalCacheGrid>>,
}
impl Analysis {
@@ -65,12 +69,11 @@ impl Analysis {
/// Get a snapshot of the analysis data.
pub fn snapshot<'a>(
self: &Arc<Self>,
root: ImmutPath,
&self,
world: LspWorld,
resources: &'a dyn AnalysisResources,
) -> AnalysisContext<'a> {
AnalysisContext::new(root, world, resources, self.clone())
AnalysisContext::new(world, resources, self.clone())
}
/// Clear all cached resources.
@@ -78,45 +81,20 @@ impl Analysis {
self.caches.signatures.clear();
self.caches.static_signatures.clear();
self.caches.terms.clear();
self.caches.expr_stage.clear();
self.caches.type_check.clear();
self.cache_grid.lock().clear();
}
}
type CacheMap<T> = FxDashMap<u128, T>;
// Needed by recursive computation
type DeferredCompute<T> = Arc<OnceCell<T>>;
/// A global (compiler server spanned) cache for all level of analysis results
/// of a module.
#[derive(Default)]
pub struct AnalysisGlobalCaches {
lifetime: AtomicU64,
clear_lifetime: AtomicU64,
expr_stage: CacheMap<(u64, DeferredCompute<Arc<ExprInfo>>)>,
type_check: CacheMap<(u64, DeferredCompute<Option<Arc<TypeScheme>>>)>,
def_signatures: CacheMap<(u64, Definition, DeferredCompute<Option<Signature>>)>,
static_signatures: CacheMap<(u64, Source, Span, DeferredCompute<Option<Signature>>)>,
signatures: CacheMap<(u64, Func, DeferredCompute<Option<Signature>>)>,
terms: CacheMap<(u64, Value, Ty)>,
}
/// A cache for all level of analysis results of a module.
#[derive(Default)]
pub struct AnalysisCaches {
modules: HashMap<TypstFileId, ModuleAnalysisCache>,
completion_files: OnceCell<Vec<PathBuf>>,
root_files: OnceCell<Vec<TypstFileId>>,
module_deps: OnceCell<HashMap<TypstFileId, ModuleDependency>>,
}
/// A cache for module-level analysis results of a module.
///
/// You should not holds across requests, because source code may change.
#[derive(Default)]
pub struct ModuleAnalysisCache {
expr_stage: OnceCell<Arc<ExprInfo>>,
type_check: OnceCell<Option<Arc<TypeScheme>>>,
/// Lock the revision in main thread.
#[must_use]
pub fn lock_revision(&self) -> RevisionLock {
let mut grid = self.cache_grid.lock();
let revision = grid.revision;
*grid.locked_revisions.entry(revision).or_default() += 1;
RevisionLock {
grid: self.cache_grid.clone(),
revision,
}
}
}
/// The resources for analysis.
@@ -147,10 +125,6 @@ pub struct AnalysisGlobalWorkers {
pub struct AnalysisContext<'a> {
/// The world surface for Typst compiler
pub resources: &'a dyn AnalysisResources,
/// The analysis data
pub analysis: Arc<Analysis>,
/// The caches lifetime tick for analysis.
lifetime: u64,
/// Constructed shared context
pub local: LocalContext,
}
@@ -178,22 +152,15 @@ impl<'w> Drop for AnalysisContext<'w> {
impl<'w> AnalysisContext<'w> {
/// Create a new analysis context.
pub fn new(
root: ImmutPath,
world: LspWorld,
resources: &'w dyn AnalysisResources,
a: Arc<Analysis>,
) -> Self {
pub fn new(world: LspWorld, resources: &'w dyn AnalysisResources, a: Analysis) -> Self {
let lifetime = a.caches.lifetime.fetch_add(1, Ordering::SeqCst);
let slot = a.cache_grid.lock().find_revision(world.revision());
Self {
resources,
lifetime,
analysis: a.clone(),
local: LocalContext {
analysis: a.clone(),
caches: AnalysisCaches::default(),
shared: Arc::new(SharedContext {
root,
slot,
lifetime,
world,
analysis: a,
@@ -263,13 +230,13 @@ impl<'w> AnalysisContext<'w> {
}
pub(crate) fn type_of_span_(&mut self, source: &Source, s: Span) -> Option<Ty> {
self.type_check(source)?.type_of_span(s)
self.type_check(source).type_of_span(s)
}
pub(crate) fn literal_type_of_node(&mut self, k: LinkedNode) -> Option<Ty> {
let id = k.span().id()?;
let source = self.source_by_id(id).ok()?;
let ty_chk = self.type_check(&source)?;
let ty_chk = self.type_check(&source);
let ty = post_type_check(self.shared_(), &ty_chk, k.clone())
.or_else(|| ty_chk.type_of_span(k.span()))?;
@@ -323,21 +290,11 @@ impl<'w> AnalysisContext<'w> {
.caches
.signatures
.retain(|_, (l, _, _)| lifetime - *l < 60);
self.analysis
.caches
.expr_stage
.retain(|_, (l, _)| lifetime - *l < 60);
self.analysis
.caches
.type_check
.retain(|_, (l, _)| lifetime - *l < 60);
}
}
/// The local context for analyzers.
pub struct LocalContext {
/// The analysis data
pub analysis: Arc<Analysis>,
/// Local caches for analysis.
pub caches: AnalysisCaches,
/// Constructed shared context
@@ -360,7 +317,7 @@ impl DerefMut for LocalContext {
impl LocalContext {
#[cfg(test)]
pub fn test_completion_files(&mut self, f: impl FnOnce() -> Vec<PathBuf>) {
pub fn test_completion_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
self.caches.completion_files.get_or_init(f);
}
@@ -370,20 +327,27 @@ impl LocalContext {
}
/// Get all the source files in the workspace.
pub(crate) fn completion_files(&self, pref: &PathPreference) -> impl Iterator<Item = &PathBuf> {
pub(crate) fn completion_files(
&self,
pref: &PathPreference,
) -> impl Iterator<Item = &TypstFileId> {
let r = pref.ext_matcher();
self.caches
.completion_files
.get_or_init(|| {
scan_workspace_files(
&self.root,
PathPreference::Special.ext_matcher(),
|relative_path| relative_path.to_owned(),
)
if let Some(root) = self.world.workspace_root() {
scan_workspace_files(&root, PathPreference::Special.ext_matcher(), |p| {
TypstFileId::new(None, VirtualPath::new(p))
})
} else {
vec![]
}
})
.iter()
.filter(move |p| {
p.extension()
p.vpath()
.as_rooted_path()
.extension()
.and_then(|p| p.to_str())
.is_some_and(|e| r.is_match(e))
})
@@ -393,7 +357,7 @@ impl LocalContext {
pub fn source_files(&self) -> &Vec<TypstFileId> {
self.caches.root_files.get_or_init(|| {
self.completion_files(&PathPreference::Source)
.map(|p| TypstFileId::new(None, VirtualPath::new(p.as_path())))
.copied()
.collect()
})
}
@@ -418,7 +382,7 @@ impl LocalContext {
}
/// Get the type check information of a source file.
pub(crate) fn type_check(&mut self, source: &Source) -> Option<Arc<TypeScheme>> {
pub(crate) fn type_check(&mut self, source: &Source) -> Arc<TypeScheme> {
let id = source.id();
let cache = &self.caches.modules.entry(id).or_default().type_check;
cache.get_or_init(|| self.shared.type_check(source)).clone()
@@ -429,17 +393,20 @@ impl LocalContext {
pub struct SharedContext {
/// The caches lifetime tick for analysis.
pub lifetime: u64,
/// The root of the workspace.
/// This means that the analysis result won't be valid if the root directory
/// changes.
pub root: ImmutPath,
/// Get the world surface for Typst compiler.
pub world: LspWorld,
/// The analysis data
pub analysis: Arc<Analysis>,
pub analysis: Analysis,
/// The revision slot
slot: Arc<RevisionSlot>,
}
impl SharedContext {
/// Get revision of current analysis
pub fn revision(&self) -> usize {
self.slot.revision
}
/// Get the position encoding during session.
pub(crate) fn position_encoding(&self) -> PositionEncoding {
self.analysis.position_encoding
@@ -504,8 +471,11 @@ impl SharedContext {
/// Get file's id by its path
pub fn file_id_by_path(&self, p: &Path) -> FileResult<TypstFileId> {
// todo: source in packages
let root = &self.root;
let relative_path = p.strip_prefix(root).map_err(|_| {
let root = self.world.workspace_root().ok_or_else(|| {
let reason = eco_format!("workspace root not found");
FileError::Other(Some(reason))
})?;
let relative_path = p.strip_prefix(&root).map_err(|_| {
let reason = eco_format!("access denied, path: {p:?}, root: {root:?}");
FileError::Other(Some(reason))
})?;
@@ -620,33 +590,28 @@ impl SharedContext {
pub(crate) fn expr_stage_(
self: &Arc<Self>,
source: &Source,
route: &mut Processing<LexicalScope>,
route: &mut Processing<Option<Arc<LazyHash<LexicalScope>>>>,
) -> Arc<ExprInfo> {
use crate::syntax::expr_of;
let res = {
let entry = self.analysis.caches.expr_stage.entry(hash128(&source));
let res = entry.or_insert_with(|| (self.lifetime, DeferredCompute::default()));
res.1.clone()
};
res.get_or_init(|| expr_of(self.clone(), source.clone(), route))
.clone()
self.slot.expr_stage.compute(hash128(&source), |prev| {
expr_of(self.clone(), source.clone(), route, prev)
})
}
pub(crate) fn exports_of(
self: &Arc<Self>,
source: Source,
route: &mut Processing<LexicalScope>,
) -> LexicalScope {
source: &Source,
route: &mut Processing<Option<Arc<LazyHash<LexicalScope>>>>,
) -> Option<Arc<LazyHash<LexicalScope>>> {
if let Some(s) = route.get(&source.id()) {
return s.clone();
}
self.expr_stage_(&source, route).exports.clone()
Some(self.expr_stage_(source, route).exports.clone())
}
/// Get the type check information of a source file.
pub(crate) fn type_check(self: &Arc<Self>, source: &Source) -> Option<Arc<TypeScheme>> {
pub(crate) fn type_check(self: &Arc<Self>, source: &Source) -> Arc<TypeScheme> {
let mut route = Processing::default();
self.type_check_(source, &mut route)
}
@@ -656,17 +621,26 @@ impl SharedContext {
self: &Arc<Self>,
source: &Source,
route: &mut Processing<Arc<TypeScheme>>,
) -> Option<Arc<TypeScheme>> {
) -> Arc<TypeScheme> {
use crate::analysis::type_check;
// todo: recursive hash
let expr_info = self.expr_stage(source);
let res = {
let entry = self.analysis.caches.type_check.entry(hash128(&expr_info));
let res = entry.or_insert_with(|| (self.lifetime, Arc::default()));
res.1.clone()
};
res.get_or_init(|| type_check(self.clone(), expr_info, route))
.clone()
let ei = self.expr_stage(source);
self.slot.type_check.compute(hash128(&ei), |prev| {
let cache_hit = prev.and_then(|prev| {
// todo: recursively check changed scheme type
if prev.revision != ei.revision {
return None;
}
Some(prev)
});
if let Some(prev) = cache_hit {
return prev.clone();
}
type_check(self.clone(), ei, route)
})
}
pub(crate) fn definition(
@ -882,7 +856,7 @@ impl SharedContext {
let source = self.shared.source_by_id(fid).ok().unwrap();
let expr = self.shared.expr_stage(&source);
self.shared.type_check(&source);
expr.imports.iter().for_each(|fid| {
expr.imports.iter().for_each(|(fid, _)| {
if !self.analyzed.lock().insert(*fid) {
return;
}
@@ -900,6 +874,210 @@ impl SharedContext {
}
}
#[derive(Clone)]
struct IncrCacheMap<K, V> {
revision: usize,
global: Arc<Mutex<FxDashMap<K, (usize, V)>>>,
prev: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
next: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
}
impl<K: Eq + Hash, V> Default for IncrCacheMap<K, V> {
fn default() -> Self {
Self {
revision: 0,
global: Arc::default(),
prev: Arc::default(),
next: Arc::default(),
}
}
}
impl<K, V> IncrCacheMap<K, V> {
fn compute(&self, key: K, compute: impl FnOnce(Option<V>) -> V) -> V
where
K: Clone + Eq + Hash,
V: Clone,
{
let next = self.next.lock().entry(key.clone()).or_default().clone();
next.get_or_init(|| {
let prev = self.prev.lock().get(&key).cloned();
let prev = prev.and_then(|p| p.get().cloned());
let prev = prev.or_else(|| {
let global = self.global.lock();
global.get(&key).map(|global| global.1.clone())
});
let res = compute(prev);
let global = self.global.lock();
let entry = global.entry(key.clone());
use dashmap::mapref::entry::Entry;
match entry {
Entry::Occupied(mut e) => {
let (revision, _) = e.get();
if *revision < self.revision {
e.insert((self.revision, res.clone()));
}
}
Entry::Vacant(e) => {
e.insert((self.revision, res.clone()));
}
}
res
})
.clone()
}
fn crawl(&self, revision: usize) -> Self {
Self {
revision,
prev: self.next.clone(),
global: self.global.clone(),
next: Default::default(),
}
}
}
type CacheMap<T> = Arc<FxDashMap<u128, T>>;
// Needed by recursive computation
type DeferredCompute<T> = Arc<OnceCell<T>>;
/// A global (compiler server spanned) cache for all level of analysis results
/// of a module.
#[derive(Default, Clone)]
pub struct AnalysisGlobalCaches {
lifetime: Arc<AtomicU64>,
clear_lifetime: Arc<AtomicU64>,
def_signatures: CacheMap<(u64, Definition, DeferredCompute<Option<Signature>>)>,
static_signatures: CacheMap<(u64, Source, Span, DeferredCompute<Option<Signature>>)>,
signatures: CacheMap<(u64, Func, DeferredCompute<Option<Signature>>)>,
terms: CacheMap<(u64, Value, Ty)>,
}
/// A cache for all level of analysis results of a module.
#[derive(Default)]
pub struct AnalysisCaches {
modules: HashMap<TypstFileId, ModuleAnalysisCache>,
completion_files: OnceCell<Vec<TypstFileId>>,
root_files: OnceCell<Vec<TypstFileId>>,
module_deps: OnceCell<HashMap<TypstFileId, ModuleDependency>>,
}
/// A cache for module-level analysis results of a module.
///
/// You should not holds across requests, because source code may change.
#[derive(Default)]
pub struct ModuleAnalysisCache {
expr_stage: OnceCell<Arc<ExprInfo>>,
type_check: OnceCell<Arc<TypeScheme>>,
}
/// The grid cache for all level of analysis results of a module.
#[derive(Default)]
pub struct AnalysisGlobalCacheGrid {
revision: usize,
default_slot: RevisionSlot,
revisions: Vec<Arc<RevisionSlot>>,
locked_revisions: HashMap<usize, usize>,
}
impl AnalysisGlobalCacheGrid {
fn clear(&mut self) {
self.revisions.clear();
}
fn gc(&mut self, rev: usize) {
self.revisions.retain(|r| r.revision >= rev);
self.default_slot
.expr_stage
.global
.lock()
.retain(|_, r| r.0 + 60 >= rev);
self.default_slot
.type_check
.global
.lock()
.retain(|_, r| r.0 + 60 >= rev);
}
/// Find the last revision slot by revision number.
fn find_revision(&mut self, revision: NonZeroUsize) -> Arc<RevisionSlot> {
let slot_base = self
.revisions
.iter()
.filter(|e| e.revision <= revision.get())
.reduce(|a, b| if a.revision > b.revision { a } else { b });
if let Some(slot) = slot_base {
if slot.revision == revision.get() {
return slot.clone();
}
}
let mut slot = slot_base
.map(|e| RevisionSlot {
revision: e.revision,
expr_stage: e.expr_stage.crawl(revision.get()),
type_check: e.type_check.crawl(revision.get()),
})
.unwrap_or_else(|| self.default_slot.clone());
slot.revision = revision.get();
let slot = Arc::new(slot);
self.revisions.push(slot.clone());
self.revision = revision.get().max(self.revision);
slot
}
}
/// A lock for revision.
pub struct RevisionLock {
grid: Arc<Mutex<AnalysisGlobalCacheGrid>>,
revision: usize,
}
impl Drop for RevisionLock {
fn drop(&mut self) {
let mut grid = self.grid.lock();
let revision_cnt = grid
.locked_revisions
.entry(self.revision)
.or_insert_with(|| panic!("revision {} is not locked", self.revision));
*revision_cnt -= 1;
if *revision_cnt != 0 {
return;
}
grid.locked_revisions.remove(&self.revision);
if grid.revision <= self.revision {
return;
}
let existing = grid.locked_revisions.keys().min().copied();
let gc_revision = existing.unwrap_or(self.revision);
let grid = self.grid.clone();
rayon::spawn(move || {
grid.lock().gc(gc_revision);
});
}
}
#[derive(Default, Clone)]
struct RevisionSlot {
revision: usize,
expr_stage: IncrCacheMap<u128, Arc<ExprInfo>>,
type_check: IncrCacheMap<u128, Arc<TypeScheme>>,
}
impl Drop for RevisionSlot {
fn drop(&mut self) {
log::info!("revision {} is dropped", self.revision)
}
}
fn ceil_char_boundary(text: &str, mut cursor: usize) -> usize {
// while is not char boundary, move cursor to right
while cursor < text.len() && !text.is_char_boundary(cursor) {

@@ -254,13 +254,13 @@ fn analyze_type_signature(
let (type_info, ty) = match callee_node {
SignatureTarget::Convert(..) => return None,
SignatureTarget::SyntaxFast(source, span) | SignatureTarget::Syntax(source, span) => {
let type_info = ctx.type_check(source)?;
let type_info = ctx.type_check(source);
let ty = type_info.type_of_span(*span)?;
Some((type_info, ty))
}
SignatureTarget::Def(source, def) => {
let span = def.decl.span();
let type_info = ctx.type_check(source.as_ref()?)?;
let type_info = ctx.type_check(source.as_ref()?);
let ty = type_info.type_of_span(span)?;
Some((type_info, ty))
}
@@ -268,7 +268,7 @@ fn analyze_type_signature(
let source = ctx.source_by_id(f.span().id()?).ok()?;
let node = source.find(f.span())?;
let def = get_non_strict_def_target(node.parent()?.clone())?;
let type_info = ctx.type_check(&source)?;
let type_info = ctx.type_check(&source);
let ty = type_info.type_of_span(def.name()?.span())?;
Some((type_info, ty))
}

@@ -26,8 +26,9 @@ pub(crate) fn type_check(
ctx: Arc<SharedContext>,
expr_info: Arc<ExprInfo>,
route: &mut Processing<Arc<TypeScheme>>,
) -> Option<Arc<TypeScheme>> {
) -> Arc<TypeScheme> {
let mut info = TypeScheme::default();
info.revision = expr_info.revision;
route.insert(expr_info.fid, Arc::new(TypeScheme::default()));
@@ -48,7 +49,7 @@ pub(crate) fn type_check(
checker.route.remove(&checker.ei.fid);
Some(Arc::new(info))
Arc::new(info)
}
#[derive(BindTyCtx)]
@@ -126,7 +127,7 @@ impl<'a> TypeChecker<'a> {
let ext_type_info = if let Some(route) = self.route.get(&source.id()) {
route.clone()
} else {
self.ctx.type_check_(&source, self.route)?
self.ctx.type_check_(&source, self.route)
};
let ext_def = ext_def_use_info.exports.get(&name)?;

@@ -114,13 +114,12 @@ impl StatefulRequest for CompletionRequest {
let parent = cano_expr.parent()?;
if matches!(parent.kind(), SyntaxKind::Named | SyntaxKind::Args) {
let ty_chk = ctx.type_check(&source);
if let Some(ty_chk) = ty_chk {
let ty = ty_chk.type_of_span(cano_expr.span());
log::debug!("check string ty: {ty:?}");
if let Some(Ty::Builtin(BuiltinTy::Path(path_filter))) = ty {
completion_result =
complete_path(ctx, Some(cano_expr), &source, cursor, &path_filter);
}
let ty = ty_chk.type_of_span(cano_expr.span());
log::debug!("check string ty: {ty:?}");
if let Some(Ty::Builtin(BuiltinTy::Path(path_filter))) = ty {
completion_result =
complete_path(ctx, Some(cano_expr), &source, cursor, &path_filter);
}
}
}

@@ -1,3 +1,5 @@
use reflexo_typst::EntryReader;
use crate::prelude::*;
/// Stores diagnostics for files.
@@ -32,7 +34,11 @@ fn convert_diagnostic(
let source = ctx.world().source(id)?;
lsp_range = diagnostic_range(&source, span, ctx.position_encoding());
} else {
uri = path_to_url(&ctx.local.root)?;
let root = ctx
.world
.workspace_root()
.ok_or_else(|| anyhow::anyhow!("no workspace root"))?;
uri = path_to_url(&root)?;
lsp_range = LspRange::default();
};

@@ -310,7 +310,7 @@ fn format_ty(ty: Option<&Ty>, doc_ty: Option<&mut ShowTypeRepr>) -> TypeRepr {
pub(crate) fn variable_docs(ctx: &mut AnalysisContext, pos: &LinkedNode) -> Option<VarDocs> {
let source = ctx.source_by_id(pos.span().id()?).ok()?;
let type_info = ctx.type_check(&source)?;
let type_info = ctx.type_check(&source);
let ty = type_info.type_of_span(pos.span())?;
// todo multiple sources

@@ -1,9 +1,11 @@
use std::ops::DerefMut;
use parking_lot::Mutex;
use reflexo::hash::hash128;
use reflexo_typst::LazyHash;
use rpds::RedBlackTreeMapSync;
use rustc_hash::{FxHashMap, FxHashSet};
use std::collections::HashSet;
use rustc_hash::FxHashMap;
use std::ops::Deref;
use tinymist_analysis::import::resolve_id_by_path;
use typst::{
foundations::{Element, NativeElement, Value},
@@ -26,10 +28,43 @@ pub type Processing<T> = FxHashMap<TypstFileId, T>;
pub(crate) fn expr_of(
ctx: Arc<SharedContext>,
source: Source,
route: &mut Processing<LexicalScope>,
route: &mut Processing<Option<Arc<LazyHash<LexicalScope>>>>,
prev: Option<Arc<ExprInfo>>,
) -> Arc<ExprInfo> {
log::debug!("expr_of: {:?}", source.id());
route.insert(source.id(), None);
let cache_hit = prev.and_then(|prev| {
if prev.source.len_bytes() != source.len_bytes()
|| hash128(&prev.source) != hash128(&source)
{
return None;
}
for (i, prev_exports) in &prev.imports {
let ei = ctx.exports_of(&ctx.source_by_id(*i).ok()?, route);
// If there is a cycle, the expression will be stable as the source is
// unchanged.
if let Some(exports) = ei {
if prev_exports.size() != exports.size()
|| hash128(&prev_exports) != hash128(&exports)
{
return None;
}
}
}
Some(prev)
});
if let Some(prev) = cache_hit {
route.remove(&source.id());
return prev;
}
let revision = ctx.revision();
let resolves_base = Arc::new(Mutex::new(vec![]));
let resolves = resolves_base.clone();
@@ -40,7 +75,7 @@ pub(crate) fn expr_of(
let exprs_base = Arc::new(Mutex::new(FxHashMap::default()));
let exprs = exprs_base.clone();
let imports_base = Arc::new(Mutex::new(FxHashSet::default()));
let imports_base = Arc::new(Mutex::new(FxHashMap::default()));
let imports = imports_base.clone();
let module_docstring = Arc::new(
@@ -69,11 +104,10 @@ pub(crate) fn expr_of(
route,
};
w.route.insert(w.fid, LexicalScope::default());
let root = source.root().cast::<ast::Markup>().unwrap();
let root = w.check_in_mode(root.to_untyped().children(), InterpretMode::Markup);
let root_scope = w.summarize_scope();
w.route.insert(w.fid, root_scope.clone());
let root_scope = Arc::new(LazyHash::new(w.summarize_scope()));
w.route.insert(w.fid, Some(root_scope.clone()));
while let Some((node, lexical)) = w.defers.pop() {
w.lexical = lexical;
@@ -87,10 +121,12 @@ pub(crate) fn expr_of(
let info = ExprInfo {
fid: source.id(),
revision,
source: source.clone(),
resolves: HashMap::from_iter(std::mem::take(resolves_base.lock().deref_mut())),
module_docstring,
docstrings: std::mem::take(docstrings_base.lock().deref_mut()),
imports: HashSet::from_iter(std::mem::take(imports_base.lock().deref_mut())),
imports: HashMap::from_iter(std::mem::take(imports_base.lock().deref_mut())),
exports,
exprs: std::mem::take(exprs_base.lock().deref_mut()),
root,
@@ -104,17 +140,22 @@ pub(crate) fn expr_of(
#[derive(Debug)]
pub struct ExprInfo {
pub fid: TypstFileId,
pub revision: usize,
pub source: Source,
pub resolves: FxHashMap<Span, Interned<RefExpr>>,
pub module_docstring: Arc<DocString>,
pub docstrings: FxHashMap<DeclExpr, Arc<DocString>>,
pub exprs: FxHashMap<Span, Expr>,
pub imports: FxHashSet<TypstFileId>,
pub exports: LexicalScope,
pub imports: FxHashMap<TypstFileId, Arc<LazyHash<LexicalScope>>>,
pub exports: Arc<LazyHash<LexicalScope>>,
pub root: Expr,
}
impl std::hash::Hash for ExprInfo {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.revision.hash(state);
self.source.hash(state);
self.exports.hash(state);
self.root.hash(state);
}
}
@@ -185,8 +226,8 @@ struct LexicalContext {
pub(crate) struct ExprWorker<'a> {
fid: TypstFileId,
ctx: Arc<SharedContext>,
imports: Arc<Mutex<FxHashSet<TypstFileId>>>,
import_buffer: Vec<TypstFileId>,
imports: Arc<Mutex<FxHashMap<TypstFileId, Arc<LazyHash<LexicalScope>>>>>,
import_buffer: Vec<(TypstFileId, Arc<LazyHash<LexicalScope>>)>,
docstrings: Arc<Mutex<FxHashMap<DeclExpr, Arc<DocString>>>>,
exprs: Arc<Mutex<FxHashMap<Span, Expr>>>,
resolves: Arc<Mutex<ResolveVec>>,
@@ -194,7 +235,7 @@ pub(crate) struct ExprWorker<'a> {
lexical: LexicalContext,
defers: Vec<(SyntaxNode, LexicalContext)>,
route: &'a mut Processing<LexicalScope>,
route: &'a mut Processing<Option<Arc<LazyHash<LexicalScope>>>>,
comment_matcher: DocCommentMatcher,
}
@@ -582,16 +623,10 @@ impl<'a> ExprWorker<'a> {
if let Some(f) = fid {
log::debug!("prefetch type check: {f:?}");
self.ctx.prefetch_type_check(f);
self.import_buffer.push(f);
}
let scope = if let Some(fid) = &fid {
let source = self.ctx.source_by_id(*fid);
if let Ok(source) = source {
Some(ExprScope::Lexical(self.ctx.exports_of(source, self.route)))
} else {
None
}
Some(ExprScope::Lexical(self.exports_of(*fid)))
} else {
match &mod_expr {
Some(Expr::Type(Ty::Value(v))) => match &v.val {
@@ -1105,6 +1140,18 @@ impl<'a> ExprWorker<'a> {
_ => None,
}
}
fn exports_of(&mut self, fid: TypstFileId) -> LexicalScope {
let imported = self
.ctx
.source_by_id(fid)
.ok()
.and_then(|src| self.ctx.exports_of(&src, self.route))
.unwrap_or_default();
let res = imported.as_ref().deref().clone();
self.import_buffer.push((fid, imported));
res
}
}
fn extract_ref(step: Option<Expr>) -> (Option<Expr>, Option<Expr>) {

@@ -43,10 +43,10 @@ pub fn construct_module_dependencies(
dependencies
.entry(file_id)
.or_insert_with(|| ModuleDependency {
dependencies: ei.imports.iter().cloned().collect(),
dependencies: ei.imports.keys().cloned().collect(),
dependents: EcoVec::default(),
});
for dep in ei.imports.clone() {
for (dep, _) in ei.imports.clone() {
dependents
.entry(dep)
.or_insert_with(EcoVec::new)

@@ -61,7 +61,7 @@ pub fn run_with_ctx<T>(
.map(|p| TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap())))
.collect::<Vec<_>>();
let mut ctx = Arc::new(Analysis::default()).snapshot(root, w.snapshot(), &());
let mut ctx = Arc::new(Analysis::default()).snapshot(w.snapshot(), &());
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p)

@@ -957,6 +957,8 @@ impl IfTy {
/// A type scheme on a group of syntax structures (typing)
#[derive(Default)]
pub struct TypeScheme {
/// The revision used
pub revision: usize,
/// The typing on definitions
pub vars: FxHashMap<DeclExpr, TypeVarBounds>,
/// The checked documentation of definitions

@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use ecow::{eco_format, EcoString};
use lsp_types::{CompletionItem, CompletionTextEdit, InsertTextFormat, TextEdit};
use once_cell::sync::OnceCell;
use reflexo::path::{unix_slash, PathClean};
use reflexo::path::unix_slash;
use tinymist_world::LspWorld;
use typst::foundations::{AutoValue, Func, Label, NoneValue, Repr, Type, Value};
use typst::layout::{Dir, Length};
@@ -62,7 +62,7 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
let types = (|| {
let id = self.root.span().id()?;
let src = self.ctx.source_by_id(id).ok()?;
self.ctx.type_check(&src)
Some(self.ctx.type_check(&src))
})();
let types = types.as_ref();
@@ -1135,7 +1135,7 @@ pub fn complete_path(
let has_root = path.has_root();
let src_path = id.vpath();
let base = src_path.resolve(&ctx.local.root)?;
let base = id;
let dst_path = src_path.join(path);
let mut compl_path = dst_path.as_rootless_path();
if !compl_path.is_dir() {
@@ -1148,39 +1148,35 @@ pub fn complete_path(
return None;
}
let dirs = ctx.local.root.clone();
log::debug!("compl_dirs: {dirs:?}");
// find directory or files in the path
let mut folder_completions = vec![];
let folder_completions = vec![];
let mut module_completions = vec![];
// todo: test it correctly
for path in ctx.completion_files(p) {
log::debug!("compl_check_path: {path:?}");
// diff with root
let path = dirs.join(path);
// Skip self smartly
if path.clean() == base.clean() {
if *path == base {
continue;
}
let label = if has_root {
// diff with root
let w = path.strip_prefix(&ctx.local.root).ok()?;
eco_format!("/{}", unix_slash(w))
unix_slash(path.vpath().as_rooted_path())
} else {
let base = base.parent()?;
let w = pathdiff::diff_paths(&path, base)?;
unix_slash(&w).into()
let base = base.vpath().as_rooted_path();
let path = path.vpath().as_rooted_path();
let w = pathdiff::diff_paths(path, base)?;
unix_slash(&w)
};
log::debug!("compl_label: {label:?}");
if path.is_dir() {
folder_completions.push((label, CompletionKind::Folder));
} else {
module_completions.push((label, CompletionKind::File));
}
module_completions.push((label, CompletionKind::File));
// todo: looks like the folder completion is broken
// if path.is_dir() {
// folder_completions.push((label, CompletionKind::Folder));
// }
}
let replace_range = ctx.to_lsp_range(rng, source);
@@ -1199,7 +1195,7 @@ pub fn complete_path(
};
module_completions.sort_by(|a, b| path_priority_cmp(&a.0, &b.0));
folder_completions.sort_by(|a, b| path_priority_cmp(&a.0, &b.0));
// folder_completions.sort_by(|a, b| path_priority_cmp(&a.0, &b.0));
let mut sorter = 0;
let digits = (module_completions.len() + folder_completions.len())