feat: complete function parameters on signatures inferred by type checking (#203)

* dev: check upper bound types in assigning positions

* dev: check template signatures

* feat: completion with type signatures

* dev: document a bit

* dev: save todo work on auto completion on user functions

* dev: fix document target

* dev: remove playground snaps

* dev: fix ident target

* dev: update snapshot
This commit is contained in:
Myriad-Dreamin 2024-04-20 15:03:46 +08:00 committed by GitHub
parent 4ec4305fd5
commit b35d897919
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 850 additions and 113 deletions

View file

@ -63,7 +63,7 @@ mod type_check_tests {
vars.sort_by(|x, y| x.0.cmp(&y.0));
for (name, var) in vars {
writeln!(f, "{:?} = {:?}", name, info.simplify(var.get_ref()))?;
writeln!(f, "{:?} = {:?}", name, info.simplify(var.get_ref(), true))?;
}
writeln!(f, "---")?;
@ -88,6 +88,49 @@ mod type_check_tests {
}
}
/// Snapshot tests that infer the "literal type" expected at a marked cursor
/// position (the type imposed on an expression by its context, e.g. a let
/// binding or a named argument) and compare the result against stored
/// insta snapshots.
#[cfg(test)]
mod literal_type_check_tests {
use insta::with_settings;
use typst::syntax::LinkedNode;
use crate::analysis::ty;
use crate::syntax::get_check_target;
use crate::tests::*;
#[test]
fn test() {
snapshot_testing("literal_type_check", &|ctx, path| {
let source = ctx.source_by_path(&path).unwrap();
// Resolve the position marker in the fixture (the fixtures use a
// `/* position after */` comment) to a typst byte offset.
let pos = ctx
.to_typst_pos(find_test_position(&source), &source)
.unwrap();
let root = LinkedNode::new(source.root());
// `pos + 1` selects the leaf token just after the marked position.
let node = root.leaf_at(pos + 1).unwrap();
let target = get_check_target(node).unwrap_or_else(|| {
panic!(
"Failed to get check target at {pos:?} in {:?}",
source.text()
)
});
// Text of the node under check; used only in the snapshot description.
let text = target.node().clone().map(|n| n.get().clone().into_text());
let text = text.unwrap_or_default();
let result = ty::type_check(ctx, source.clone());
let literal_type = result.and_then(|info| ty::literal_type_check(ctx, &info, target));
with_settings!({
description => format!("Check on {text:?} ({pos:?})"),
}, {
// `<nil>` marks positions where no literal type could be inferred.
let literal_type = literal_type.map(|e| format!("{e:#?}"))
.unwrap_or_else(|| "<nil>".to_string());
assert_snapshot!(literal_type);
})
});
}
}
#[cfg(test)]
mod module_tests {
use reflexo::path::unix_slash;

View file

@ -13,7 +13,7 @@ use reflexo::hash::hash128;
use reflexo::{cow_mut::CowMut, debug_loc::DataSource, ImmutPath};
use typst::eval::Eval;
use typst::foundations;
use typst::syntax::SyntaxNode;
use typst::syntax::{LinkedNode, SyntaxNode};
use typst::{
diag::{eco_format, FileError, FileResult, PackageError},
syntax::{package::PackageSpec, Source, Span, VirtualPath},
@ -23,8 +23,10 @@ use typst::{foundations::Value, syntax::ast, text::Font};
use typst::{layout::Position, syntax::FileId as TypstFileId};
use super::{
DefUseInfo, FlowType, ImportInfo, PathPreference, Signature, SignatureTarget, TypeCheckInfo,
literal_type_check, DefUseInfo, FlowType, ImportInfo, PathPreference, Signature,
SignatureTarget, TypeCheckInfo,
};
use crate::syntax::get_check_target;
use crate::{
lsp_to_typst,
syntax::{
@ -645,6 +647,25 @@ impl<'w> AnalysisContext<'w> {
let ty_chk = self.type_check(source)?;
ty_chk.mapping.get(&s).cloned()
}
/// Computes the literal (context-expected) type for the syntax node at span
/// `s`.
///
/// Returns `None` when the span has no file id, its source cannot be
/// loaded, or no node is found at the span.
pub(crate) fn literal_type_of_span(&mut self, s: Span) -> Option<FlowType> {
let id = s.id()?;
let source = self.source_by_id(id).ok()?;
let k = LinkedNode::new(source.root()).find(s)?;
self.literal_type_of_node(k)
}
/// Computes the literal (context-expected) type for a concrete syntax node.
///
/// Falls back to the plain checked type recorded for the node's span when
/// no literal type can be derived from its check target.
pub(crate) fn literal_type_of_node(&mut self, k: LinkedNode) -> Option<FlowType> {
let id = k.span().id()?;
let source = self.source_by_id(id).ok()?;
let ty_chk = self.type_check(source.clone())?;
let check_target = get_check_target(k.clone())?;
literal_type_check(self, &ty_chk, check_target.clone())
.or_else(|| ty_chk.mapping.get(&k.span()).cloned())
}
}
/// The context for searching in the workspace.

View file

@ -19,9 +19,7 @@ use crate::analysis::resolve_callee;
use crate::syntax::{get_def_target, get_deref_target, DefTarget};
use crate::AnalysisContext;
use super::{
find_definition, DefinitionLink, FlowType, FlowVar, LexicalKind, LexicalVarKind, TypeCheckInfo,
};
use super::{find_definition, DefinitionLink, FlowType, FlowVar, LexicalKind, LexicalVarKind};
// pub fn analyze_signature
@ -300,7 +298,6 @@ fn resolve_callee_v2(
let _t = ctx.type_check(source)?;
let _ = FlowVar::name;
let _ = FlowVar::id;
let _ = TypeCheckInfo::simplify;
let root = LinkedNode::new(def_source.root());
let def_node = root.leaf_at(def_at.1.start + 1)?;

View file

@ -1,7 +1,7 @@
//! Top-level evaluation of a source file.
use std::{
collections::{HashMap, HashSet},
collections::{BTreeMap, HashMap, HashSet},
sync::Arc,
};
@ -25,44 +25,16 @@ mod def;
pub(crate) use def::*;
mod builtin;
pub(crate) use builtin::*;
mod literal_flow;
pub(crate) use literal_flow::*;
pub(crate) struct TypeCheckInfo {
pub vars: HashMap<DefId, FlowVar>,
pub mapping: HashMap<Span, FlowType>,
cano_cache: Mutex<TypeCanoStore>,
}
impl TypeCheckInfo {
pub fn simplify(&self, ty: FlowType) -> FlowType {
let mut c = self.cano_cache.lock();
let c = &mut *c;
c.cano_local_cache.clear();
c.positives.clear();
c.negatives.clear();
let mut worker = TypeSimplifier {
vars: &self.vars,
cano_cache: &mut c.cano_cache,
cano_local_cache: &mut c.cano_local_cache,
positives: &mut c.positives,
negatives: &mut c.negatives,
};
worker.simplify(ty)
}
}
/// Type checking at the source unit level.
pub(crate) fn type_check(ctx: &mut AnalysisContext, source: Source) -> Option<Arc<TypeCheckInfo>> {
let def_use_info = ctx.def_use(source.clone())?;
let mut info = TypeCheckInfo {
vars: HashMap::new(),
mapping: HashMap::new(),
let mut info = TypeCheckInfo::default();
// Retrieve def-use information for the source.
let def_use_info = ctx.def_use(source.clone())?;
cano_cache: Mutex::new(TypeCanoStore::default()),
};
let mut type_checker = TypeChecker {
ctx,
source: source.clone(),
@ -72,16 +44,48 @@ pub(crate) fn type_check(ctx: &mut AnalysisContext, source: Source) -> Option<Ar
};
let lnk = LinkedNode::new(source.root());
let current = std::time::Instant::now();
let type_check_start = std::time::Instant::now();
type_checker.check(lnk);
let elapsed = current.elapsed();
log::info!("Type checking on {:?} took {:?}", source.id(), elapsed);
let elapsed = type_check_start.elapsed();
log::info!("Type checking on {:?} took {elapsed:?}", source.id());
// todo: cross-file unit type checking
let _ = type_checker.source;
Some(Arc::new(info))
}
#[derive(Default)]
pub(crate) struct TypeCheckInfo {
pub vars: HashMap<DefId, FlowVar>,
pub mapping: HashMap<Span, FlowType>,
cano_cache: Mutex<TypeCanoStore>,
}
impl TypeCheckInfo {
pub fn simplify(&self, ty: FlowType, principal: bool) -> FlowType {
let mut c = self.cano_cache.lock();
let c = &mut *c;
c.cano_local_cache.clear();
c.positives.clear();
c.negatives.clear();
let mut worker = TypeSimplifier {
principal,
vars: &self.vars,
cano_cache: &mut c.cano_cache,
cano_local_cache: &mut c.cano_local_cache,
positives: &mut c.positives,
negatives: &mut c.negatives,
};
worker.simplify(ty, principal)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum InterpretMode {
Markup,
@ -362,12 +366,46 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
let op = binary.op();
let lhs = self.check_expr_in(binary.lhs().span(), root.clone());
let rhs = self.check_expr_in(binary.rhs().span(), root);
let operands = Box::new((lhs, rhs));
let lhs_span = binary.lhs().span();
let lhs = self.check_expr_in(lhs_span, root.clone());
let rhs_span = binary.rhs().span();
let rhs = self.check_expr_in(rhs_span, root);
// Some(FlowType::Binary(ty))
Some(FlowType::Binary(FlowBinaryType { op, operands }))
match op {
ast::BinOp::Add | ast::BinOp::Sub | ast::BinOp::Mul | ast::BinOp::Div => {}
ast::BinOp::Eq | ast::BinOp::Neq | ast::BinOp::Leq | ast::BinOp::Geq => {
self.check_comparable(&lhs, &rhs);
self.possible_ever_be(&lhs, &rhs);
self.possible_ever_be(&rhs, &lhs);
}
ast::BinOp::Lt | ast::BinOp::Gt => {
self.check_comparable(&lhs, &rhs);
}
ast::BinOp::And | ast::BinOp::Or => {
self.constrain(&lhs, &FlowType::Boolean(None));
self.constrain(&rhs, &FlowType::Boolean(None));
}
ast::BinOp::NotIn | ast::BinOp::In => {
self.check_containing(&rhs, &lhs, op == ast::BinOp::In);
}
ast::BinOp::Assign => {
self.check_assignable(&lhs, &rhs);
self.possible_ever_be(&lhs, &rhs);
}
ast::BinOp::AddAssign
| ast::BinOp::SubAssign
| ast::BinOp::MulAssign
| ast::BinOp::DivAssign => {
self.check_assignable(&lhs, &rhs);
}
}
let res = FlowType::Binary(FlowBinaryType {
op,
operands: Box::new((lhs, rhs)),
});
Some(res)
}
fn check_field_access(&mut self, root: LinkedNode<'_>) -> Option<FlowType> {
@ -436,7 +474,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
// let _params = self.check_expr_in(closure.params().span(), root.clone());
let mut pos = vec![];
let mut named = HashMap::new();
let mut named = BTreeMap::new();
let mut rest = None;
for param in closure.params().children() {
@ -445,8 +483,8 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
pos.push(self.check_pattern(pattern, FlowType::Any, root.clone()));
}
ast::Param::Named(e) => {
let exp = self.check_expr_in(e.span(), root.clone());
let v = self.get_var(e.span(), to_ident_ref(&root, e.name())?)?;
let exp = self.check_expr_in(e.expr().span(), root.clone());
let v = self.get_var(e.name().span(), to_ident_ref(&root, e.name())?)?;
v.ever_be(exp);
named.insert(e.name().get().clone(), v.get_ref());
}
@ -507,6 +545,9 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
let callee = self.check_expr_in(set_rule.target().span(), root.clone());
let args = self.check_expr_in(set_rule.args().span(), root.clone());
let _cond = set_rule
.condition()
.map(|cond| self.check_expr_in(cond.span(), root.clone()));
let mut candidates = Vec::with_capacity(1);
log::debug!("set rule: {callee:?} with {args:?}");
@ -532,7 +573,9 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
let _selector = show_rule
.selector()
.map(|sel| self.check_expr_in(sel.span(), root.clone()));
// let _args = self.check_expr_in(show_rule.args().span(), root)?;
let t = show_rule.transform();
// todo: infer it type by selector
let _transform = self.check_expr_in(t.span(), root.clone());
Some(FlowType::Any)
}
@ -737,6 +780,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
FlowType::FlowNone => {}
FlowType::Auto => {}
FlowType::Builtin(_) => {}
FlowType::Boolean(_) => {}
FlowType::At(e) => {
let primary_type = self.check_primary_type(e.0 .0.clone());
self.check_apply_method(primary_type, e.0 .1.clone(), args, candidates);
@ -774,6 +818,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
let _ = w.0 .0;
}
(FlowType::Var(v), rhs) => {
log::debug!("constrain var {v:?} ⪯ {rhs:?}");
let w = self.info.vars.get_mut(&v.0).unwrap();
match &w.kind {
FlowVarKind::Weak(w) => {
@ -783,6 +828,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
}
(lhs, FlowType::Var(v)) => {
log::debug!("constrain var {lhs:?} ⪯ {v:?}");
let v = self.info.vars.get(&v.0).unwrap();
match &v.kind {
FlowVarKind::Weak(v) => {
@ -927,6 +973,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
FlowType::At(e) => self.check_primary_type(e.0 .0.clone()),
FlowType::Unary(_) => e,
FlowType::Binary(_) => e,
FlowType::Boolean(_) => e,
FlowType::If(_) => e,
FlowType::Element(_) => e,
}
@ -1043,30 +1090,67 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
vec![args.clone()],
)))
}
/// Checks that `lhs` and `rhs` are comparable (used for `==`, `!=`, `<=`,
/// `>=`, `<`, `>`). Currently a no-op placeholder; the arguments are
/// intentionally discarded until the check is implemented.
fn check_comparable(&self, lhs: &FlowType, rhs: &FlowType) {
let _ = lhs;
let _ = rhs;
}
/// Checks that `rhs` may be assigned to `lhs` (used for `=` and the
/// compound assignment operators). Currently a no-op placeholder; the
/// arguments are intentionally discarded until the check is implemented.
fn check_assignable(&self, lhs: &FlowType, rhs: &FlowType) {
let _ = lhs;
let _ = rhs;
}
/// Checks a containment relation between `elem` and `container` (used for
/// `in` / `not in`; `expected_in` distinguishes the two). Currently a
/// no-op placeholder; the arguments are intentionally discarded until the
/// check is implemented.
fn check_containing(&self, container: &FlowType, elem: &FlowType, expected_in: bool) {
let _ = container;
let _ = elem;
let _ = expected_in;
}
/// Records that `lhs` may ever take the value/shape of `rhs`: when `rhs`
/// is one of the listed concrete (non-variable, non-compound) types, the
/// constraint `rhs ⪯ lhs` is added. Other right-hand sides are ignored.
fn possible_ever_be(&mut self, lhs: &FlowType, rhs: &FlowType) {
// todo: instantiation
match rhs {
FlowType::Undef
| FlowType::Content
| FlowType::None
| FlowType::FlowNone
| FlowType::Auto
| FlowType::Element(..)
| FlowType::Builtin(..)
| FlowType::Value(..)
| FlowType::Boolean(..)
| FlowType::ValueDoc(..) => {
self.constrain(rhs, lhs);
}
_ => {}
}
}
}
#[derive(Default)]
struct TypeCanoStore {
cano_cache: HashMap<u128, FlowType>,
cano_local_cache: HashMap<DefId, FlowType>,
cano_cache: HashMap<(u128, bool), FlowType>,
cano_local_cache: HashMap<(DefId, bool), FlowType>,
negatives: HashSet<DefId>,
positives: HashSet<DefId>,
}
struct TypeSimplifier<'a, 'b> {
principal: bool,
vars: &'a HashMap<DefId, FlowVar>,
cano_cache: &'b mut HashMap<u128, FlowType>,
cano_local_cache: &'b mut HashMap<DefId, FlowType>,
cano_cache: &'b mut HashMap<(u128, bool), FlowType>,
cano_local_cache: &'b mut HashMap<(DefId, bool), FlowType>,
negatives: &'b mut HashSet<DefId>,
positives: &'b mut HashSet<DefId>,
}
impl<'a, 'b> TypeSimplifier<'a, 'b> {
fn simplify(&mut self, ty: FlowType) -> FlowType {
fn simplify(&mut self, ty: FlowType, principal: bool) -> FlowType {
// todo: hash safety
let ty_key = hash128(&ty);
if let Some(cano) = self.cano_cache.get(&ty_key) {
if let Some(cano) = self.cano_cache.get(&(ty_key, principal)) {
return cano.clone();
}
@ -1175,6 +1259,7 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
FlowType::Infer => {}
FlowType::FlowNone => {}
FlowType::Auto => {}
FlowType::Boolean(_) => {}
FlowType::Builtin(_) => {}
FlowType::Element(_) => {}
}
@ -1183,25 +1268,31 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
fn transform(&mut self, ty: &FlowType, pol: bool) -> FlowType {
match ty {
FlowType::Var(v) => {
if let Some(cano) = self.cano_local_cache.get(&v.0) {
if let Some(cano) = self.cano_local_cache.get(&(v.0, self.principal)) {
return cano.clone();
}
// todo: avoid cycle
self.cano_local_cache
.insert((v.0, self.principal), FlowType::Any);
match &self.vars.get(&v.0).unwrap().kind {
let res = match &self.vars.get(&v.0).unwrap().kind {
FlowVarKind::Weak(w) => {
let w = w.read();
// log::debug!("transform var {:?} {pol}", v.0);
let mut lbs = Vec::with_capacity(w.lbs.len());
let mut ubs = Vec::with_capacity(w.ubs.len());
if pol && !self.negatives.contains(&v.0) {
log::info!(
"transform var [principal={}] {v:?} with {w:?}",
self.principal
);
if !self.principal || ((pol) && !self.negatives.contains(&v.0)) {
for lb in w.lbs.iter() {
lbs.push(self.transform(lb, pol));
}
}
if !pol && !self.positives.contains(&v.0) {
if !self.principal || ((!pol) && !self.positives.contains(&v.0)) {
for ub in w.ubs.iter() {
ubs.push(self.transform(ub, !pol));
}
@ -1216,12 +1307,14 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
}
}
FlowType::Let(Arc::new(FlowVarStore {
lbs: w.lbs.clone(),
ubs: w.ubs.clone(),
}))
}
FlowType::Let(Arc::new(FlowVarStore { lbs, ubs }))
}
};
self.cano_local_cache
.insert((v.0, self.principal), res.clone());
res
}
FlowType::Func(f) => {
let pos = f.pos.iter().map(|p| self.transform(p, !pol)).collect();
@ -1314,6 +1407,7 @@ impl<'a, 'b> TypeSimplifier<'a, 'b> {
FlowType::Infer => FlowType::Infer,
FlowType::FlowNone => FlowType::FlowNone,
FlowType::Auto => FlowType::Auto,
FlowType::Boolean(b) => FlowType::Boolean(*b),
FlowType::Builtin(b) => FlowType::Builtin(b.clone()),
}
}
@ -1401,6 +1495,8 @@ impl Joiner {
(FlowType::Union(..), _) => self.definite = FlowType::Undef,
(FlowType::Let(w), FlowType::None) => self.definite = FlowType::Let(w),
(FlowType::Let(..), _) => self.definite = FlowType::Undef,
(FlowType::Boolean(b), FlowType::None) => self.definite = FlowType::Boolean(b),
(FlowType::Boolean(..), _) => self.definite = FlowType::Undef,
}
}
}

View file

@ -11,7 +11,7 @@ use typst::{
use crate::analysis::ty::param_mapping;
use super::FlowBuiltinType;
use super::{FlowBuiltinType, TypeCheckInfo};
struct RefDebug<'a>(&'a FlowType);
@ -35,6 +35,7 @@ pub(crate) enum FlowType {
Infer,
FlowNone,
Auto,
Boolean(Option<bool>),
Builtin(FlowBuiltinType),
Value(Box<(Value, Span)>),
ValueDoc(Box<(Value, &'static str)>),
@ -99,6 +100,13 @@ impl fmt::Debug for FlowType {
FlowType::Value(v) => write!(f, "{v:?}", v = v.0),
FlowType::ValueDoc(v) => write!(f, "{v:?}"),
FlowType::Element(e) => write!(f, "{e:?}"),
FlowType::Boolean(b) => {
if let Some(b) = b {
write!(f, "{b}")
} else {
f.write_str("Boolean")
}
}
}
}
}
@ -156,6 +164,41 @@ impl FlowType {
pub(crate) fn is_dict(&self) -> bool {
matches!(self, FlowType::Dict(..))
}
pub(crate) fn from_types(e: impl ExactSizeIterator<Item = FlowType>) -> Self {
if e.len() == 0 {
FlowType::Any
} else if e.len() == 1 {
let mut e = e;
e.next().unwrap()
} else {
FlowType::Union(Box::new(e.collect()))
}
}
pub(crate) fn signatures(
&self,
ty_chk: &TypeCheckInfo,
principal: bool,
) -> Option<Vec<FlowSignature>> {
let mut res = Vec::new();
check_signatures(self, &mut res, ty_chk, principal);
if res.is_empty() {
None
} else {
// todo: bad performance
for sig in &mut res {
for pos in &mut sig.pos {
*pos = ty_chk.simplify(pos.clone(), principal);
}
for (_, ty) in &mut sig.named {
*ty = ty_chk.simplify(ty.clone(), principal);
}
}
Some(res)
}
}
}
#[derive(Debug, Clone, Hash)]
@ -408,3 +451,60 @@ impl fmt::Debug for FlowRecord {
f.write_str("}")
}
}
fn instantiate_signature(
f: &FlowType,
args: Vec<FlowArgs>,
sigs: &mut Vec<FlowSignature>,
ty_chk: &TypeCheckInfo,
principal: bool,
) {
let sigs_checkpoint = sigs.len();
check_signatures(f, sigs, ty_chk, principal);
if sigs.len() == sigs_checkpoint {
return;
}
for sig in &mut sigs[sigs_checkpoint..] {
// consume the positional arguments
sig.pos = if sig.pos.len() > args.len() {
sig.pos.split_off(args.len())
} else {
Vec::new()
};
}
}
fn check_signatures(
ty: &FlowType,
res: &mut Vec<FlowSignature>,
ty_chk: &TypeCheckInfo,
principal: bool,
) {
match ty {
FlowType::Func(s) => res.push(*s.clone()),
FlowType::With(w) => {
instantiate_signature(&w.0, w.1.clone(), res, ty_chk, principal);
}
FlowType::Union(u) => {
for ty in u.iter() {
check_signatures(ty, res, ty_chk, principal);
}
}
FlowType::Var(u) => {
let var = ty_chk.vars.get(&u.0);
if let Some(var) = var {
let FlowVarKind::Weak(w) = &var.kind;
let w = w.read();
for lb in &w.ubs {
check_signatures(lb, res, ty_chk, principal);
}
if !principal {
for ub in &w.lbs {
check_signatures(ub, res, ty_chk, principal);
}
}
}
}
_ => {}
}
}

View file

@ -0,0 +1,98 @@
//! Infer more than the principal type of some expression.
use typst::syntax::{
ast::{self, AstNode},
LinkedNode, SyntaxKind,
};
use crate::{syntax::CheckTarget, AnalysisContext};
use super::{FlowType, FlowVarKind, TypeCheckInfo};
// todo: detect recursive usage
/// Infers the literal (context-expected) type of the expression at `node`:
/// the type imposed on it by its syntactic surroundings (a let binding or
/// a named argument) rather than its own principal type.
///
/// Returns `None` when the target has no backing node or no type can be
/// derived from the context.
pub(crate) fn literal_type_check(
_ctx: &mut AnalysisContext,
info: &TypeCheckInfo,
node: CheckTarget<'_>,
) -> Option<FlowType> {
let node = node.node()?;
let mut worker = LiteralTypeCheckWorker { _ctx, info };
worker.check(node)
}
/// Worker carrying the analysis context and type-check results while
/// walking outward from a node to infer its literal type.
struct LiteralTypeCheckWorker<'a, 'w> {
// Held for future use; currently unused by the checks below.
_ctx: &'a mut AnalysisContext<'w>,
// Type-check results (var table and span-to-type mapping) being queried.
info: &'a TypeCheckInfo,
}
impl<'a, 'w> LiteralTypeCheckWorker<'a, 'w> {
/// Dispatches on the parent of `node`: a let-binding initializer takes
/// its type from the bound pattern; a named-argument value takes the
/// upper bounds recorded for the enclosing `Named` node. Anything else
/// yields `None`.
fn check(&mut self, node: LinkedNode) -> Option<FlowType> {
let parent = node.parent()?;
match parent.kind() {
SyntaxKind::LetBinding => {
let p = parent.cast::<ast::LetBinding>()?;
let exp = p.init()?;
// Only react when the node is the initializer expression itself.
if exp.span() == node.span() {
match p.kind() {
ast::LetBindingKind::Closure(_c) => {
// Closure bindings are not handled here.
return None;
}
ast::LetBindingKind::Normal(pattern) => {
return self.destruct_let(pattern, node.clone())
}
}
}
}
SyntaxKind::Named => {
let p = parent.cast::<ast::Named>()?;
let exp = p.expr();
// Only react when the node is the named argument's value.
if exp.span() == node.span() {
let ty = self.info.mapping.get(&p.span())?;
return self.ubs(ty);
}
}
_ => return None,
}
None
}
/// Resolves the literal type of a let-binding initializer from the
/// pattern it is bound to: a plain identifier uses the type recorded
/// for that identifier; parentheses are unwrapped recursively.
fn destruct_let(&self, pattern: ast::Pattern<'_>, node: LinkedNode<'_>) -> Option<FlowType> {
match pattern {
ast::Pattern::Placeholder(_) => None,
ast::Pattern::Normal(n) => {
let ast::Expr::Ident(ident) = n else {
return None;
};
let ty = self.info.mapping.get(&ident.span())?;
self.ubs(ty)
}
ast::Pattern::Parenthesized(p) => {
self.destruct_let(p.expr().to_untyped().cast()?, node)
}
// todo: pattern matching
ast::Pattern::Destructuring(_d) => {
let _ = node;
None
}
}
}
/// Collapses a type to the union of its upper bounds: `Let` stores use
/// their recorded `ubs`; type variables are looked up and their weak
/// store's `ubs` used; any other type is returned unchanged.
fn ubs(&self, ty: &FlowType) -> Option<FlowType> {
match ty {
FlowType::Let(ty) => Some(FlowType::from_types(ty.ubs.iter().cloned())),
FlowType::Var(ty) => {
let v = self.info.vars.get(&ty.0)?;
match &v.kind {
FlowVarKind::Weak(w) => {
let r = w.read();
Some(FlowType::from_types(r.ubs.iter().cloned()))
}
}
}
_ => Some(ty.clone()),
}
}
}

View file

@ -84,9 +84,14 @@ impl StatefulRequest for CompletionRequest {
if let Some(d) = &deref_target {
let node = d.node();
// skip if is the let binding item, todo, check whether the pattern is exact
// todo: check if the pattern(span) is exact, instead of just checking the
// parent kind
if matches!(
node.parent_kind(),
(d, node.parent_kind()),
(
DerefTarget::VarAccess(..),
Some(SyntaxKind::LetBinding | SyntaxKind::Closure)
)
) {
return None;
}

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
description: "Check on \"\\\"Test\\\"\" (30)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/literal_type_check/text_font.typ
---
(TextFont | Array<TextFont>)

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
description: "Check on \"(\\\"Test\\\",)\" (30)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/literal_type_check/text_font2.typ
---
(TextFont | Array<TextFont>)

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
description: "Check on \"(\\\"Test\\\",)\" (33)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/literal_type_check/text_font3.typ
---
(TextFont | Array<TextFont>)

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
description: "Check on \"(\\\"Test\\\",)\" (31)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/literal_type_check/text_font4.typ
---
(TextFont | Array<TextFont>)

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
description: "Check on \"(\\\"Test\\\",)\" (47)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/literal_type_check/text_font5.typ
---
(TextFont | Array<TextFont>)

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
description: "Check on \"(\\\"Test\\\",)\" (105)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/literal_type_check/user_func.typ
---
<nil>

View file

@ -0,0 +1,2 @@
#let x = /* position after */ "Test"
#text(font: x)[]

View file

@ -0,0 +1,2 @@
#let y = /* position after */ ("Test",)
#text(font: y)[]

View file

@ -0,0 +1 @@
#text(font: /* position after */ ("Test",))[]

View file

@ -0,0 +1,2 @@
#let fa = /* position after */ ("Test",)
#show raw: set text(font: fa)

View file

@ -0,0 +1 @@
#show raw: set text(font: /* position after */ ("Test",))

View file

@ -0,0 +1,7 @@
#let tmpl(content, font: none) = {
set text(font: font)
content
}
#tmpl(font: /* position after */ ("Test",))[]

View file

@ -0,0 +1,2 @@
#let font = "Times New Roman";
#set text(font: font)

View file

@ -0,0 +1 @@
#let tmpl(content) = content

View file

@ -0,0 +1,13 @@
#let tmpl(content, authors: (), font: none, class: "article") = {
if class != "article" and class != "letter" {
panic("")
}
set document(author: authors)
set text(font: font)
set page(paper: "a4") if class == "article"
set page(paper: "us-letter") if class == "letter"
content
}

View file

@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/type_check/set_font.typ
---
"font" = "Times New Roman"
---
5..9 -> @font
41..51 -> (TextFont | Array<TextFont>)
47..51 -> (TextFont | Array<TextFont>)

View file

@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/type_check/sig_template.typ
---
"content" = Any
"tmpl" = (Any) -> Any
---
5..9 -> @tmpl
10..17 -> @content

File diff suppressed because one or more lines are too long

View file

@ -81,7 +81,7 @@ pub fn find_document_before(src: &Source, cursor: usize) -> Option<String> {
let root = LinkedNode::new(src.root());
let leaf = root.leaf_at(cursor)?;
let def_target = get_def_target(leaf.clone())?;
log::info!("found docs target: {:?}", def_target.node().kind());
log::debug!("found docs target: {:?}", def_target.node().kind());
// todo: import
let target = def_target.node().clone();
let mut node = target.clone();

View file

@ -1,4 +1,4 @@
use log::debug;
use ecow::EcoVec;
use typst::{
foundations::{Func, ParamInfo},
syntax::{
@ -65,7 +65,7 @@ pub enum DerefTarget<'a> {
}
impl<'a> DerefTarget<'a> {
pub fn node(&self) -> &LinkedNode {
pub fn node(&self) -> &LinkedNode<'a> {
match self {
DerefTarget::Label(node) => node,
DerefTarget::Ref(node) => node,
@ -78,7 +78,7 @@ impl<'a> DerefTarget<'a> {
}
}
pub fn get_deref_target(node: LinkedNode, cursor: usize) -> Option<DerefTarget> {
pub fn get_deref_target(node: LinkedNode, cursor: usize) -> Option<DerefTarget<'_>> {
/// Skips trivia nodes that are on the same line as the cursor.
fn skippable_trivia(node: &LinkedNode, cursor: usize) -> bool {
// A non-trivia node is our target so we stop at it.
@ -101,7 +101,14 @@ pub fn get_deref_target(node: LinkedNode, cursor: usize) -> Option<DerefTarget>
// Move to the first non-trivia node before the cursor.
let mut node = node;
if skippable_trivia(&node, cursor) || is_mark(node.kind()) {
if skippable_trivia(&node, cursor) || {
is_mark(node.kind())
&& (!matches!(node.kind(), SyntaxKind::LeftParen)
|| !matches!(
node.parent_kind(),
Some(SyntaxKind::Array | SyntaxKind::Dict | SyntaxKind::Parenthesized)
))
} {
node = node.prev_sibling()?;
}
@ -110,11 +117,11 @@ pub fn get_deref_target(node: LinkedNode, cursor: usize) -> Option<DerefTarget>
while !ancestor.is::<ast::Expr>() {
ancestor = ancestor.parent()?.clone();
}
debug!("deref expr: {ancestor:?}");
log::debug!("deref expr: {ancestor:?}");
// Unwrap all parentheses to get the actual expression.
let cano_expr = deref_lvalue(ancestor)?;
debug!("deref lvalue: {cano_expr:?}");
log::debug!("deref lvalue: {cano_expr:?}");
// Identify convenient expression kinds.
let expr = cano_expr.cast::<ast::Expr>()?;
@ -160,7 +167,16 @@ impl<'a> DefTarget<'a> {
}
}
// todo: whether we should distinguish between strict and non-strict def targets
pub fn get_non_strict_def_target(node: LinkedNode) -> Option<DefTarget<'_>> {
get_def_target_(node, false)
}
pub fn get_def_target(node: LinkedNode) -> Option<DefTarget<'_>> {
get_def_target_(node, true)
}
fn get_def_target_(node: LinkedNode, strict: bool) -> Option<DefTarget<'_>> {
let mut ancestor = node;
if ancestor.kind().is_trivia() || is_mark(ancestor.kind()) {
ancestor = ancestor.prev_sibling()?;
@ -169,12 +185,12 @@ pub fn get_def_target(node: LinkedNode) -> Option<DefTarget<'_>> {
while !ancestor.is::<ast::Expr>() {
ancestor = ancestor.parent()?.clone();
}
debug!("def expr: {ancestor:?}");
log::debug!("def expr: {ancestor:?}");
let ancestor = deref_lvalue(ancestor)?;
debug!("def lvalue: {ancestor:?}");
log::debug!("def lvalue: {ancestor:?}");
let may_ident = ancestor.cast::<ast::Expr>()?;
if !may_ident.hash() && !matches!(may_ident, ast::Expr::MathIdent(_)) {
if strict && !may_ident.hash() && !matches!(may_ident, ast::Expr::MathIdent(_)) {
return None;
}
@ -206,13 +222,113 @@ pub fn get_def_target(node: LinkedNode) -> Option<DefTarget<'_>> {
DefTarget::Import(parent.clone())
}
_ if may_ident.hash() => return None,
_ => {
debug!("unsupported kind {kind:?}", kind = ancestor.kind());
log::debug!("unsupported kind {kind:?}", kind = ancestor.kind());
return None;
}
})
}
/// The argument slot being targeted inside a call's argument list.
#[derive(Debug, Clone)]
pub enum ParamTarget<'a> {
/// A positional slot.
Positional {
// Spread arguments appearing before the targeted position.
spreads: EcoVec<LinkedNode<'a>>,
// Count of plain positional arguments before the targeted position.
positional: usize,
// Whether the targeted token is itself a spread.
is_spread: bool,
},
/// A named argument, pointing at its name identifier node.
Named(LinkedNode<'a>),
}
/// A syntactic position at which a literal (expected) type can be checked.
#[derive(Debug, Clone)]
pub enum CheckTarget<'a> {
/// An argument position of a function call or set rule.
Param {
target: ParamTarget<'a>,
// Whether the enclosing call is a `set` rule.
is_set: bool,
},
/// Any other (non-argument) expression node.
Normal(LinkedNode<'a>),
}
impl<'a> CheckTarget<'a> {
/// Returns the syntax node backing this target, if any.
///
/// Positional parameter targets have no single backing node, so they
/// yield `None`.
pub fn node(&self) -> Option<LinkedNode<'a>> {
Some(match self {
CheckTarget::Param { target, .. } => match target {
ParamTarget::Positional { .. } => return None,
ParamTarget::Named(node) => node.clone(),
},
CheckTarget::Normal(node) => node.clone(),
})
}
}
/// Finds the check target for `node`: when the node dereferences to a
/// callee, the specific argument slot inside that call (or set rule) is
/// returned; otherwise the dereferenced node itself is wrapped as a
/// `Normal` target.
pub fn get_check_target(node: LinkedNode) -> Option<CheckTarget<'_>> {
let mut node = node;
// Walk backwards over trivia to land on a meaningful token.
while node.kind().is_trivia() {
node = node.prev_sibling()?;
}
let deref_target = get_deref_target(node.clone(), node.offset())?;
match deref_target {
DerefTarget::Callee(callee) => {
let parent = callee.parent()?;
// Both function calls and set rules carry argument lists.
let args = match parent.cast::<ast::Expr>() {
Some(ast::Expr::FuncCall(call)) => call.args(),
Some(ast::Expr::Set(set)) => set.args(),
_ => return None,
};
let args_node = node.find(args.span())?;
let param_target = get_param_target(args_node, node)?;
Some(CheckTarget::Param {
target: param_target,
is_set: parent.kind() == SyntaxKind::Set,
})
}
deref_target => Some(CheckTarget::Normal(deref_target.node().clone())),
}
}
/// Determines which parameter slot the token `node` occupies inside the
/// argument list `args_node`.
///
/// A colon resolves to the named parameter whose identifier precedes it;
/// a spread, comma, or opening paren resolves to a positional slot,
/// counted from the arguments that end before the token. Other tokens
/// yield `None`.
fn get_param_target<'a>(
args_node: LinkedNode<'a>,
node: LinkedNode<'a>,
) -> Option<ParamTarget<'a>> {
match node.kind() {
SyntaxKind::Colon => {
// `name:` — the previous leaf is the parameter name identifier.
let prev = node.prev_leaf()?;
let param_ident = prev.cast::<ast::Ident>()?;
Some(ParamTarget::Named(args_node.find(param_ident.span())?))
}
SyntaxKind::Spread | SyntaxKind::Comma | SyntaxKind::LeftParen => {
let mut spreads = EcoVec::new();
let mut positional = 0;
let is_spread = node.kind() == SyntaxKind::Spread;
// Only arguments that end at or before the token count toward
// the slot index.
let args_before = args_node
.children()
.take_while(|arg| arg.range().end <= node.offset());
for ch in args_before {
match ch.cast::<ast::Arg>() {
Some(ast::Arg::Pos(..)) => {
positional += 1;
}
Some(ast::Arg::Spread(..)) => {
spreads.push(ch);
}
// Named arguments do not shift positional indices.
Some(ast::Arg::Named(..)) | None => {}
}
}
Some(ParamTarget::Positional {
spreads,
positional,
is_spread,
})
}
_ => None,
}
}
pub fn param_index_at_leaf(leaf: &LinkedNode, function: &Func, args: ast::Args) -> Option<usize> {
let deciding = deciding_syntax(leaf);
let params = function.params()?;

View file

@ -719,10 +719,10 @@ fn complete_params(ctx: &mut CompletionContext) -> bool {
ctx.from = ctx.cursor.min(next.offset());
}
let parent = deciding.parent().unwrap();
log::info!("named param parent: {:?}", parent);
log::debug!("named param parent: {:?}", parent);
// get type of this param
let ty = ctx.ctx.type_of(param.to_untyped());
log::info!("named param type: {:?}", ty);
log::debug!("named param type: {:?}", ty);
named_param_value_completions(ctx, callee, &param, ty.as_ref());
return true;
@ -1280,8 +1280,6 @@ impl<'a, 'w> CompletionContext<'a, 'w> {
}
fn safe_str_slice(s: &str, mut start: usize, mut end: usize) -> &str {
// todo: bad slicing
// &self.before[self.cursor.saturating_sub(size)..]
while start < s.len() && !s.is_char_boundary(start) {
start += 1;
}

View file

@ -16,7 +16,7 @@ use crate::analysis::{
PathPreference, FLOW_INSET_DICT, FLOW_MARGIN_DICT, FLOW_OUTSET_DICT, FLOW_RADIUS_DICT,
FLOW_STROKE_DICT,
};
use crate::syntax::param_index_at_leaf;
use crate::syntax::{get_non_strict_def_target, param_index_at_leaf, DefTarget};
use crate::upstream::plain_docs_sentence;
use crate::{prelude::*, typst_to_lsp::completion_kind, LspCompletion};
@ -205,6 +205,26 @@ pub fn param_completions<'a>(
let signature = analyze_dyn_signature(ctx.ctx, func.clone());
let def = func.span();
let type_sig = def.id().and_then(|id| {
let source = ctx.ctx.source_by_id(id).ok()?;
let def = get_non_strict_def_target(source.find(def)?)?;
let DefTarget::Let(l) = def else {
return None;
};
let lb = l.cast::<ast::LetBinding>()?;
let ast::LetBindingKind::Closure(c) = lb.kind() else {
return None;
};
let fn_ty = ctx.ctx.type_of_span(c.span());
let info = ctx.ctx.type_check(source)?;
log::info!("function sig by type checking: {:?}", fn_ty);
fn_ty.and_then(|ty| ty.signatures(&info, false))
});
// Exclude named arguments which are already present.
let exclude: Vec<_> = args
.items()
@ -222,22 +242,29 @@ pub fn param_completions<'a>(
let pos = primary_sig.pos.get(pos_index);
log::debug!("pos_param_completion_to: {:?}", pos);
let mut doc = None;
if let Some(pos) = pos {
if set && !pos.settable {
return;
}
// Some(&plain_docs_sentence(&pos.docs))
doc = Some(plain_docs_sentence(&pos.docs));
if pos.positional
&& type_completion(
ctx,
pos.infer_type.as_ref(),
Some(&plain_docs_sentence(&pos.docs)),
)
.is_none()
&& type_completion(ctx, pos.infer_type.as_ref(), doc.as_deref()).is_none()
{
ctx.cast_completions(&pos.input);
}
}
for sig in type_sig.iter().flatten() {
if let Some(pos) = sig.pos.get(pos_index) {
log::info!("pos_param_completion by type: {:?}", pos);
type_completion(ctx, Some(pos), doc.as_deref());
}
}
}
for (name, param) in &primary_sig.named {
@ -339,6 +366,10 @@ fn type_completion(
FlowType::Auto => {
ctx.snippet_completion("auto", "auto", "A smart default.");
}
FlowType::Boolean(_b) => {
ctx.snippet_completion("false", "false", "No / Disabled.");
ctx.snippet_completion("true", "true", "Yes / Enabled.");
}
FlowType::Builtin(v) => match v {
FlowBuiltinType::Path(p) => {
let source = ctx.ctx.source_by_id(ctx.root.span().id()?).ok()?;
@ -473,12 +504,24 @@ fn type_completion(
type_completion(ctx, Some(info), docs);
}
}
FlowType::Let(_) => return None,
FlowType::Let(e) => {
for ut in e.ubs.iter() {
type_completion(ctx, Some(ut), docs);
}
for lt in e.lbs.iter() {
type_completion(ctx, Some(lt), docs);
}
}
FlowType::Var(_) => return None,
FlowType::Unary(_) => return None,
FlowType::Binary(_) => return None,
FlowType::If(_) => return None,
FlowType::Value(v) => {
// Prevent duplicate completions from appearing.
if !ctx.seen_casts.insert(typst::util::hash128(&v.0)) {
return Some(());
}
if let Value::Type(ty) = &v.0 {
if *ty == Type::of::<NoneValue>() {
type_completion(ctx, Some(&FlowType::None), docs);
@ -555,6 +598,26 @@ pub fn named_param_value_completions<'a>(
return;
};
let def = func.span();
let type_sig = def.id().and_then(|id| {
let source = ctx.ctx.source_by_id(id).ok()?;
let def = get_non_strict_def_target(source.find(def)?)?;
let DefTarget::Let(l) = def else {
return None;
};
let lb = l.cast::<ast::LetBinding>()?;
let ast::LetBindingKind::Closure(c) = lb.kind() else {
return None;
};
let fn_ty = ctx.ctx.type_of_span(c.span());
let info = ctx.ctx.type_check(source)?;
log::info!("function sig by type checking: {:?}", fn_ty);
fn_ty.and_then(|ty| ty.signatures(&info, false))
});
use typst::foundations::func::Repr;
let mut func = func;
while let Repr::With(f) = func.inner() {
@ -574,21 +637,35 @@ pub fn named_param_value_completions<'a>(
return;
}
let doc = Some(plain_docs_sentence(&param.docs));
// static analysis
if let Some(ty) = ty {
type_completion(ctx, Some(ty), Some(&plain_docs_sentence(&param.docs)));
type_completion(ctx, Some(ty), doc.as_deref());
}
let mut completed = false;
for sig in type_sig.iter().flatten() {
let named = sig.named.iter().find(|(n, _)| n.as_str() == name);
if let Some((_, param)) = named {
log::info!("named_param_completion by type: {:?}", param);
type_completion(ctx, Some(param), doc.as_deref());
completed = true;
}
}
if !completed {
if let Some(expr) = &param.expr {
ctx.completions.push(Completion {
kind: CompletionKind::Constant,
label: expr.clone(),
apply: None,
detail: Some(plain_docs_sentence(&param.docs)),
detail: doc.map(Into::into),
label_detail: None,
command: None,
});
}
}
if type_completion(
ctx,
@ -613,7 +690,7 @@ pub fn complete_literal(ctx: &mut CompletionContext) -> Option<()> {
} else {
parent
};
log::debug!("check complete_literal 2: {:?}", ctx.leaf);
log::debug!("check complete_literal 2: {:?}", parent);
let parent = &parent;
let parent = match parent.kind() {
SyntaxKind::Colon => parent.parent()?,
@ -624,7 +701,7 @@ pub fn complete_literal(ctx: &mut CompletionContext) -> Option<()> {
SyntaxKind::LeftParen | SyntaxKind::Comma => (None, parent.parent()?),
_ => (None, parent),
};
log::debug!("check complete_literal 3: {:?}", ctx.leaf);
log::debug!("check complete_literal 3: {:?}", parent);
// or empty array
let lit_span;
@ -645,8 +722,8 @@ pub fn complete_literal(ctx: &mut CompletionContext) -> Option<()> {
// query type of the dict
let named_span = named.map(|n| n.span()).unwrap_or_else(Span::detached);
let named_ty = ctx.ctx.type_of_span(named_span);
let lit_ty = ctx.ctx.type_of_span(lit_span);
let named_ty = ctx.ctx.literal_type_of_span(named_span);
let lit_ty = ctx.ctx.literal_type_of_span(lit_span);
log::info!("complete_literal: {lit_ty:?} {named_ty:?}");
enum LitComplAction<'a> {
@ -928,3 +1005,5 @@ mod tests {
}
}
}
// todo: doesn't complete parameter now, which is not good.

65
docs/thinking-ide.md Normal file
View file

@ -0,0 +1,65 @@
I have implemented nearly all LSP features. The implementations are incomplete but bring some insights. The requirement of analysis may unveil some shortcomings to building incremental analysis (computation) for typst-ide with existing crates, e.g. comemo.
First, to get LSP features, we have two approaches to extract information from Typst's source code:
- dynamic analysis, that triggers `analyze_{expr,import,...}`, which is heavily used by current typst-ide.
- static analysis, which iterates `SyntaxNode`/`LinkedNode`/`Content`/`Frame`; this is also used by the current typst-ide, but without any caching.
I list some typical ways to get LSP features. "Go to function definition" can be implemented partially with dynamic analysis, but "Go to variable definition" looks impossible, so we need to have static analysis eventually. "Go to references" requires both dynamic and static analysis. "Inlay hint" can be easily implemented with dynamic analysis, but it has really poor performance since a single inlay hint request triggers many compilations.
The dynamic analysis part in Typst is attractive in comparison with other programming languages from my view. We can usually get nice and wonderful analysis results (autocompletion) by tracking some span to get corresponding values dynamically, but it is not perfect.
- Lossy information: For a non-function value, we cannot track its definition (assignment) site currently.
- Performance overhead: A single compilation is proven to be fast, but to get the best analysis result we may trigger compilation multiple times, exhausting memory and taking a long time (observed in the implemented inlay hint and go-to-references features).
My solution is to introduce more static analysis and cache them heavily. But I find it problematic to build one with comemo. There are limitations observed for implementing performant static analysis.
- Bad performance is caused by hashing, especially on recursive structures. comemo compares inputs and constraints by hash, which is not efficient enough. I encounter the following cases frequently:
```rs
#[comemo::memorize]
fn analyze(input: SomeRecursiveStructure) -> Output {}
```
It is elegant, but causes poor performance frequently, as the CPUs are tired of calculating hashes on the recursive structures repeatedly. Specifically, the leaf will be hashed by `O(d)` times, where `d` is the number of wrapped `LazyHash` from the root node to the leaf node. Even though we have `LazyHash`, the overhead is not acceptable when the calculation is cheaper than hashing the input.
- Lack of revision management (comemo is global), hence causing poor memory efficiency. Now we have a keep-running compilation task along with multiple incoming IDE tasks at some random times, how do we run the global `comemo::evict` for best performance? When we trigger `comemo::evict` frequently for each IDE task, the compilation caches all go away.
- Cannot do task cancellation when analyses are nested by `comemo::memorized`:
- When some expensive analysis task is running in the background, how do we cancel if it is nested in deep `comemo::memorized`? The cancellation will cause "side effects".
- This problem also occurs with a long-time compilation for canceling.
I compared salsa (used by rust-analyzer) and comemo, and found salsa solves some problems, but I'm not yet sure whether we should use salsa for caching analyses, and it is also not actively maintained.
---
I think we have reached some consensus and can land a basic go-to definition feature. For the comemo part, we can just keep discussing and seek opportunities for optimization if possible.
> I think by extending Scope (storing a span) and the Tracer a bit, we could maybe solve that. Though intra-file go-to-definition is also fairly simple on the syntax tree I think, and more efficient. So, we could also go with a hybrid approach: Intra-file syntax-based and inter-file using dynamic analysis on the module and checks the span in the Scope.
This is what I'm doing. The approach may not fit in some other LSP features, but is totally fine with a go to definition feature. We can forget other features first.
> Maybe we can let the Tracer trace more things in a single compilation, so that you can run various analyses at once?
We can do it, but I'm worried whether it will affect performance on regular compilation. Furthermore, there are some cases that will greatly extend the time needed for dynamic analysis. I can give a simple example:
```js
for i in range(5000000) {}
```
When you try to get a `typst::foundations::Func` instance for the `range` function by `analyze_expr`, it will cost 11s on my computer! IMO we should prefer static analysis whenever possible, even if the dynamic analysis is quite wonderful, as we may not like to get user reports that they randomly click some identifier for definition and the browser gets stuck or crashes... Static analysis is usually more reliable, and its performance more predictable.
> If you have intermediate LazyHash structures, every leaf should be hashed just once
For example, let's think of an extreme case: `pub enum Either { A(Box<Either>), B }`, and the analysis:
```rs
#[comemo::memorize]
fn analyze(input: Either) -> Output {}
```
We have several simple options:
+ don't use a lazy hash inside `pub enum Either { A(Box<Either>), B }`; then the `analyze` function will have poor performance when the `Either` is big.
+ introduce a lazy hash inside `pub enum Either { A(Box<LazyHash<Either>>), B }`; then the `analyze` function will be triggered `d` times if the depth of `B` is `d`.
+ add an extra variant, and make a "HashA" when it is big enough: `pub enum Either { A(Box<Either>), B, HashA(Box<LazyHash<Either>>) }`. This is possible but not practical for me.
Overall, I feel salsa's intern-based solution is more efficient for comparing and computing many small inputs than comemo's hash-based solution.
> That's a problem indeed. If you have any proposals how we could extend comemo to make eviction smarter, I'd be interested to hear them. Maybe we can steal some stuff from salsa here, e.g. the durability concept.
> Yeah, I think that's a more fundamental problem. Maybe we could build some cancellation feature into comemo. How big is this problem in practice? Do you frequently notice a lack of cancellation or would it just be cleaner to be able to do it?
> I looked at salsa before building comemo. Maybe I just didn't "get it", but it felt a lot more complicated and it also forces you to write the compiler in a very specific way. Do you think there are some things to its approach that are fundamentally impossible in the comemo approach, rather than just not implemented?
I believe you have investigated salsa and there should be wonderful stuff to steal, for example, the durability concept, which also seems to matter for optimizing performance when the incoming events are mostly user edits. I mentioned these cons of comemo because I want to let you know which problems I've encountered when trying to cache static analysis with comemo. We can revisit them if they become a big problem while building our LSP/IDE features.

View file

@ -385,7 +385,7 @@ fn e2e() {
});
let hash = replay_log(&tinymist_binary, &root.join("vscode"));
insta::assert_snapshot!(hash, @"siphash128_13:fe9362131962ba0be4a3e5bea9ccd8e8");
insta::assert_snapshot!(hash, @"siphash128_13:84c6ca3a1d93e6af063b86bb18a9561f");
}
}