feat: implement expression checker (#714)

* feat: implements expression checker

* dev: resolve information

* dev: delete def_use

* stage

* stage

* stage

* stage concurrent

* stage concurrent

* dev: better concurrency

* dev: final constant evaluation improvement

* dev: change reference site

* dev: handle comments

* dev: remove indirect import structure

* dev: adjust linked_def impl

* dev: finalize goto definition impl

* dev: replace all old import and def_use analyses with expr analysis

* dev: update expr_of snapshots

* dev: split def/expr, refactor definition

* dev: more consistent definition solver

* dev: rename definition crate

* dev: references work again

* dev: resolve root decl

* dev: resolve root decl

* dev: resolve global definitions

* dev: resolve tokens with world

* feat: render semantic tokens with expression information

* dev: loop detection

* dev: recover type checking

* dev: recover more type checking

* dev: refactor analysis context

* fix: process case of spread left

* dev: label inference

* dev: recover more signature checking

* dev: recover more ident reference checking

* dev: pass all tests

* Revert "dev: dirty changes"

This reverts commit 9ae2dacd0c96851e088feea76c61c184a1cf9722.

* test: update snapshot

* fix: bad cached signatures

* fix: slash problem
This commit is contained in:
Myriad-Dreamin 2024-10-25 23:52:11 +08:00 committed by GitHub
parent 136b162360
commit 81ebc8a635
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
173 changed files with 5529 additions and 4370 deletions

View file

@ -46,3 +46,53 @@ pub fn bind_ty_ctx(input: TokenStream) -> TokenStream {
// Hand the output tokens back to the compiler
TokenStream::from(expanded)
}
/// Derives common accessors for a `Decl`-style enum whose variants are all
/// single-field tuple variants.
///
/// Generates on the annotated enum:
/// - `fn name(&self) -> &Interned<str>`, delegating to each variant payload,
/// - `fn span(&self) -> Span`, delegating likewise,
/// - a `fmt::Debug` impl printing `Variant(payload)`.
///
/// Panics at macro-expansion time when applied to anything but an enum.
#[proc_macro_derive(DeclEnum)]
pub fn gen_decl_enum(input: TokenStream) -> TokenStream {
    // In form of
    // ```
    // pub enum Decl {
    //     Sub1(X),
    //     Sub2(Y),
    // }
    // ```
    // Parse the input tokens into a list of variants
    let input = parse_macro_input!(input as DeriveInput);
    let variants = match input.data {
        syn::Data::Enum(data) => data.variants,
        _ => panic!("only enums are supported"),
    };
    let names = variants.iter().map(|v| &v.ident).collect::<Vec<_>>();
    let input_name = &input.ident;
    let expanded = quote! {
        impl #input_name {
            pub fn name(&self) -> &Interned<str> {
                match self {
                    #(Self::#names(x) => x.name()),*
                }
            }
            pub fn span(&self) -> Span {
                match self {
                    #(Self::#names(x) => x.span()),*
                }
            }
        }
        // Fix: implement Debug for the annotated type (`#input_name`) rather
        // than the hard-coded name `Decl`, so the derive works for any enum.
        impl fmt::Debug for #input_name {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                match self {
                    #(Self::#names(x) => write!(f, concat!(stringify!(#names), "({:?})"), x)),*
                }
            }
        }
    };
    // Hand the output tokens back to the compiler.
    TokenStream::from(expanded)
}

View file

@ -39,6 +39,7 @@ indexmap.workspace = true
ecow.workspace = true
siphasher.workspace = true
chrono.workspace = true
rpds.workspace = true
typst.workspace = true

View file

@ -102,6 +102,12 @@ impl Default for Interned<str> {
}
}
impl Interned<str> {
    /// Returns a reference to the interned empty string (the static `EMPTY`),
    /// avoiding a fresh interning for the common empty-name case.
    pub fn empty() -> &'static Self {
        &EMPTY
    }
}
impl From<&str> for Interned<str> {
fn from(s: &str) -> Self {
Interned::new_str(s)

View file

@ -8,12 +8,8 @@ pub mod color_exprs;
pub use color_exprs::*;
pub mod link_exprs;
pub use link_exprs::*;
pub mod def_use;
pub use def_use::*;
pub mod import;
pub use import::*;
pub mod linked_def;
pub use linked_def::*;
pub mod definition;
pub use definition::*;
pub mod signature;
pub use signature::*;
mod post_tyck;
@ -28,6 +24,215 @@ mod prelude;
mod global;
pub use global::*;
use typst::foundations::{Func, Value};
/// Extracts a callable [`Func`] from a value, if it represents one.
pub(crate) trait ToFunc {
    fn to_func(&self) -> Option<Func>;
}
impl ToFunc for Value {
    fn to_func(&self) -> Option<Func> {
        match self {
            Value::Func(f) => Some(f.clone()),
            // A type is callable through its constructor, when it has one.
            Value::Type(t) => t.constructor().ok(),
            _ => None,
        }
    }
}
#[cfg(test)]
/// Snapshot tests for `get_def_target` (syntactic definition matching).
mod matcher_tests {
    use typst::syntax::LinkedNode;
    use typst_shim::syntax::LinkedNodeExt;

    use crate::{syntax::get_def_target, tests::*};

    #[test]
    fn test() {
        snapshot_testing("match_def", &|ctx, path| {
            let source = ctx.source_by_path(&path).unwrap();
            // Cursor position is encoded inside the test fixture itself.
            let pos = ctx
                .to_typst_pos(find_test_position(&source), &source)
                .unwrap();
            let root = LinkedNode::new(source.root());
            let node = root.leaf_at_compat(pos).unwrap();
            let result = get_def_target(node).map(|e| format!("{:?}", e.node().range()));
            // "<nil>" marks "no definition target" in snapshots.
            let result = result.as_deref().unwrap_or("<nil>");
            assert_snapshot!(result);
        });
    }
}
#[cfg(test)]
/// Snapshot tests over the expression-checking stage (`expr_stage`).
mod expr_tests {
    use reflexo::path::unix_slash;
    use typst::syntax::Source;

    use crate::syntax::{Expr, RefExpr};
    use crate::tests::*;

    /// Renders an expression in a snapshot-friendly, location-annotated form.
    trait ShowExpr {
        fn show_expr(&self, expr: &Expr) -> String;
    }
    impl ShowExpr for Source {
        fn show_expr(&self, node: &Expr) -> String {
            match node {
                Expr::Decl(decl) => {
                    // Declarations are shown with their source range and,
                    // when cross-file, the (package and) file path.
                    let range = self.range(decl.span()).unwrap_or_default();
                    let fid = if let Some(fid) = decl.file_id() {
                        let vpath = fid.vpath().as_rooted_path();
                        match fid.package() {
                            Some(package) => format!(" in {package:?}{}", unix_slash(vpath)),
                            None => format!(" in {}", unix_slash(vpath)),
                        }
                    } else {
                        "".to_string()
                    };
                    format!("{decl:?}@{range:?}{fid}")
                }
                _ => format!("{node}"),
            }
        }
    }
    #[test]
    fn docs() {
        snapshot_testing("docs", &|ctx, path| {
            let source = ctx.source_by_path(&path).unwrap();
            let result = ctx.shared_().expr_stage(&source);
            // Sort for deterministic snapshot output.
            let mut docstrings = result.docstrings.iter().collect::<Vec<_>>();
            docstrings.sort_by(|x, y| x.0.weak_cmp(y.0));
            let mut docstrings = docstrings
                .into_iter()
                .map(|(ident, expr)| {
                    format!(
                        "{} -> {expr:?}",
                        source.show_expr(&Expr::Decl(ident.clone())),
                    )
                })
                .collect::<Vec<_>>();
            let mut snap = vec![];
            // NOTE(review): "docstings" is a typo in the snapshot section
            // header; fixing it requires regenerating stored snapshots.
            snap.push("= docstings".to_owned());
            snap.append(&mut docstrings);
            assert_snapshot!(snap.join("\n"));
        });
    }
    #[test]
    fn scope() {
        snapshot_testing("expr_of", &|ctx, path| {
            let source = ctx.source_by_path(&path).unwrap();
            let result = ctx.shared_().expr_stage(&source);
            // Sort resolutions by declaration for deterministic output.
            let mut resolves = result.resolves.iter().collect::<Vec<_>>();
            resolves.sort_by(|x, y| x.1.decl.weak_cmp(&y.1.decl));
            let mut resolves = resolves
                .into_iter()
                .map(|(_, expr)| {
                    let RefExpr {
                        decl: ident,
                        step,
                        root,
                        val,
                    } = expr.as_ref();
                    format!(
                        "{} -> {}, root {}, val: {val:?}",
                        source.show_expr(&Expr::Decl(ident.clone())),
                        step.as_ref()
                            .map(|e| source.show_expr(e))
                            .unwrap_or_default(),
                        root.as_ref()
                            .map(|e| source.show_expr(e))
                            .unwrap_or_default()
                    )
                })
                .collect::<Vec<_>>();
            let mut exports = result.exports.iter().collect::<Vec<_>>();
            exports.sort_by(|x, y| x.0.cmp(y.0));
            let mut exports = exports
                .into_iter()
                .map(|(ident, node)| {
                    let node = source.show_expr(node);
                    format!("{ident} -> {node}",)
                })
                .collect::<Vec<_>>();
            let mut snap = vec![];
            snap.push("= resolves".to_owned());
            snap.append(&mut resolves);
            snap.push("= exports".to_owned());
            snap.append(&mut exports);
            assert_snapshot!(snap.join("\n"));
        });
    }
}
#[cfg(test)]
/// Snapshot tests over module dependency graph construction.
mod module_tests {
    use reflexo::path::unix_slash;
    use serde_json::json;

    use crate::prelude::*;
    use crate::syntax::module::*;
    use crate::tests::*;

    #[test]
    fn test() {
        snapshot_testing("modules", &|ctx, _| {
            // Render file ids as sorted unix-style rooted paths.
            fn ids(ids: EcoVec<TypstFileId>) -> Vec<String> {
                let mut ids: Vec<String> = ids
                    .into_iter()
                    .map(|id| unix_slash(id.vpath().as_rooted_path()))
                    .collect();
                ids.sort();
                ids
            }
            let dependencies = construct_module_dependencies(&mut ctx.local);
            let mut dependencies = dependencies
                .into_iter()
                .map(|(id, v)| {
                    (
                        unix_slash(id.vpath().as_rooted_path()),
                        ids(v.dependencies),
                        ids(v.dependents),
                    )
                })
                .collect::<Vec<_>>();
            dependencies.sort();
            // remove /main.typ
            dependencies.retain(|(p, _, _)| p != "/main.typ");
            let dependencies = dependencies
                .into_iter()
                .map(|(id, deps, dependents)| {
                    let mut mp = serde_json::Map::new();
                    mp.insert("id".to_string(), json!(id));
                    mp.insert("dependencies".to_string(), json!(deps));
                    mp.insert("dependents".to_string(), json!(dependents));
                    json!(mp)
                })
                .collect::<Vec<_>>();
            assert_snapshot!(JsonRepr::new_pure(dependencies));
        });
    }
}
#[cfg(test)]
mod type_check_tests {
@ -35,7 +240,6 @@ mod type_check_tests {
use typst::syntax::Source;
use crate::analysis::*;
use crate::tests::*;
use super::{Ty, TypeScheme};
@ -45,7 +249,7 @@ mod type_check_tests {
snapshot_testing("type_check", &|ctx, path| {
let source = ctx.source_by_path(&path).unwrap();
let result = type_check(ctx, source.clone());
let result = ctx.type_check(&source);
let result = result
.as_deref()
.map(|e| format!("{:#?}", TypeCheckSnapshot(&source, e)));
@ -118,8 +322,8 @@ mod post_type_check_tests {
let node = root.leaf_at_compat(pos + 1).unwrap();
let text = node.get().clone().into_text();
let result = type_check(ctx, source.clone());
let literal_type = result.and_then(|info| post_type_check(ctx, &info, node));
let result = ctx.type_check(&source);
let literal_type = result.and_then(|info| post_type_check(ctx.shared_(), &info, node));
with_settings!({
description => format!("Check on {text:?} ({pos:?})"),
@ -154,8 +358,8 @@ mod type_describe_tests {
let node = root.leaf_at_compat(pos + 1).unwrap();
let text = node.get().clone().into_text();
let result = type_check(ctx, source.clone());
let literal_type = result.and_then(|info| post_type_check(ctx, &info, node));
let result = ctx.type_check(&source);
let literal_type = result.and_then(|info| post_type_check(ctx.shared_(), &info, node));
with_settings!({
description => format!("Check on {text:?} ({pos:?})"),
@ -168,220 +372,6 @@ mod type_describe_tests {
}
}
#[cfg(test)]
/// Snapshot tests over module dependency graph construction
/// (pre-refactor variant taking the whole analysis context).
mod module_tests {
    use reflexo::path::unix_slash;
    use serde_json::json;

    use crate::prelude::*;
    use crate::syntax::module::*;
    use crate::tests::*;

    #[test]
    fn test() {
        snapshot_testing("modules", &|ctx, _| {
            // Render file ids as sorted unix-style rooted paths.
            fn ids(ids: EcoVec<TypstFileId>) -> Vec<String> {
                let mut ids: Vec<String> = ids
                    .into_iter()
                    .map(|id| unix_slash(id.vpath().as_rooted_path()))
                    .collect();
                ids.sort();
                ids
            }
            let dependencies = construct_module_dependencies(ctx);
            let mut dependencies = dependencies
                .into_iter()
                .map(|(id, v)| {
                    (
                        unix_slash(id.vpath().as_rooted_path()),
                        ids(v.dependencies),
                        ids(v.dependents),
                    )
                })
                .collect::<Vec<_>>();
            dependencies.sort();
            // remove /main.typ
            dependencies.retain(|(p, _, _)| p != "/main.typ");
            let dependencies = dependencies
                .into_iter()
                .map(|(id, deps, dependents)| {
                    let mut mp = serde_json::Map::new();
                    mp.insert("id".to_string(), json!(id));
                    mp.insert("dependencies".to_string(), json!(deps));
                    mp.insert("dependents".to_string(), json!(dependents));
                    json!(mp)
                })
                .collect::<Vec<_>>();
            assert_snapshot!(JsonRepr::new_pure(dependencies));
        });
    }
}
#[cfg(test)]
/// Snapshot tests for `get_def_target` (pre-refactor duplicate).
mod matcher_tests {
    use typst::syntax::LinkedNode;
    use typst_shim::syntax::LinkedNodeExt;

    use crate::{syntax::get_def_target, tests::*};

    #[test]
    fn test() {
        snapshot_testing("match_def", &|ctx, path| {
            let source = ctx.source_by_path(&path).unwrap();
            // Cursor position is encoded inside the test fixture itself.
            let pos = ctx
                .to_typst_pos(find_test_position(&source), &source)
                .unwrap();
            let root = LinkedNode::new(source.root());
            let node = root.leaf_at_compat(pos).unwrap();
            let result = get_def_target(node).map(|e| format!("{:?}", e.node().range()));
            // "<nil>" marks "no definition target" in snapshots.
            let result = result.as_deref().unwrap_or("<nil>");
            assert_snapshot!(result);
        });
    }
}
#[cfg(test)]
/// Snapshot tests for `find_docs_before` (doc-comment lookup above a position).
mod document_tests {
    use crate::syntax::find_docs_before;
    use crate::tests::*;

    #[test]
    fn test() {
        snapshot_testing("docs", &|ctx, path| {
            let source = ctx.source_by_path(&path).unwrap();
            let pos = ctx
                .to_typst_pos(find_test_position(&source), &source)
                .unwrap();
            let result = find_docs_before(&source, pos);
            // "<nil>" marks the absence of docs in snapshots.
            let result = result.as_deref().unwrap_or("<nil>");
            assert_snapshot!(result);
        });
    }
}
#[cfg(test)]
/// Snapshot tests over lexical hierarchy extraction and def-use analysis.
mod lexical_hierarchy_tests {
    use std::collections::HashMap;

    use def_use::DefUseInfo;
    use lexical_hierarchy::LexicalKind;
    use reflexo::path::unix_slash;
    use reflexo::vector::ir::DefId;

    use crate::analysis::def_use;
    // use crate::prelude::*;
    use crate::syntax::{lexical_hierarchy, IdentDef, IdentRef};
    use crate::tests::*;

    /// A snapshot of the def-use information for testing.
    pub struct DefUseSnapshot<'a>(pub &'a DefUseInfo);

    impl<'a> Serialize for DefUseSnapshot<'a> {
        fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
            use serde::ser::SerializeMap;
            // Invert ident_refs into DefId -> list of referencing idents.
            // HashMap<IdentRef, DefId>
            let mut references: HashMap<DefId, Vec<IdentRef>> = {
                let mut map = HashMap::new();
                for (k, v) in &self.0.ident_refs {
                    map.entry(*v).or_insert_with(Vec::new).push(k.clone());
                }
                map
            };
            // sort for deterministic snapshot output
            for (_, v) in references.iter_mut() {
                v.sort();
            }
            #[derive(Serialize)]
            struct DefUseEntry<'a> {
                def: &'a IdentDef,
                refs: &'a Vec<IdentRef>,
            }
            let mut state = serializer.serialize_map(None)?;
            for (k, (ident_ref, ident_def)) in self.0.ident_defs.as_slice().iter().enumerate() {
                // The map index doubles as the definition id.
                let id = DefId(k as u64);
                let empty_ref = Vec::new();
                let entry = DefUseEntry {
                    def: ident_def,
                    refs: references.get(&id).unwrap_or(&empty_ref),
                };
                state.serialize_entry(
                    &format!(
                        "{}@{}",
                        ident_ref.1,
                        unix_slash(ident_ref.0.vpath().as_rootless_path())
                    ),
                    &entry,
                )?;
            }
            // Unresolved references are collected under a synthetic "<nil>" def.
            if !self.0.undefined_refs.is_empty() {
                let mut undefined_refs = self.0.undefined_refs.clone();
                undefined_refs.sort();
                let entry = DefUseEntry {
                    def: &IdentDef {
                        name: "<nil>".into(),
                        kind: LexicalKind::Block,
                        range: 0..0,
                    },
                    refs: &undefined_refs,
                };
                state.serialize_entry("<nil>", &entry)?;
            }
            state.end()
        }
    }
    #[test]
    fn scope() {
        snapshot_testing("lexical_hierarchy", &|ctx, path| {
            let source = ctx.source_by_path(&path).unwrap();
            let result = lexical_hierarchy::get_lexical_hierarchy(
                source,
                lexical_hierarchy::LexicalScopeKind::DefUse,
            );
            assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
        });
    }
    #[test]
    fn test_def_use() {
        // Shared driver run over two fixture sets.
        fn def_use(set: &str) {
            snapshot_testing(set, &|ctx, path| {
                let source = ctx.source_by_path(&path).unwrap();
                let result = ctx.def_use(source);
                let result = result.as_deref().map(DefUseSnapshot);
                assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
            });
        }
        def_use("lexical_hierarchy");
        def_use("def_use");
    }
}
#[cfg(test)]
mod signature_tests {
@ -410,8 +400,8 @@ mod signature_tests {
let callee_node = callee_node.node();
let result = analyze_signature(
ctx,
SignatureTarget::Syntax(source.clone(), callee_node.clone()),
ctx.shared(),
SignatureTarget::Syntax(source.clone(), callee_node.span()),
);
assert_snapshot!(SignatureSnapshot(result.as_ref()));

View file

@ -69,7 +69,10 @@ pub fn analyze_call_no_cache(
callee_node: LinkedNode,
args: ast::Args<'_>,
) -> Option<CallInfo> {
let signature = analyze_signature(ctx, SignatureTarget::SyntaxFast(source, callee_node))?;
let signature = analyze_signature(
ctx.shared(),
SignatureTarget::SyntaxFast(source, callee_node.span()),
)?;
log::trace!("got signature {signature:?}");
let mut info = CallInfo {

View file

@ -1,372 +0,0 @@
//! Static analysis for def-use relations.
use reflexo::hash::hash128;
use super::{prelude::*, ImportInfo};
use crate::adt::snapshot_map::SnapshotMap;
/// The type namespace of def-use relations
///
/// The symbols from different namespaces are not visible to each other.
enum Ns {
    /// Def-use for labels
    Label,
    /// Def-use for values
    Value,
}
/// Maps (external file, optional exported name) to the local references that
/// were resolved against that file.
type ExternalRefMap = HashMap<(TypstFileId, Option<EcoString>), Vec<(Option<DefId>, IdentRef)>>;
/// The def-use information of a source file.
#[derive(Default)]
pub struct DefUseInfo {
    /// The definitions of symbols.
    pub ident_defs: indexmap::IndexMap<(TypstFileId, IdentRef), IdentDef>,
    /// References grouped by the external file (and name) they resolve through.
    external_refs: ExternalRefMap,
    /// The references to defined symbols.
    pub ident_refs: HashMap<IdentRef, DefId>,
    /// The references of labels.
    pub label_refs: HashMap<EcoString, Vec<Range<usize>>>,
    /// The references to undefined symbols.
    pub undefined_refs: Vec<IdentRef>,
    /// Ids of definitions still visible at file end (i.e. exported).
    exports_refs: Vec<DefId>,
    /// Exported name -> definition id.
    exports_defs: HashMap<EcoString, DefId>,
    /// The file this info belongs to; set by `get_def_use_inner`.
    self_id: Option<TypstFileId>,
    /// Hash of this file's source content.
    self_hash: u128,
    /// Lazily computed dependency hash (see `dep_hash`).
    all_hash: once_cell::sync::OnceCell<u128>,
}
impl Hash for DefUseInfo {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // NOTE(review): panics when `self_id` is `None` (e.g. a
        // `Default`-constructed value); real instances set it in
        // `get_def_use_inner`.
        self.dep_hash(self.self_id.unwrap()).hash(state);
    }
}
impl DefUseInfo {
    /// Get the estimated memory usage of the def-use information.
    /// The `+ 32` terms are rough per-entry container overhead estimates.
    pub fn estimated_memory(&self) -> usize {
        std::mem::size_of::<Self>()
            + self.ident_defs.capacity()
                * (std::mem::size_of::<IdentDef>() + std::mem::size_of::<IdentRef>() + 32)
            + self.external_refs.capacity()
                * (std::mem::size_of::<(TypstFileId, Option<String>)>()
                    + std::mem::size_of::<Vec<(Option<DefId>, IdentRef)>>()
                    + 32)
            + self.ident_refs.capacity()
                * (std::mem::size_of::<IdentRef>() + std::mem::size_of::<DefId>() + 32)
            + self.label_refs.capacity() * (std::mem::size_of::<Range<usize>>() + 32)
            + self.undefined_refs.capacity() * (std::mem::size_of::<IdentRef>() + 32)
            + self.exports_refs.capacity() * (std::mem::size_of::<DefId>() + 32)
            + self.exports_defs.capacity()
                * (std::mem::size_of::<String>() + std::mem::size_of::<DefId>() + 32)
    }
    /// Get the definition id of a symbol by its name reference.
    pub fn get_ref(&self, ident: &IdentRef) -> Option<DefId> {
        self.ident_refs.get(ident).copied()
    }
    /// Get the definition of a symbol by its unique id.
    pub fn get_def_by_id(&self, id: DefId) -> Option<(TypstFileId, &IdentDef)> {
        let ((fid, _), def) = self.ident_defs.get_index(id.0 as usize)?;
        Some((*fid, def))
    }
    /// Get the definition of a symbol by its name reference.
    pub fn get_def(&self, fid: TypstFileId, ident: &IdentRef) -> Option<(DefId, &IdentDef)> {
        let (id, _, def) = self.ident_defs.get_full(&(fid, ident.clone()))?;
        Some((DefId(id as u64), def))
    }
    /// Get the references of a symbol by its unique id.
    pub fn get_refs(&self, id: DefId) -> impl Iterator<Item = &IdentRef> {
        self.ident_refs
            .iter()
            .filter_map(move |(k, v)| if *v == id { Some(k) } else { None })
    }
    /// Get external references of a symbol by its name reference.
    pub fn get_external_refs(
        &self,
        ext_id: TypstFileId,
        ext_name: Option<EcoString>,
    ) -> impl Iterator<Item = &(Option<DefId>, IdentRef)> {
        self.external_refs
            .get(&(ext_id, ext_name))
            .into_iter()
            .flatten()
    }
    /// Check if a symbol is exported.
    pub fn is_exported(&self, id: DefId) -> bool {
        self.exports_refs.contains(&id)
    }
    /// Compute (and cache) a hash covering this file and the definitions it
    /// pulled in from other files, skipping entries belonging to `fid`.
    // NOTE(review): the original doc comment here ("Get the definition id of
    // an exported symbol by its name") described a different function.
    pub fn dep_hash(&self, fid: TypstFileId) -> u128 {
        *self.all_hash.get_or_init(|| {
            use siphasher::sip128::Hasher128;
            let mut hasher = reflexo::hash::FingerprintSipHasherBase::default();
            self.self_hash.hash(&mut hasher);
            for (dep_fid, def) in self.ident_defs.keys() {
                if fid == *dep_fid {
                    continue;
                }
                // NOTE(review): this hashes the passed `fid`, not `dep_fid`;
                // looks suspicious — confirm whether `dep_fid` was intended.
                fid.hash(&mut hasher);
                def.hash(&mut hasher);
            }
            hasher.finish128().into()
        })
    }
}
/// Builds the def-use information for `source` from its lexical hierarchy `e`
/// and the resolved `import` info, by scanning with a `DefUseCollector`.
pub(super) fn get_def_use_inner(
    ctx: &mut SearchCtx,
    source: Source,
    e: EcoVec<LexicalHierarchy>,
    import: Arc<ImportInfo>,
) -> Option<Arc<DefUseInfo>> {
    let current_id = source.id();
    let info = DefUseInfo {
        self_hash: hash128(&source),
        self_id: Some(current_id),
        ..Default::default()
    };
    let mut collector = DefUseCollector {
        ctx,
        info,
        id_scope: SnapshotMap::default(),
        label_scope: SnapshotMap::default(),
        import,
        current_id,
        ext_src: None,
    };
    collector.scan(&e);
    // Whatever is still in scope at file end is the export set.
    collector.calc_exports();
    Some(Arc::new(collector.info))
}
/// Walks the lexical hierarchy, collecting definitions and references.
struct DefUseCollector<'a, 'b, 'w> {
    ctx: &'a mut SearchCtx<'b, 'w>,
    /// The def-use info under construction.
    info: DefUseInfo,
    /// Currently visible label bindings.
    label_scope: SnapshotMap<EcoString, DefId>,
    /// Currently visible value bindings.
    id_scope: SnapshotMap<EcoString, DefId>,
    import: Arc<ImportInfo>,
    current_id: TypstFileId,
    /// The external source currently being imported from, if any.
    ext_src: Option<Source>,
}
impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
    /// Runs `f` in a fresh value scope and rolls the scope back afterwards.
    fn enter<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
        let id_snap = self.id_scope.snapshot();
        let res = f(self);
        self.id_scope.rollback_to(id_snap);
        res
    }
    /// Snapshots the surviving top-level value scope as the file's exports.
    fn calc_exports(&mut self) {
        self.info.exports_refs = self.id_scope.values().copied().collect();
        self.info.exports_defs = self
            .id_scope
            .entries()
            .map(|(k, v)| (k.clone(), *v))
            .collect();
    }
    /// Imports a single named symbol from the current external source.
    /// Returns `None` when there is no source or the name is not exported.
    fn import_name(&mut self, name: &str) -> Option<()> {
        let source = self.ext_src.as_ref()?;
        log::debug!("import for def use: {:?}, name: {name}", source.id());
        let (_, external_info) =
            Some(source.id()).zip(AnalysisContext::def_use_(self.ctx, source.clone()))?;
        let ext_id = external_info.exports_defs.get(name)?;
        self.import_from(&external_info, *ext_id);
        Some(())
    }
    /// Copies definition `v` out of `external_info` and binds it locally.
    fn import_from(&mut self, external_info: &DefUseInfo, v: DefId) {
        // Use FileId in ident_defs map should lose stacked import
        // information, but it is currently
        // not a problem.
        let ((ext_id, _), ext_sym) = external_info.ident_defs.get_index(v.0 as usize).unwrap();
        let name = ext_sym.name.clone();
        let ext_ref = IdentRef {
            name: name.clone(),
            range: ext_sym.range.clone(),
        };
        let (id, ..) = self
            .info
            .ident_defs
            .insert_full((*ext_id, ext_ref), ext_sym.clone());
        let id = DefId(id as u64);
        self.id_scope.insert(name, id);
    }
    /// Walks the lexical hierarchy, recording defs and refs per node kind.
    fn scan(&mut self, e: &'a [LexicalHierarchy]) -> Option<()> {
        for e in e {
            match &e.info.kind {
                // These kinds are never produced for DefUse-scoped hierarchies.
                LexicalKind::Var(LexicalVarKind::BibKey) | LexicalKind::Heading(..) => {
                    unreachable!()
                }
                LexicalKind::Var(LexicalVarKind::Label) => {
                    self.insert(Ns::Label, e);
                }
                LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_label_ref(e),
                LexicalKind::Var(LexicalVarKind::Function)
                | LexicalKind::Var(LexicalVarKind::Variable) => {
                    self.insert(Ns::Value, e);
                }
                LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_value_ref(e),
                LexicalKind::Block => {
                    // Blocks introduce a nested value scope.
                    if let Some(e) = &e.children {
                        self.enter(|this| this.scan(e.as_slice()))?;
                    }
                }
                LexicalKind::Mod(LexicalModKind::Module(..)) => {
                    let mut src = self.import.imports.get(&e.info.range)?.clone();
                    log::debug!("check import: {info:?} => {src:?}", info = e.info);
                    // Swap in the imported source while scanning children.
                    std::mem::swap(&mut self.ext_src, &mut src);
                    // todo: process import star
                    if let Some(e) = &e.children {
                        self.scan(e.as_slice())?;
                    }
                    std::mem::swap(&mut self.ext_src, &mut src);
                }
                LexicalKind::Mod(LexicalModKind::Star) => {
                    // `import "x": *` — pull in every export of the module.
                    if let Some(source) = &self.ext_src {
                        log::debug!("diving source for def use: {:?}", source.id());
                        let (_, external_info) = Some(source.id())
                            .zip(AnalysisContext::def_use_(self.ctx, source.clone()))?;
                        for ext_id in &external_info.exports_refs {
                            self.import_from(&external_info, *ext_id);
                        }
                    }
                }
                LexicalKind::Mod(LexicalModKind::PathInclude) => {}
                LexicalKind::Mod(LexicalModKind::PathVar)
                | LexicalKind::Mod(LexicalModKind::ModuleAlias) => self.insert_module(Ns::Value, e),
                LexicalKind::Mod(LexicalModKind::Ident) => match self.import_name(&e.info.name) {
                    Some(()) => {
                        self.insert_value_ref(e);
                    }
                    None => {
                        // Not resolvable in the external file: define it
                        // locally and remember the unresolved external ref.
                        let def_id = self.insert(Ns::Value, e);
                        self.insert_extern(e.info.name.clone(), e.info.range.clone(), Some(def_id));
                    }
                },
                LexicalKind::Mod(LexicalModKind::Alias { target }) => {
                    match self.import_name(&target.name) {
                        Some(()) => {
                            // The aliased target is a reference; the alias
                            // itself is a fresh local definition.
                            self.insert_value_ref_(IdentRef {
                                name: target.name.clone(),
                                range: target.range.clone(),
                            });
                            self.insert(Ns::Value, e);
                        }
                        None => {
                            let def_id = self.insert(Ns::Value, e);
                            self.insert_extern(
                                target.name.clone(),
                                target.range.clone(),
                                Some(def_id),
                            );
                        }
                    }
                }
            }
        }
        Some(())
    }
    /// Defines a module binding and records it as an external reference.
    fn insert_module(&mut self, label: Ns, e: &LexicalHierarchy) {
        self.insert(label, e);
        if let Some(src) = &self.ext_src {
            // NOTE(review): `insert` replaces any previous entry for this key
            // rather than appending — confirm a single ref per key is intended.
            self.info.external_refs.insert(
                (src.id(), None),
                vec![(
                    None,
                    IdentRef {
                        name: e.info.name.clone(),
                        range: e.info.range.clone(),
                    },
                )],
            );
        }
    }
    /// Records a named external reference, optionally tied to a local
    /// redefinition id.
    fn insert_extern(&mut self, name: EcoString, range: Range<usize>, redefine_id: Option<DefId>) {
        if let Some(src) = &self.ext_src {
            self.info.external_refs.insert(
                (src.id(), Some(name.clone())),
                vec![(redefine_id, IdentRef { name, range })],
            );
        }
    }
    /// Inserts a definition into the chosen namespace and returns its id.
    fn insert(&mut self, label: Ns, e: &LexicalHierarchy) -> DefId {
        let snap = match label {
            Ns::Label => &mut self.label_scope,
            Ns::Value => &mut self.id_scope,
        };
        let id_ref = IdentRef {
            name: e.info.name.clone(),
            range: e.info.range.clone(),
        };
        let (id, ..) = self.info.ident_defs.insert_full(
            (self.current_id, id_ref.clone()),
            IdentDef {
                name: e.info.name.clone(),
                kind: e.info.kind.clone(),
                range: e.info.range.clone(),
            },
        );
        let id = DefId(id as u64);
        snap.insert(e.info.name.clone(), id);
        id
    }
    /// Resolves a value reference against the scope; unresolved references
    /// are recorded in `undefined_refs`.
    fn insert_value_ref_(&mut self, id_ref: IdentRef) {
        match self.id_scope.get(&id_ref.name) {
            Some(id) => {
                self.info.ident_refs.insert(id_ref, *id);
            }
            None => {
                self.info.undefined_refs.push(id_ref);
            }
        }
    }
    fn insert_value_ref(&mut self, e: &LexicalHierarchy) {
        self.insert_value_ref_(IdentRef {
            name: e.info.name.clone(),
            range: e.info.range.clone(),
        });
    }
    /// Records a label reference by name and range.
    fn insert_label_ref(&mut self, e: &LexicalHierarchy) {
        let refs = self.info.label_refs.entry(e.info.name.clone()).or_default();
        refs.push(e.info.range.clone());
    }
}

View file

@ -0,0 +1,433 @@
//! Linked definition analysis
use typst::foundations::{IntoValue, Label, Selector, Type};
use typst::introspection::Introspector;
use typst::model::BibliographyElem;
use super::{prelude::*, BuiltinTy, InsTy, SharedContext};
use crate::syntax::{get_deref_target, Decl, DeclExpr, DerefTarget, Expr, ExprInfo};
use crate::VersionedDocument;
/// A linked definition in the source code
/// A linked definition in the source code
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct Definition {
    /// The declaration identifier of the definition.
    pub decl: DeclExpr,
    /// A possible instance of the definition (its inferred type/value term).
    pub term: Option<Ty>,
}
impl Definition {
    /// Creates a definition
    pub fn new(decl: DeclExpr, term: Option<Ty>) -> Self {
        Self { decl, term }
    }
    /// Creates a definition according to some term
    pub fn new_var(name: Interned<str>, term: Ty) -> Self {
        let decl = Decl::lit_(name);
        Self::new(decl.into(), Some(term))
    }
    /// The name of the definition.
    pub fn name(&self) -> &Interned<str> {
        self.decl.name()
    }
    /// The location of the definition.
    ///
    /// Returns the declaring file id and byte range; the range is empty when
    /// the span cannot be resolved within that file.
    // todo: cache
    pub(crate) fn def_at(&self, ctx: &SharedContext) -> Option<(TypstFileId, Range<usize>)> {
        let fid = self.decl.file_id()?;
        let span = self.decl.span();
        let range = (!span.is_detached()).then(|| ctx.source_by_id(fid).ok()?.range(span));
        Some((fid, range.flatten().unwrap_or_default()))
    }
    /// The range of the name of the definition.
    pub fn name_range(&self, ctx: &SharedContext) -> Option<Range<usize>> {
        self.decl.name_range(ctx)
    }
    /// The concrete runtime value of the definition's term, if known.
    pub(crate) fn value(&self) -> Option<Value> {
        self.term.as_ref()?.value()
    }
}
// todo: field definition
/// Finds the definition of a symbol.
pub fn definition(
    ctx: &Arc<SharedContext>,
    source: &Source,
    document: Option<&VersionedDocument>,
    deref_target: DerefTarget<'_>,
) -> Option<Definition> {
    match deref_target {
        // todo: field access
        DerefTarget::VarAccess(node) | DerefTarget::Callee(node) => {
            find_ident_definition(ctx, source, node)
        }
        // Import/include paths resolve through the expression stage.
        DerefTarget::ImportPath(path) | DerefTarget::IncludePath(path) => {
            DefResolver::new(ctx, source)?.of_span(path.span())
        }
        DerefTarget::Label(r) | DerefTarget::Ref(r) => {
            let ref_expr: ast::Expr = r.cast()?;
            let name = match ref_expr {
                ast::Expr::Ref(r) => r.target(),
                ast::Expr::Label(r) => r.get(),
                _ => return None,
            };
            // Labels/refs need the compiled document's introspector.
            let introspector = &document?.document.introspector;
            find_bib_definition(ctx, introspector, name)
                .or_else(|| find_ref_definition(introspector, name, ref_expr))
        }
        DerefTarget::Normal(..) => None,
    }
}
/// Resolves an identifier (possibly behind a field-access chain like `a.b.c`)
/// at a use site to its definition.
fn find_ident_definition(
    ctx: &Arc<SharedContext>,
    source: &Source,
    use_site: LinkedNode,
) -> Option<Definition> {
    // Accessed fields, collected innermost-last (reversed before projecting).
    let mut proj = vec![];
    // Lexical reference
    let ident_store = use_site.clone();
    let ident_ref = match ident_store.cast::<ast::Expr>()? {
        ast::Expr::Ident(e) => e.span(),
        ast::Expr::MathIdent(e) => e.span(),
        ast::Expr::FieldAccess(s) => {
            proj.push(s.field());
            // Walk down to the base identifier of the access chain.
            let mut i = s.target();
            while let ast::Expr::FieldAccess(f) = i {
                proj.push(f.field());
                i = f.target();
            }
            match i {
                ast::Expr::Ident(e) => e.span(),
                ast::Expr::MathIdent(e) => e.span(),
                _ => Span::detached(),
            }
        }
        _ => {
            log::debug!("unsupported kind {kind:?}", kind = use_site.kind());
            Span::detached()
        }
    };
    // Syntactic definition
    let mut def_worker = DefResolver::new(ctx, source)?;
    let expr = def_worker.of_span(ident_ref)?;
    let ty = expr.term.as_ref();
    use Decl::*;
    match expr.decl.as_ref() {
        // Module-like declarations: project remaining fields into the
        // module's value scope to reach the actual member.
        ModuleAlias(..) | PathStem(..) | Module(..) => {
            if !proj.is_empty() {
                let val = ty.and_then(|ty| match ty {
                    Ty::Value(v) => Some(v.val.clone()),
                    Ty::Builtin(BuiltinTy::Type(ty)) => Some(Value::Type(*ty)),
                    Ty::Builtin(BuiltinTy::Element(e)) => Some(Value::Func((*e).into())),
                    _ => None,
                });
                proj.reverse();
                // let def_fid = def_fid?;
                // let m = ctx.module_ins_at(def_fid, def.range.start + 1)?;
                let m = val?;
                let val = project_value(&m, proj.as_slice())?;
                // todo: name range
                let name = proj.last().map(|e| e.get().into());
                return value_to_def(val.clone(), || name, None);
            }
            Some(expr)
        }
        _ => Some(expr),
    }
}
/// Resolves a chain of field projections starting at `m`, looking each
/// identifier up in the current value's scope. Returns the value reached at
/// the end of the chain, or `None` as soon as a lookup fails.
fn project_value<'a>(m: &'a Value, proj: &[ast::Ident<'_>]) -> Option<&'a Value> {
    let mut cur = m;
    for ident in proj {
        cur = cur.scope()?.get(ident.as_str())?;
    }
    Some(cur)
}
/// Finds the bibliography entry backing citation `key`, when the document
/// contains a bibliography element.
fn find_bib_definition(
    ctx: &Arc<SharedContext>,
    introspector: &Introspector,
    key: &str,
) -> Option<Definition> {
    let bib_elem = BibliographyElem::find(introspector.track()).ok()?;
    let Value::Array(arr) = bib_elem.path().clone().into_value() else {
        return None;
    };
    // Silently skip paths that fail to cast.
    let bib_paths = arr.into_iter().map(Value::cast).flat_map(|e| e.ok());
    let bib_info = ctx.analyze_bib(bib_elem.span(), bib_paths)?;
    let entry = bib_info.entries.get(key)?;
    log::debug!("find_bib_definition: {key} => {entry:?}");
    // todo: rename with regard to string format: yaml-key/bib etc.
    let decl = Decl::bib_entry(key.into(), entry.file_id, entry.span.clone());
    Some(Definition::new(decl.into(), None))
}
/// Finds the definition of a label or reference via the document introspector.
fn find_ref_definition(
    introspector: &Introspector,
    name: &str,
    ref_expr: ast::Expr,
) -> Option<Definition> {
    let label = Label::new(name);
    let sel = Selector::Label(label);
    // if it is a label, we put the selection range to itself
    let (decl, ty) = match ref_expr {
        ast::Expr::Label(label) => (Decl::label(name, label.span()), None),
        ast::Expr::Ref(..) => {
            let elem = introspector.query_first(&sel)?;
            let span = elem.labelled_at();
            let decl = if !span.is_detached() {
                Decl::label(name, span)
            } else {
                // otherwise, it is estimated to the span of the pointed content
                Decl::content(elem.span())
            };
            (decl, Some(Ty::Value(InsTy::new(Value::Content(elem)))))
        }
        _ => return None,
    };
    Some(Definition::new(decl.into(), ty))
}
/// The call of a function with calling convention identified.
#[derive(Debug, Clone)]
pub enum CallConvention {
    /// A static function.
    Static(Func),
    /// A method call with a this.
    Method(Value, Func),
    /// A function call by with binding (`f.with(..)`).
    With(Func),
    /// A function call by where binding (`f.where(..)`).
    Where(Func),
}
impl CallConvention {
    /// Get the receiver (`this`) value of a method call, if any.
    // NOTE(review): the original doc comment duplicated `callee`'s ("Get the
    // function pointer of the call"), which was inaccurate for this method.
    pub fn method_this(&self) -> Option<&Value> {
        match self {
            CallConvention::Static(_) => None,
            CallConvention::Method(this, _) => Some(this),
            CallConvention::With(_) => None,
            CallConvention::Where(_) => None,
        }
    }
    /// Get the function pointer of the call.
    pub fn callee(self) -> Func {
        match self {
            CallConvention::Static(f) => f,
            CallConvention::Method(_, f) => f,
            CallConvention::With(f) => f,
            CallConvention::Where(f) => f,
        }
    }
}
/// Resolve a call target to a function or a method with a this.
pub fn resolve_call_target(ctx: &Arc<SharedContext>, node: &SyntaxNode) -> Option<CallConvention> {
    // First try static resolution through definition analysis.
    let callee = (|| {
        let source = ctx.source_by_id(node.span().id()?).ok()?;
        let node = source.find(node.span())?;
        let cursor = node.offset();
        let deref_target = get_deref_target(node, cursor)?;
        let def = ctx.definition(&source, None, deref_target)?;
        let func_ptr = match def.term.and_then(|val| val.value()) {
            Some(Value::Func(f)) => Some(f),
            Some(Value::Type(ty)) => ty.constructor().ok(),
            _ => None,
        }?;
        Some((None, func_ptr))
    })();
    // Fall back to dynamic analysis of the evaluated expression values.
    let callee = callee.or_else(|| {
        let values = ctx.analyze_expr(node);
        if let Some(access) = node.cast::<ast::FieldAccess>() {
            let target = access.target();
            let field = access.field().get();
            let values = ctx.analyze_expr(target.to_untyped());
            // Find a receiver whose type scope provides the accessed method.
            if let Some((this, func_ptr)) = values.into_iter().find_map(|(this, _styles)| {
                if let Some(Value::Func(f)) = this.ty().scope().get(field) {
                    return Some((this, f.clone()));
                }
                None
            }) {
                return Some((Some(this), func_ptr));
            }
        }
        if let Some(func) = values.into_iter().find_map(|v| v.0.to_func()) {
            return Some((None, func));
        };
        None
    })?;
    let (this, func_ptr) = callee;
    // Classify the built-in `with`/`where` bindings specially.
    Some(match this {
        Some(Value::Func(func)) if is_same_native_func(*WITH_FUNC, &func_ptr) => {
            CallConvention::With(func)
        }
        Some(Value::Func(func)) if is_same_native_func(*WHERE_FUNC, &func_ptr) => {
            CallConvention::Where(func)
        }
        Some(this) => CallConvention::Method(this, func_ptr),
        None => CallConvention::Static(func_ptr),
    })
}
/// Whether `x` and `y` refer to the same native or element function.
/// A missing `x` or any other representation pair compares as not-same.
fn is_same_native_func(x: Option<&Func>, y: &Func) -> bool {
    use typst::foundations::func::Repr;
    match x {
        Some(x) => match (x.inner(), y.inner()) {
            (Repr::Native(a), Repr::Native(b)) => a == b,
            (Repr::Element(a), Repr::Element(b)) => a == b,
            _ => false,
        },
        None => false,
    }
}
/// Lazily resolved pointer to the built-in `with` method on the function type.
static WITH_FUNC: LazyLock<Option<&'static Func>> = LazyLock::new(|| {
    match Type::of::<Func>().scope().get("with") {
        Some(Value::Func(f)) => Some(f),
        _ => None,
    }
});
/// Lazily resolved pointer to the built-in `where` method on the function type.
static WHERE_FUNC: LazyLock<Option<&'static Func>> = LazyLock::new(|| {
    match Type::of::<Func>().scope().get("where") {
        Some(Value::Func(f)) => Some(f),
        _ => None,
    }
});
/// Converts a runtime [`Value`] into a [`Definition`].
///
/// `name` is consulted lazily when the value does not carry its own name
/// (e.g. an anonymous closure or a plain value). `name_range` is currently
/// unused but kept for a future, more precise name location.
fn value_to_def(
    value: Value,
    name: impl FnOnce() -> Option<Interned<str>>,
    name_range: Option<Range<usize>>,
) -> Option<Definition> {
    // Carry the full value as the definition's term.
    let val = Ty::Value(InsTy::new(value.clone()));
    // todo name_range
    let _ = name_range;
    Some(match value {
        Value::Func(func) => {
            // Prefer the function's own name; fall back to the provided one.
            let name = func.name().map(|e| e.into()).or_else(name)?;
            // Synthesize an identifier leaf at the function's span so that
            // the declaration points at the function's source location.
            let mut s = SyntaxNode::leaf(SyntaxKind::Ident, &name);
            s.synthesize(func.span());
            let decl = Decl::func(s.cast().unwrap());
            Definition::new(decl.into(), Some(val))
        }
        Value::Module(module) => Definition::new_var(module.name().into(), val),
        _ => Definition::new_var(name()?, val),
    })
}
/// Resolves definitions against the expression information of a source file.
struct DefResolver {
    /// The expression information of the source file being queried.
    ei: Arc<ExprInfo>,
}
impl DefResolver {
    /// Creates a resolver over the expression stage of `source`.
    fn new(ctx: &Arc<SharedContext>, source: &Source) -> Option<Self> {
        Some(Self {
            ei: ctx.expr_stage(source),
        })
    }

    /// Looks up the definition referenced at `span`, if any.
    fn of_span(&mut self, span: Span) -> Option<Definition> {
        if span.is_detached() {
            return None;
        }

        let resolved = self.ei.resolves.get(&span).cloned()?;
        match (&resolved.root, &resolved.val) {
            (Some(expr), term) => self.of_expr(expr, term.as_ref()),
            (None, Some(term)) => self.of_term(term),
            (None, None) => None,
        }
    }

    /// Resolves a definition from an expression, optionally refined by `term`.
    fn of_expr(&mut self, expr: &Expr, term: Option<&Ty>) -> Option<Definition> {
        log::debug!("of_expr: {expr:?}");
        match expr {
            Expr::Decl(decl) => self.of_decl(decl, term),
            Expr::Ref(r) => self.of_expr(r.root.as_ref()?, r.val.as_ref().or(term)),
            _ => None,
        }
    }

    /// Resolves a definition from a type term alone.
    fn of_term(&mut self, term: &Ty) -> Option<Definition> {
        log::debug!("of_term: {term:?}");
        // A concrete value can be turned into a proper definition.
        if let Ty::Value(ins) = term {
            if let Some(def) = value_to_def(ins.val.clone(), || None, None) {
                return Some(def);
            }
        }
        // Otherwise synthesize an anonymous constant carrying the type.
        let constant = Decl::constant(Span::detached());
        Some(Definition::new(constant.into(), Some(term.clone())))
    }

    /// Resolves a definition from a declaration, optionally refined by `term`.
    fn of_decl(&mut self, decl: &Interned<Decl>, term: Option<&Ty>) -> Option<Definition> {
        log::debug!("of_decl: {decl:?}");
        // todo:
        // Imports are forwarded to the declaration they bring into scope.
        if matches!(decl.as_ref(), Decl::Import(..) | Decl::ImportAlias(..)) {
            if let Some(next) = self.of_span(decl.span()) {
                return Some(next);
            }
        }
        Some(Definition::new(decl.clone(), term.cloned()))
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,127 +0,0 @@
//! Import analysis
use super::prelude::*;
use crate::analysis::analyze_import_;
use crate::syntax::{find_expr_in_import, resolve_id_by_path};
/// The import information of a source file.
#[derive(Default)]
pub struct ImportInfo {
    /// The source files that this source file depends on (sorted and
    /// deduplicated).
    pub deps: EcoVec<TypstFileId>,
    /// The resolved source of each import, keyed by the import's byte range.
    /// `None` when the import could not be resolved to a file.
    pub imports: indexmap::IndexMap<Range<usize>, Option<Source>>,
}
impl Hash for ImportInfo {
    /// Hashes the import map (length, then each range/source pair in order);
    /// `deps` is derived from `imports` and need not be hashed separately.
    fn hash<H: Hasher>(&self, state: &mut H) {
        state.write_usize(self.imports.len());
        // todo: import star is stateful
        for (range, source) in &self.imports {
            range.hash(state);
            source.hash(state);
        }
    }
}
pub(super) fn get_import_info(
ctx: comemo::Tracked<dyn World + '_>,
source: Source,
e: EcoVec<LexicalHierarchy>,
) -> Option<Arc<ImportInfo>> {
let current_id = source.id();
let root = LinkedNode::new(source.root());
let mut collector = ImportCollector {
ctx,
info: ImportInfo::default(),
current_id,
root,
};
collector.scan(&e);
let mut deps: Vec<_> = collector
.info
.imports
.values()
.filter_map(|x| x.as_ref().map(|x| x.id()))
.collect();
deps.sort();
deps.dedup();
collector.info.deps = deps.into();
Some(Arc::new(collector.info))
}
/// A worker that walks a lexical hierarchy and records import targets.
struct ImportCollector<'a, 'w> {
    /// The world used to evaluate import expressions.
    ctx: comemo::Tracked<'w, dyn World + 'w>,
    /// The import information being accumulated.
    info: ImportInfo,
    /// The file id of the source being scanned.
    current_id: TypstFileId,
    /// The root node of the source being scanned.
    root: LinkedNode<'a>,
}
impl<'a, 'w> ImportCollector<'a, 'w> {
    /// Recursively scans a lexical hierarchy, recording into `self.info` every
    /// module import that can be resolved to a file.
    fn scan(&mut self, e: &'a [LexicalHierarchy]) {
        for e in e {
            match &e.info.kind {
                // Headings cannot occur in the hierarchy handed to this pass.
                LexicalKind::Heading(..) => unreachable!(),
                LexicalKind::Var(..) => {}
                // Recurse into nested blocks.
                LexicalKind::Block => {
                    if let Some(e) = &e.children {
                        self.scan(e.as_slice());
                    }
                }
                // Import items that do not themselves denote a module source.
                LexicalKind::Mod(
                    LexicalModKind::PathInclude
                    | LexicalModKind::PathVar
                    | LexicalModKind::ModuleAlias
                    | LexicalModKind::Ident
                    | LexicalModKind::Alias { .. }
                    | LexicalModKind::Star,
                ) => {}
                LexicalKind::Mod(LexicalModKind::Module(p)) => {
                    let id = match p {
                        // The import source is a general expression: evaluate
                        // it to a module (use its file id) or a path string.
                        ModSrc::Expr(exp) => {
                            let exp = self
                                .root
                                .leaf_at_compat(exp.range.end)
                                .and_then(find_expr_in_import);
                            let val = exp
                                .as_ref()
                                .and_then(|exp| analyze_import_(self.ctx.deref(), exp));
                            match val {
                                Some(Value::Module(m)) => {
                                    log::debug!(
                                        "current id {:?} exp {exp:?} => id: {:?}",
                                        self.current_id,
                                        m.file_id()
                                    );
                                    m.file_id()
                                }
                                Some(Value::Str(m)) => resolve_id_by_path(
                                    self.ctx.deref(),
                                    self.current_id,
                                    m.as_str(),
                                ),
                                _ => None,
                            }
                        }
                        // The import source is a plain path literal.
                        ModSrc::Path(p) => {
                            resolve_id_by_path(self.ctx.deref(), self.current_id, p.deref())
                        }
                    };
                    log::debug!(
                        "current id {:?} range {:?} => id: {id:?}",
                        self.current_id,
                        e.info.range,
                    );
                    // Record the (possibly unresolved) import at its range.
                    let source = id.and_then(|id| self.ctx.source(id).ok());
                    self.info.imports.insert(e.info.range.clone(), source);
                }
            }
        }
    }
}

View file

@ -1,552 +0,0 @@
//! Linked definition analysis
use typst::foundations::{IntoValue, Label, Selector, Type};
use typst::introspection::Introspector;
use typst::model::BibliographyElem;
use super::prelude::*;
use crate::syntax::{find_source_by_expr, get_deref_target, DerefTarget};
use crate::VersionedDocument;
/// A linked definition in the source code
pub struct DefinitionLink {
    /// The kind of the definition.
    pub kind: LexicalKind,
    /// A possible instance of the definition.
    pub value: Option<Value>,
    /// The name of the definition.
    pub name: EcoString,
    /// The location of the definition, as a file id and a byte range.
    pub def_at: Option<(TypstFileId, Range<usize>)>,
    /// The range of the name of the definition.
    pub name_range: Option<Range<usize>>,
}
impl DefinitionLink {
    /// Convert the definition to an identifier reference.
    ///
    /// Returns `None` when the definition has no name range.
    pub fn to_ident_ref(&self) -> Option<IdentRef> {
        let range = self.name_range.clone()?;
        Some(IdentRef {
            name: self.name.clone(),
            range,
        })
    }
}
// todo: field definition
/// Finds the definition of a symbol.
///
/// The strategy depends on the kind of the deref target: identifier-like
/// targets go through def-use analysis, import/include paths resolve to the
/// whole referenced file, and labels/references are looked up in the compiled
/// document (which must be provided for them to resolve).
pub fn find_definition(
    ctx: &mut AnalysisContext<'_>,
    source: Source,
    document: Option<&VersionedDocument>,
    deref_target: DerefTarget<'_>,
) -> Option<DefinitionLink> {
    match deref_target {
        // todo: field access
        DerefTarget::VarAccess(node) | DerefTarget::Callee(node) => {
            find_ident_definition(ctx, source, node)
        }
        // todo: better support (rename import path?)
        DerefTarget::ImportPath(path) => {
            let parent = path.parent()?;
            let def_fid = parent.span().id()?;
            let import_node = parent.cast::<ast::ModuleImport>()?;
            let source = find_source_by_expr(ctx.world(), def_fid, import_node.source())?;
            // The definition is the whole imported file.
            Some(DefinitionLink {
                kind: LexicalKind::Mod(LexicalModKind::PathVar),
                name: EcoString::new(),
                value: None,
                def_at: Some((source.id(), LinkedNode::new(source.root()).range())),
                name_range: None,
            })
        }
        DerefTarget::IncludePath(path) => {
            let parent = path.parent()?;
            let def_fid = parent.span().id()?;
            let include_node = parent.cast::<ast::ModuleInclude>()?;
            let source = find_source_by_expr(ctx.world(), def_fid, include_node.source())?;
            // The definition is the whole included file.
            Some(DefinitionLink {
                kind: LexicalKind::Mod(LexicalModKind::PathInclude),
                name: EcoString::new(),
                value: None,
                def_at: Some((source.id(), (LinkedNode::new(source.root())).range())),
                name_range: None,
            })
        }
        DerefTarget::Label(r) | DerefTarget::Ref(r) => {
            let ref_expr: ast::Expr = r.cast()?;
            let (ref_node, is_label) = match ref_expr {
                ast::Expr::Ref(r) => (r.target(), false),
                ast::Expr::Label(r) => (r.get(), true),
                _ => return None,
            };
            // Bibliography keys take precedence over ordinary labels.
            let introspector = &document?.document.introspector;
            find_bib_definition(ctx, introspector, ref_node)
                .or_else(|| find_ref_definition(ctx, introspector, ref_node, is_label, r.span()))
        }
        DerefTarget::Normal(..) => None,
    }
}
/// Finds the definition of an identifier-like use site (a plain identifier, a
/// math identifier, or a field-access chain).
///
/// Resolution order: syntactic def-use information of the file first, then
/// the global (standard library) scope as a fallback.
fn find_ident_definition(
    ctx: &mut AnalysisContext<'_>,
    source: Source,
    mut use_site: LinkedNode,
) -> Option<DefinitionLink> {
    // Fields projected through a field-access chain (outermost first; reversed
    // before projection below).
    let mut proj = vec![];
    // Lexical reference
    let ident_store = use_site.clone();
    let ident_ref = match ident_store.cast::<ast::Expr>()? {
        ast::Expr::Ident(e) => Some(IdentRef {
            name: e.get().clone(),
            range: use_site.range(),
        }),
        ast::Expr::MathIdent(e) => Some(IdentRef {
            name: e.get().clone(),
            range: use_site.range(),
        }),
        ast::Expr::FieldAccess(s) => {
            // Collect the projection chain and rebase the use site onto the
            // innermost identifier (`a` in `a.b.c`).
            proj.push(s.field());
            let mut i = s.target();
            while let ast::Expr::FieldAccess(f) = i {
                proj.push(f.field());
                i = f.target();
            }
            match i {
                ast::Expr::Ident(e) => {
                    use_site = use_site.find(e.span())?;
                    Some(IdentRef {
                        name: e.get().clone(),
                        range: use_site.range(),
                    })
                }
                ast::Expr::MathIdent(e) => {
                    use_site = use_site.find(e.span())?;
                    Some(IdentRef {
                        name: e.get().clone(),
                        range: use_site.range(),
                    })
                }
                _ => None,
            }
        }
        _ => {
            log::debug!("unsupported kind {kind:?}", kind = use_site.kind());
            None
        }
    };
    // Syntactic definition
    let source_id = source.id();
    let def_use = ctx.def_use(source);
    let def_info = ident_ref
        .as_ref()
        .zip(def_use.as_ref())
        .and_then(|(ident_ref, def_use)| {
            // A use site resolves via its reference; a def site resolves to
            // itself.
            let def_id = def_use.get_ref(ident_ref);
            let def_id = def_id.or_else(|| Some(def_use.get_def(source_id, ident_ref)?.0))?;
            def_use.get_def_by_id(def_id)
        });
    // Global definition
    let Some((def_fid, def)) = def_info else {
        return resolve_global_value(ctx, use_site.clone(), false).and_then(move |f| {
            value_to_def(ctx, f, || Some(use_site.get().clone().into_text()), None)
        });
    };
    match &def.kind {
        LexicalKind::Var(LexicalVarKind::BibKey)
        | LexicalKind::Heading(..)
        | LexicalKind::Block => unreachable!(),
        LexicalKind::Mod(
            LexicalModKind::Module(..) | LexicalModKind::PathVar | LexicalModKind::ModuleAlias,
        ) => {
            // A module: project the remaining field chain into the module
            // instance to find the referenced member.
            if !proj.is_empty() {
                proj.reverse();
                let m = ctx.module_ins_at(def_fid, def.range.start + 1)?;
                let val = project_value(&m, proj.as_slice())?;
                // todo: name range
                let name = proj.last().map(|e| e.get().clone());
                return value_to_def(ctx, val.clone(), || name, None);
            }
            Some(DefinitionLink {
                kind: def.kind.clone(),
                name: def.name.clone(),
                value: None,
                def_at: Some((def_fid, def.range.clone())),
                name_range: Some(def.range.clone()),
            })
        }
        LexicalKind::Var(
            LexicalVarKind::Variable
            | LexicalVarKind::ValRef
            | LexicalVarKind::Label
            | LexicalVarKind::LabelRef,
        )
        | LexicalKind::Mod(
            LexicalModKind::PathInclude | LexicalModKind::Alias { .. } | LexicalModKind::Ident,
        ) => Some(DefinitionLink {
            kind: def.kind.clone(),
            name: def.name.clone(),
            value: None,
            def_at: Some((def_fid, def.range.clone())),
            name_range: Some(def.range.clone()),
        }),
        LexicalKind::Var(LexicalVarKind::Function) => {
            // Additionally try to recover the runtime function value at the
            // definition site, for richer downstream information.
            let def_source = ctx.source_by_id(def_fid).ok()?;
            let root = LinkedNode::new(def_source.root());
            let def_name = root.leaf_at_compat(def.range.start + 1)?;
            log::info!("def_name for function: {def_name:?}", def_name = def_name);
            let values = ctx.analyze_expr(&def_name);
            let func = values.into_iter().find(|v| matches!(v.0, Value::Func(..)));
            log::info!("okay for function: {func:?}");
            Some(DefinitionLink {
                kind: def.kind.clone(),
                name: def.name.clone(),
                value: func.map(|v| v.0),
                // value: None,
                def_at: Some((def_fid, def.range.clone())),
                name_range: Some(def.range.clone()),
            })
        }
        LexicalKind::Mod(LexicalModKind::Star) => {
            // Star imports are not resolved here yet.
            log::info!("unimplemented star import {ident_ref:?}");
            None
        }
    }
}
/// Projects `m` through the chain of field identifiers in `proj`, following
/// each name through the current value's scope. An empty chain yields `m`.
fn project_value<'a>(m: &'a Value, proj: &[ast::Ident<'_>]) -> Option<&'a Value> {
    let mut value = m;
    for ident in proj {
        value = value.scope()?.get(ident.as_str())?;
    }
    Some(value)
}
/// Finds the bibliography entry that defines the citation `key`, if any.
fn find_bib_definition(
    ctx: &mut AnalysisContext,
    introspector: &Introspector,
    key: &str,
) -> Option<DefinitionLink> {
    let bib_elem = BibliographyElem::find(introspector.track()).ok()?;
    let Value::Array(arr) = bib_elem.path().clone().into_value() else {
        return None;
    };
    let bib_paths = arr.into_iter().filter_map(|path| Value::cast(path).ok());

    let bib_info = ctx.analyze_bib(bib_elem.span(), bib_paths)?;
    let entry = bib_info.entries.get(key);
    log::debug!("find_bib_definition: {key} => {entry:?}");
    let entry = entry?;

    Some(DefinitionLink {
        kind: LexicalKind::Var(LexicalVarKind::BibKey),
        name: key.into(),
        value: None,
        def_at: Some((entry.file_id, entry.span.clone())),
        // todo: rename with regard to string format: yaml-key/bib etc.
        name_range: Some(entry.span.clone()),
    })
}
/// Finds the definition of a label or a reference to one.
///
/// `is_label` distinguishes a label definition site from a reference to it;
/// `span` is the span of the label/reference node itself.
fn find_ref_definition(
    ctx: &mut AnalysisContext,
    introspector: &Introspector,
    ref_node: &str,
    is_label: bool,
    span: Span,
) -> Option<DefinitionLink> {
    let label = Label::new(ref_node);
    let sel = Selector::Label(label);
    let elem = introspector.query_first(&sel)?;
    // if it is a label, we put the selection range to itself
    let (def_at, name_range) = if is_label {
        let fid = span.id()?;
        let source = ctx.source_by_id(fid).ok()?;
        let rng = source.range(span)?;
        // Strip the delimiters around the label text; guard against an empty
        // range.
        let name_range = rng.start + 1..rng.end - 1;
        let name_range = (name_range.start <= name_range.end).then_some(name_range);
        (Some((fid, rng)), name_range)
    } else {
        let span = elem.labelled_at();
        let span = if !span.is_detached() {
            span
        } else {
            // otherwise, it is estimated to the span of the pointed content
            elem.span()
        };
        let fid = span.id()?;
        let source = ctx.source_by_id(fid).ok()?;
        let rng = source.range(span)?;
        (Some((fid, rng)), None)
    };
    Some(DefinitionLink {
        kind: LexicalKind::Var(LexicalVarKind::Label),
        name: ref_node.into(),
        value: Some(Value::Content(elem)),
        def_at,
        name_range,
    })
}
/// The target of a dynamic call.
#[derive(Debug, Clone)]
pub struct DynCallTarget {
    /// The function pointer.
    pub func_ptr: Func,
    /// The this pointer, present only for method-like calls.
    pub this: Option<Value>,
}
/// The call of a function with calling convention identified.
#[derive(Debug, Clone)]
pub enum CallConvention {
    /// A static function.
    Static(Func),
    /// A method call with a this.
    Method(Value, Func),
    /// A function call by with binding, e.g. `f.with(..)`.
    With(Func),
    /// A function call by where binding, e.g. `f.where(..)`.
    Where(Func),
}
impl CallConvention {
    /// Get the this (receiver) value of the call, if it is a method call.
    pub fn method_this(&self) -> Option<&Value> {
        match self {
            CallConvention::Static(_) => None,
            CallConvention::Method(this, _) => Some(this),
            CallConvention::With(_) => None,
            CallConvention::Where(_) => None,
        }
    }
    /// Get the function pointer of the call.
    pub fn callee(self) -> Func {
        match self {
            CallConvention::Static(f) => f,
            CallConvention::Method(_, f) => f,
            CallConvention::With(f) => f,
            CallConvention::Where(f) => f,
        }
    }
}
/// Classifies a dynamic call target into its calling convention.
fn identify_call_convention(target: DynCallTarget) -> CallConvention {
    match target.this {
        // `f.with(..)`: the bound function is carried as the receiver.
        Some(Value::Func(func)) if is_with_func(&target.func_ptr) => CallConvention::With(func),
        // `f.where(..)`: likewise carried as the receiver.
        Some(Value::Func(func)) if is_where_func(&target.func_ptr) => CallConvention::Where(func),
        Some(this) => CallConvention::Method(this, target.func_ptr),
        None => CallConvention::Static(target.func_ptr),
    }
}
/// Whether `func_ptr` is the built-in `with` method of the function type.
fn is_with_func(func_ptr: &Func) -> bool {
    // Cached pointer to `with` from the function type's scope.
    static WITH_FUNC: LazyLock<Option<&'static Func>> = LazyLock::new(|| {
        match Type::of::<Func>().scope().get("with") {
            Some(Value::Func(f)) => Some(f),
            _ => None,
        }
    });
    is_same_native_func(*WITH_FUNC, func_ptr)
}
/// Whether `func_ptr` is the built-in `where` method of the function type.
fn is_where_func(func_ptr: &Func) -> bool {
    // Cached pointer to `where` from the function type's scope. Named
    // `WHERE_FUNC` — the previous name `WITH_FUNC` mirrored `is_with_func`
    // and was misleading.
    static WHERE_FUNC: LazyLock<Option<&'static Func>> = LazyLock::new(|| {
        let fn_ty = Type::of::<Func>();
        let Some(Value::Func(f)) = fn_ty.scope().get("where") else {
            return None;
        };
        Some(f)
    });
    is_same_native_func(*WHERE_FUNC, func_ptr)
}
/// Tests whether two function handles refer to the same native or element
/// function; `false` when `x` is absent or the representations differ.
fn is_same_native_func(x: Option<&Func>, y: &Func) -> bool {
    use typst::foundations::func::Repr;
    if let Some(x) = x {
        match (x.inner(), y.inner()) {
            (Repr::Native(a), Repr::Native(b)) => a == b,
            (Repr::Element(a), Repr::Element(b)) => a == b,
            _ => false,
        }
    } else {
        false
    }
}
// todo: merge me with resolve_callee
/// Resolve a call target to a function or a method with a this.
pub fn resolve_call_target(
ctx: &mut AnalysisContext,
callee: &LinkedNode,
) -> Option<CallConvention> {
resolve_callee_(ctx, callee, true).map(identify_call_convention)
}
/// Resolve a callee expression to a function, ignoring any receiver.
pub fn resolve_callee(ctx: &mut AnalysisContext, callee: &LinkedNode) -> Option<Func> {
    let target = resolve_callee_(ctx, callee, false)?;
    Some(target.func_ptr)
}
/// Resolves a callee node to a dynamic call target.
///
/// Tries, in order: the syntactic definition of the callee, the global
/// (standard library) scope, and finally dynamic analysis of the expression's
/// runtime values. When `resolve_this` is set, a field access may additionally
/// resolve to a method together with its `this` receiver.
fn resolve_callee_(
    ctx: &mut AnalysisContext,
    callee: &LinkedNode,
    resolve_this: bool,
) -> Option<DynCallTarget> {
    None.or_else(|| {
        // Try the syntactic definition of the callee.
        let source = ctx.source_by_id(callee.span().id()?).ok()?;
        let node = source.find(callee.span())?;
        let cursor = node.offset();
        let deref_target = get_deref_target(node, cursor)?;
        let def = find_definition(ctx, source.clone(), None, deref_target)?;
        match def.kind {
            LexicalKind::Var(LexicalVarKind::Function) => match def.value {
                Some(Value::Func(f)) => Some(f),
                _ => None,
            },
            _ => None,
        }
    })
    .or_else(|| {
        // Fall back to a function from the global scope.
        resolve_global_value(ctx, callee.clone(), false).and_then(|v| match v {
            Value::Func(f) => Some(f),
            _ => None,
        })
    })
    .map(|e| DynCallTarget {
        func_ptr: e,
        this: None,
    })
    .or_else(|| {
        // Finally, analyze the expression's possible runtime values.
        let values = ctx.analyze_expr(callee);
        if let Some(func) = values.into_iter().find_map(|v| match v.0 {
            Value::Func(f) => Some(f),
            _ => None,
        }) {
            return Some(DynCallTarget {
                func_ptr: func,
                this: None,
            });
        };
        // Method call on a field access: look the method up on the target's
        // type scope, which also recovers the `this` receiver.
        if resolve_this {
            if let Some(access) = match callee.cast::<ast::Expr>() {
                Some(ast::Expr::FieldAccess(access)) => Some(access),
                _ => None,
            } {
                let target = access.target();
                let field = access.field().get();
                let values = ctx.analyze_expr(&callee.find(target.span())?);
                if let Some((this, func_ptr)) = values.into_iter().find_map(|(this, _styles)| {
                    if let Some(Value::Func(f)) = this.ty().scope().get(field) {
                        return Some((this, f.clone()));
                    }
                    None
                }) {
                    return Some(DynCallTarget {
                        func_ptr,
                        this: Some(this),
                    });
                }
            }
        }
        None
    })
}
// todo: math scope
/// Resolves an identifier or a single-level field access against the global
/// (or math, when `is_math`) standard-library scope.
pub(crate) fn resolve_global_value(
    ctx: &AnalysisContext,
    callee: LinkedNode,
    is_math: bool,
) -> Option<Value> {
    let lib = ctx.world().library();
    let scope = if is_math {
        lib.math.scope()
    } else {
        lib.global.scope()
    };
    let v = match callee.cast::<ast::Expr>()? {
        ast::Expr::Ident(ident) => scope.get(&ident)?,
        // Only `module.field` / `func.field` with an identifier target is
        // supported here.
        ast::Expr::FieldAccess(access) => match access.target() {
            ast::Expr::Ident(target) => match scope.get(&target)? {
                Value::Module(module) => module.field(&access.field()).ok()?,
                Value::Func(func) => func.field(&access.field()).ok()?,
                _ => return None,
            },
            _ => return None,
        },
        _ => return None,
    };
    Some(v.clone())
}
/// Converts a runtime value into a definition link.
///
/// `name` is consulted lazily when the value does not carry its own name;
/// `name_range` is passed through to the resulting link.
fn value_to_def(
    ctx: &mut AnalysisContext,
    value: Value,
    name: impl FnOnce() -> Option<EcoString>,
    name_range: Option<Range<usize>>,
) -> Option<DefinitionLink> {
    // Maps a span back to its file and byte range, when the span has one.
    let def_at = |span: Span| {
        span.id().and_then(|fid| {
            let source = ctx.source_by_id(fid).ok()?;
            Some((fid, source.find(span)?.range()))
        })
    };
    Some(match value {
        Value::Func(func) => {
            // Prefer the function's own name; fall back to the provided one.
            let name = func.name().map(|e| e.into()).or_else(name)?;
            let span = func.span();
            DefinitionLink {
                kind: LexicalKind::Var(LexicalVarKind::Function),
                name,
                value: Some(Value::Func(func)),
                def_at: def_at(span),
                name_range,
            }
        }
        Value::Module(module) => {
            let name = module.name().clone();
            DefinitionLink {
                kind: LexicalKind::Var(LexicalVarKind::Variable),
                name,
                value: None,
                def_at: None,
                name_range,
            }
        }
        _v => {
            // Other values: only a name is known; classified as a path var.
            let name = name()?;
            DefinitionLink {
                kind: LexicalKind::Mod(LexicalModKind::PathVar),
                name,
                value: None,
                def_at: None,
                name_range,
            }
        }
    })
}

View file

@ -3,7 +3,7 @@
use hashbrown::HashSet;
use tinymist_derive::BindTyCtx;
use super::prelude::*;
use super::{prelude::*, SharedContext};
use super::{
ArgsTy, FieldTy, Sig, SigChecker, SigShape, SigSurfaceKind, SigTy, Ty, TyCtx, TyCtxMut,
TypeBounds, TypeScheme, TypeVar,
@ -13,7 +13,7 @@ use crate::syntax::{get_check_target, get_check_target_by_context, CheckTarget,
/// With given type information, check the type of a literal expression again by
/// touching the possible related nodes.
pub(crate) fn post_type_check(
ctx: &mut AnalysisContext,
ctx: Arc<SharedContext>,
info: &TypeScheme,
node: LinkedNode,
) -> Option<Ty> {
@ -92,14 +92,14 @@ fn check_signature<'a>(
}
}
pub(crate) struct PostTypeChecker<'a, 'w> {
ctx: &'a mut AnalysisContext<'w>,
pub(crate) struct PostTypeChecker<'a> {
ctx: Arc<SharedContext>,
pub info: &'a TypeScheme,
checked: HashMap<Span, Option<Ty>>,
locals: TypeScheme,
}
impl<'a, 'w> TyCtx for PostTypeChecker<'a, 'w> {
impl<'a> TyCtx for PostTypeChecker<'a> {
fn global_bounds(&self, var: &Interned<TypeVar>, pol: bool) -> Option<TypeBounds> {
self.info.global_bounds(var, pol)
}
@ -109,7 +109,7 @@ impl<'a, 'w> TyCtx for PostTypeChecker<'a, 'w> {
}
}
impl<'a, 'w> TyCtxMut for PostTypeChecker<'a, 'w> {
impl<'a> TyCtxMut for PostTypeChecker<'a> {
type Snap = <TypeScheme as TyCtxMut>::Snap;
fn start_scope(&mut self) -> Self::Snap {
@ -133,8 +133,8 @@ impl<'a, 'w> TyCtxMut for PostTypeChecker<'a, 'w> {
}
}
impl<'a, 'w> PostTypeChecker<'a, 'w> {
pub fn new(ctx: &'a mut AnalysisContext<'w>, info: &'a TypeScheme) -> Self {
impl<'a> PostTypeChecker<'a> {
pub fn new(ctx: Arc<SharedContext>, info: &'a TypeScheme) -> Self {
Self {
ctx,
info,
@ -365,9 +365,9 @@ where
#[derive(BindTyCtx)]
#[bind(0)]
struct PostSigCheckWorker<'x, 'a, 'w, T>(&'x mut PostTypeChecker<'a, 'w>, &'x mut T);
struct PostSigCheckWorker<'x, 'a, T>(&'x mut PostTypeChecker<'a>, &'x mut T);
impl<'x, 'a, 'w, T: PostSigChecker> SigChecker for PostSigCheckWorker<'x, 'a, 'w, T> {
impl<'x, 'a, T: PostSigChecker> SigChecker for PostSigCheckWorker<'x, 'a, T> {
fn check(
&mut self,
sig: Sig,

View file

@ -1,13 +1,12 @@
pub use core::fmt;
pub use std::collections::{BTreeMap, HashMap};
pub use std::hash::{Hash, Hasher};
pub use std::ops::{Deref, Range};
pub use std::ops::Range;
pub use std::path::{Path, PathBuf};
pub use std::sync::{Arc, LazyLock};
pub use comemo::Track;
pub use ecow::*;
pub use reflexo::vector::ir::DefId;
pub use serde::{Deserialize, Serialize};
pub use typst::foundations::{Func, Value};
pub use typst::syntax::ast::{self, AstNode};
@ -16,11 +15,7 @@ pub use typst::World;
pub use typst_shim::syntax::LinkedNodeExt;
pub use typst_shim::utils::LazyHash;
pub use super::AnalysisContext;
pub use super::SearchCtx;
pub(crate) use super::StrRef;
pub(crate) use super::{AnalysisContext, ToFunc};
pub(crate) use crate::adt::interner::Interned;
pub(crate) use crate::syntax::{
IdentDef, IdentRef, LexicalHierarchy, LexicalKind, LexicalModKind, LexicalVarKind, ModSrc,
};
pub use crate::ty::Ty;

View file

@ -4,7 +4,7 @@ use itertools::Either;
use tinymist_derive::BindTyCtx;
use typst::foundations::{Closure, ParamInfo};
use super::{prelude::*, resolve_callee, BoundChecker, DocSource, SigTy, TypeVar};
use super::{prelude::*, BoundChecker, Definition, DocSource, SharedContext, SigTy, TypeVar};
use crate::analysis::PostTypeChecker;
use crate::docs::{UntypedSignatureDocs, UntypedSymbolDocs, UntypedVarDocs};
use crate::syntax::get_non_strict_def_target;
@ -223,13 +223,13 @@ pub struct PartialSignature {
/// The language object that the signature is being analyzed for.
#[derive(Debug, Clone)]
pub enum SignatureTarget<'a> {
pub enum SignatureTarget {
/// A static node without knowing the function at runtime.
Def(Source, IdentRef),
Def(Option<Source>, Definition),
/// A static node without knowing the function at runtime.
SyntaxFast(Source, LinkedNode<'a>),
SyntaxFast(Source, Span),
/// A static node without knowing the function at runtime.
Syntax(Source, LinkedNode<'a>),
Syntax(Source, Span),
/// A function that is known at runtime.
Runtime(Func),
/// A function that is known at runtime.
@ -237,10 +237,10 @@ pub enum SignatureTarget<'a> {
}
pub(crate) fn analyze_signature(
ctx: &mut AnalysisContext,
ctx: &Arc<SharedContext>,
callee_node: SignatureTarget,
) -> Option<Signature> {
ctx.compute_signature(callee_node.clone(), |ctx| {
ctx.compute_signature(callee_node.clone(), move |ctx| {
log::debug!("analyzing signature for {callee_node:?}");
analyze_type_signature(ctx, &callee_node)
.or_else(|| analyze_dyn_signature(ctx, &callee_node))
@ -248,14 +248,20 @@ pub(crate) fn analyze_signature(
}
fn analyze_type_signature(
ctx: &mut AnalysisContext,
callee_node: &SignatureTarget<'_>,
ctx: &Arc<SharedContext>,
callee_node: &SignatureTarget,
) -> Option<Signature> {
let (type_info, ty) = match callee_node {
SignatureTarget::Def(..) | SignatureTarget::Convert(..) => None,
SignatureTarget::SyntaxFast(source, node) | SignatureTarget::Syntax(source, node) => {
SignatureTarget::Convert(..) => return None,
SignatureTarget::SyntaxFast(source, span) | SignatureTarget::Syntax(source, span) => {
let type_info = ctx.type_check(source)?;
let ty = type_info.type_of_span(node.span())?;
let ty = type_info.type_of_span(*span)?;
Some((type_info, ty))
}
SignatureTarget::Def(source, def) => {
let span = def.decl.span();
let type_info = ctx.type_check(source.as_ref()?)?;
let ty = type_info.type_of_span(span)?;
Some((type_info, ty))
}
SignatureTarget::Runtime(f) => {
@ -275,7 +281,7 @@ fn analyze_type_signature(
let type_var = srcs.into_iter().next()?;
match type_var {
DocSource::Var(v) => {
let mut ty_ctx = PostTypeChecker::new(ctx, &type_info);
let mut ty_ctx = PostTypeChecker::new(ctx.clone(), &type_info);
let sig_ty = Ty::Func(ty.sig_repr(true, &mut ty_ctx)?);
let sig_ty = type_info.simplify(sig_ty, false);
let Ty::Func(sig_ty) = sig_ty else {
@ -385,14 +391,14 @@ fn find_alias_stack<'a>(
#[derive(BindTyCtx)]
#[bind(ctx)]
struct AliasStackChecker<'a, 'b, 'w> {
ctx: &'a mut PostTypeChecker<'b, 'w>,
struct AliasStackChecker<'a, 'b> {
ctx: &'a mut PostTypeChecker<'b>,
stack: Vec<&'a UntypedVarDocs>,
res: Option<Either<&'a UntypedSignatureDocs, Func>>,
checking_with: bool,
}
impl<'a, 'b, 'w> BoundChecker for AliasStackChecker<'a, 'b, 'w> {
impl<'a, 'b> BoundChecker for AliasStackChecker<'a, 'b> {
fn check_var(&mut self, u: &Interned<TypeVar>, pol: bool) {
log::debug!("collecting var {u:?} {pol:?}");
if self.res.is_some() {
@ -455,16 +461,16 @@ impl<'a, 'b, 'w> BoundChecker for AliasStackChecker<'a, 'b, 'w> {
}
fn analyze_dyn_signature(
ctx: &mut AnalysisContext,
callee_node: &SignatureTarget<'_>,
ctx: &Arc<SharedContext>,
callee_node: &SignatureTarget,
) -> Option<Signature> {
let func = match callee_node {
SignatureTarget::Def(..) => return None,
SignatureTarget::Def(_source, def) => def.value()?.to_func()?,
SignatureTarget::SyntaxFast(..) => return None,
SignatureTarget::Syntax(_, node) => {
let func = resolve_callee(ctx, node)?;
log::debug!("got function {func:?}");
func
SignatureTarget::Syntax(source, span) => {
let target = ctx.deref_syntax(source, *span)?;
let def = ctx.definition(source, None, target)?;
def.value()?.to_func()?
}
SignatureTarget::Convert(func) | SignatureTarget::Runtime(func) => func.clone(),
};

View file

@ -7,11 +7,22 @@ use typst::eval::Vm;
use typst::foundations::{Context, Label, Scopes, Styles, Value};
use typst::introspection::Introspector;
use typst::model::{BibliographyElem, Document};
use typst::syntax::{ast, LinkedNode, Span, SyntaxKind};
use typst::syntax::{ast, LinkedNode, Span, SyntaxKind, SyntaxNode};
use typst::World;
/// Try to determine a set of possible values for an expression.
pub fn analyze_expr_(world: &dyn World, node: &LinkedNode) -> EcoVec<(Value, Option<Styles>)> {
pub fn analyze_expr(world: &dyn World, node: &LinkedNode) -> EcoVec<(Value, Option<Styles>)> {
if let Some(parent) = node.parent() {
if parent.kind() == SyntaxKind::FieldAccess && node.index() > 0 {
return analyze_expr(world, parent);
}
}
analyze_expr_(world, node.get())
}
/// Try to determine a set of possible values for an expression.
pub fn analyze_expr_(world: &dyn World, node: &SyntaxNode) -> EcoVec<(Value, Option<Styles>)> {
let Some(expr) = node.cast::<ast::Expr>() else {
return eco_vec![];
};
@ -27,13 +38,7 @@ pub fn analyze_expr_(world: &dyn World, node: &LinkedNode) -> EcoVec<(Value, Opt
_ => {
if node.kind() == SyntaxKind::Contextual {
if let Some(child) = node.children().last() {
return analyze_expr_(world, &child);
}
}
if let Some(parent) = node.parent() {
if parent.kind() == SyntaxKind::FieldAccess && node.index() > 0 {
return analyze_expr_(world, parent);
return analyze_expr_(world, child);
}
}
@ -45,12 +50,13 @@ pub fn analyze_expr_(world: &dyn World, node: &LinkedNode) -> EcoVec<(Value, Opt
}
/// Try to load a module from the current source file.
pub fn analyze_import_(world: &dyn World, source: &LinkedNode) -> Option<Value> {
// Use span in the node for resolving imports with relative paths.
pub fn analyze_import_(world: &dyn World, source: &SyntaxNode) -> (Option<Value>, Option<Value>) {
let source_span = source.span();
let (source, _) = analyze_expr_(world, source).into_iter().next()?;
let Some((source, _)) = analyze_expr_(world, source).into_iter().next() else {
return (None, None);
};
if source.scope().is_some() {
return Some(source);
return (Some(source.clone()), Some(source));
}
let introspector = Introspector::default();
@ -58,10 +64,10 @@ pub fn analyze_import_(world: &dyn World, source: &LinkedNode) -> Option<Value>
let mut sink = Sink::new();
let engine = Engine {
world: world.track(),
route: Route::default(),
introspector: introspector.track(),
traced: traced.track(),
sink: sink.track_mut(),
route: Route::default(),
};
let context = Context::none();
@ -71,10 +77,11 @@ pub fn analyze_import_(world: &dyn World, source: &LinkedNode) -> Option<Value>
Scopes::new(Some(world.library())),
Span::detached(),
);
typst::eval::import(&mut vm, source, source_span, true)
let module = typst::eval::import(&mut vm, source.clone(), source_span, true)
.ok()
.map(Value::Module)
.map(Value::Module);
(Some(source), module)
}
/// A label with a description and details.

View file

@ -3,10 +3,13 @@
use tinymist_derive::BindTyCtx;
use super::{
prelude::*, resolve_global_value, BuiltinTy, DefUseInfo, FlowVarKind, TyCtxMut, TypeBounds,
TypeScheme, TypeVar, TypeVarBounds,
prelude::*, BuiltinTy, FlowVarKind, SharedContext, TyCtxMut, TypeBounds, TypeScheme, TypeVar,
TypeVarBounds,
};
use crate::{
syntax::{Decl, DeclExpr, DeferExpr, Expr, ExprInfo, Processing, UnaryOp},
ty::*,
};
use crate::ty::*;
mod apply;
mod convert;
@ -16,54 +19,49 @@ mod syntax;
pub(crate) use apply::*;
pub(crate) use convert::*;
pub(crate) use docs::*;
pub(crate) use select::*;
/// Type checking at the source unit level.
pub(crate) fn type_check(ctx: &mut AnalysisContext, source: Source) -> Option<Arc<TypeScheme>> {
pub(crate) fn type_check(
ctx: Arc<SharedContext>,
expr_info: Arc<ExprInfo>,
route: &mut Processing<Arc<TypeScheme>>,
) -> Option<Arc<TypeScheme>> {
let mut info = TypeScheme::default();
// Retrieve def-use information for the source.
let def_use_info = ctx.def_use(source.clone())?;
route.insert(expr_info.fid, Arc::new(TypeScheme::default()));
let mut type_checker = TypeChecker {
// Retrieve def-use information for the source.
let root = expr_info.root.clone();
let mut checker = TypeChecker {
ctx,
source: source.clone(),
def_use_info,
ei: expr_info,
info: &mut info,
externals: HashMap::new(),
mode: InterpretMode::Markup,
route,
};
let lnk = LinkedNode::new(source.root());
let type_check_start = std::time::Instant::now();
type_checker.check(lnk);
checker.check(&root);
let elapsed = type_check_start.elapsed();
log::info!("Type checking on {:?} took {elapsed:?}", source.id());
log::debug!("Type checking on {:?} took {elapsed:?}", checker.ei.fid);
checker.route.remove(&checker.ei.fid);
Some(Arc::new(info))
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum InterpretMode {
Markup,
Code,
Math,
}
#[derive(BindTyCtx)]
#[bind(info)]
struct TypeChecker<'a, 'w> {
ctx: &'a mut AnalysisContext<'w>,
source: Source,
def_use_info: Arc<DefUseInfo>,
struct TypeChecker<'a> {
ctx: Arc<SharedContext>,
ei: Arc<ExprInfo>,
info: &'a mut TypeScheme,
externals: HashMap<DefId, Option<Ty>>,
mode: InterpretMode,
route: &'a mut Processing<Arc<TypeScheme>>,
}
impl<'a, 'w> TyCtxMut for TypeChecker<'a, 'w> {
impl<'a> TyCtxMut for TypeChecker<'a> {
type Snap = <TypeScheme as TyCtxMut>::Snap;
fn start_scope(&mut self) -> Self::Snap {
@ -87,90 +85,84 @@ impl<'a, 'w> TyCtxMut for TypeChecker<'a, 'w> {
}
}
impl<'a, 'w> TypeChecker<'a, 'w> {
fn check(&mut self, root: LinkedNode) -> Ty {
let should_record = matches!(root.kind(), SyntaxKind::FuncCall).then(|| root.span());
let w = self.check_syntax(root).unwrap_or(Ty::undef());
if let Some(s) = should_record {
self.info.witness_at_least(s, w.clone());
}
w
impl<'a> TypeChecker<'a> {
fn check(&mut self, root: &Expr) -> Ty {
self.check_syntax(root).unwrap_or(Ty::undef())
}
fn get_def_id(&mut self, s: Span, r: &IdentRef) -> Option<DefId> {
self.def_use_info
.get_ref(r)
.or_else(|| Some(self.def_use_info.get_def(s.id()?, r)?.0))
}
fn copy_based_on(&mut self, fr: &TypeVarBounds, offset: u64, id: DefId) -> Ty {
let encoded = DefId((id.0 + 1) * 0x100_0000_0000 + offset + fr.id().0);
fn copy_doc_vars(
&mut self,
fr: &TypeVarBounds,
var: &Interned<TypeVar>,
base: &Interned<Decl>,
) -> Ty {
let mut gen_var = var.as_ref().clone();
let encoded = Interned::new(Decl::docs(base.clone(), var.clone()));
gen_var.def = encoded.clone();
log::debug!("copy var {fr:?} as {encoded:?}");
let bounds = TypeVarBounds::new(
TypeVar {
name: fr.name().clone(),
def: encoded,
},
fr.bounds.bounds().read().clone(),
);
let bounds = TypeVarBounds::new(gen_var, fr.bounds.bounds().read().clone());
let var = bounds.as_type();
self.info.vars.insert(encoded, bounds);
var
}
fn get_var(&mut self, root: &LinkedNode<'_>, ident: ast::Ident) -> Option<Interned<TypeVar>> {
let s = ident.span();
let r = to_ident_ref(root, ident)?;
let def_id = self.get_def_id(s, &r)?;
self.get_var_by_id(s, r.name.as_ref().into(), def_id)
}
fn get_var(&mut self, decl: &DeclExpr) -> Interned<TypeVar> {
log::debug!("get_var {decl:?}");
let entry = self.info.vars.entry(decl.clone()).or_insert_with(|| {
let name = decl.name().clone();
let decl = decl.clone();
fn get_var_by_id(
&mut self,
s: Span,
name: Interned<str>,
def_id: DefId,
) -> Option<Interned<TypeVar>> {
// todo: false positive of clippy
#[allow(clippy::map_entry)]
if !self.info.vars.contains_key(&def_id) {
let def = self.import_ty(def_id);
let init_expr = self.init_var(def);
self.info.vars.insert(
def_id,
TypeVarBounds::new(TypeVar { name, def: def_id }, init_expr),
);
// Check External variables
let init = decl.file_id().and_then(|fid| {
if fid == self.ei.fid {
return None;
}
log::debug!("import_ty {name} from {fid:?}");
let source = self.ctx.source_by_id(fid).ok()?;
let ext_def_use_info = self.ctx.expr_stage(&source);
// todo: check types in cycle
let ext_type_info = if let Some(route) = self.route.get(&source.id()) {
route.clone()
} else {
self.ctx.type_check_(&source, self.route)?
};
let ext_def = ext_def_use_info.exports.get(&name)?;
// todo: rest expressions
let def = match ext_def {
Expr::Decl(decl) => {
let ext_ty = ext_type_info.vars.get(decl)?.as_type();
if let Some(ext_docs) = ext_type_info.var_docs.get(decl) {
self.info.var_docs.insert(decl.clone(), ext_docs.clone());
}
ext_type_info.simplify(ext_ty, false)
}
_ => return None,
};
Some(ext_type_info.to_bounds(def))
});
TypeVarBounds::new(TypeVar { name, def: decl }, init.unwrap_or_default())
});
let var = entry.var.clone();
let s = decl.span();
if !s.is_detached() {
// todo: record decl types
// let should_record = matches!(root.kind(), SyntaxKind::FuncCall).then(||
// root.span());
// if let Some(s) = should_record {
// self.info.witness_at_least(s, w.clone());
// }
TypeScheme::witness_(s, Ty::Var(var.clone()), &mut self.info.mapping);
}
let var = self.info.vars.get(&def_id).unwrap().var.clone();
TypeScheme::witness_(s, Ty::Var(var.clone()), &mut self.info.mapping);
Some(var)
}
fn import_ty(&mut self, def_id: DefId) -> Option<Ty> {
if let Some(ty) = self.externals.get(&def_id) {
return ty.clone();
}
let (def_id, def_pos) = self.def_use_info.get_def_by_id(def_id)?;
if def_id == self.source.id() {
return None;
}
let source = self.ctx.source_by_id(def_id).ok()?;
let ext_def_use_info = self.ctx.def_use(source.clone())?;
let ext_type_info = self.ctx.type_check(&source)?;
let (ext_def_id, _) = ext_def_use_info.get_def(
def_id,
&IdentRef {
name: def_pos.name.clone(),
range: def_pos.range.clone(),
},
)?;
let ext_ty = ext_type_info.vars.get(&ext_def_id)?.as_type();
Some(ext_type_info.simplify(ext_ty, false))
var
}
fn constrain(&mut self, lhs: &Ty, rhs: &Ty) {
@ -355,11 +347,11 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
let rhs = if expected_in {
match container {
Ty::Tuple(elements) => Ty::Union(elements.clone()),
_ => Ty::Unary(TypeUnary::new(UnaryOp::ElementOf, container.into())),
_ => Ty::Unary(TypeUnary::new(UnaryOp::ElementOf, container.clone())),
}
} else {
// todo: remove not element of
Ty::Unary(TypeUnary::new(UnaryOp::NotElementOf, container.into()))
Ty::Unary(TypeUnary::new(UnaryOp::NotElementOf, container.clone()))
};
self.constrain(elem, &rhs);
@ -375,36 +367,6 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
}
}
fn init_var(&mut self, def: Option<Ty>) -> TypeBounds {
let mut store = TypeBounds::default();
let Some(def) = def else {
return store;
};
match def {
Ty::Var(v) => {
let w = self.info.vars.get(&v.def).unwrap();
match &w.bounds {
FlowVarKind::Strong(w) | FlowVarKind::Weak(w) => {
let w = w.read();
store.lbs.extend(w.lbs.iter().cloned());
store.ubs.extend(w.ubs.iter().cloned());
}
}
}
Ty::Let(v) => {
store.lbs.extend(v.lbs.iter().cloned());
store.ubs.extend(v.ubs.iter().cloned());
}
_ => {
store.ubs.push(def);
}
}
store
}
fn weaken(&mut self, v: &Ty) {
match v {
Ty::Var(v) => {
@ -415,7 +377,7 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
Ty::Field(v) => {
self.weaken(&v.field);
}
Ty::Func(v) | Ty::Args(v) => {
Ty::Func(v) | Ty::Args(v) | Ty::Pattern(v) => {
for ty in v.inputs() {
self.weaken(ty);
}
@ -482,13 +444,11 @@ impl<'a, 'w> TypeChecker<'a, 'w> {
c.clone()
}
}
fn to_ident_ref(root: &LinkedNode, c: ast::Ident) -> Option<IdentRef> {
Some(IdentRef {
name: c.get().clone(),
range: root.find(c.span())?.range(),
})
fn check_defer(&mut self, expr: &DeferExpr) -> Ty {
let expr = self.ei.exprs.get(&expr.span).unwrap();
self.check(&expr.clone())
}
}
struct Joiner {
@ -546,6 +506,8 @@ impl Joiner {
(Ty::With(..), _) => self.definite = Ty::undef(),
(Ty::Args(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Args(w),
(Ty::Args(..), _) => self.definite = Ty::undef(),
(Ty::Pattern(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Pattern(w),
(Ty::Pattern(..), _) => self.definite = Ty::undef(),
(Ty::Select(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Select(w),
(Ty::Select(..), _) => self.definite = Ty::undef(),
(Ty::Unary(w), Ty::Builtin(BuiltinTy::None)) => self.definite = Ty::Unary(w),

View file

@ -7,18 +7,15 @@ use crate::{analysis::ApplyChecker, ty::ArgsTy};
#[derive(BindTyCtx)]
#[bind(base)]
pub struct ApplyTypeChecker<'a, 'b, 'w> {
pub(super) base: &'a mut TypeChecker<'b, 'w>,
pub struct ApplyTypeChecker<'a, 'b> {
pub(super) base: &'a mut TypeChecker<'b>,
pub call_site: Span,
pub call_raw_for_with: Option<Ty>,
pub args: Option<ast::Args<'a>>,
pub resultant: Vec<Ty>,
}
impl<'a, 'b, 'w> ApplyChecker for ApplyTypeChecker<'a, 'b, 'w> {
impl<'a, 'b> ApplyChecker for ApplyTypeChecker<'a, 'b> {
fn apply(&mut self, sig: Sig, args: &Interned<ArgsTy>, pol: bool) {
let _ = self.args;
let (sig, is_partialize) = match sig {
Sig::Partialize(sig) => (*sig, true),
sig => (sig, false),
@ -36,7 +33,7 @@ impl<'a, 'b, 'w> ApplyChecker for ApplyTypeChecker<'a, 'b, 'w> {
if *val == typst::foundations::Type::of::<typst::foundations::Type>() {
if let Some(p0) = args.pos(0) {
self.resultant
.push(Ty::Unary(TypeUnary::new(UnaryOp::TypeOf, p0.into())));
.push(Ty::Unary(TypeUnary::new(UnaryOp::TypeOf, p0.clone())));
}
}
}
@ -59,7 +56,6 @@ impl<'a, 'b, 'w> ApplyChecker for ApplyTypeChecker<'a, 'b, 'w> {
let mut mapper = ApplyTypeChecker {
base,
call_site: Span::detached(),
args: None,
call_raw_for_with: None,
resultant: vec![],
};
@ -75,7 +71,6 @@ impl<'a, 'b, 'w> ApplyChecker for ApplyTypeChecker<'a, 'b, 'w> {
let mut mapper = ApplyTypeChecker {
base,
call_site: Span::detached(),
args: None,
call_raw_for_with: None,
resultant: vec![],
};
@ -179,12 +174,12 @@ impl<T: FnMut(&mut TypeChecker, Sig, bool)> TupleCheckDriver for T {
#[derive(BindTyCtx)]
#[bind(base)]
pub struct TupleChecker<'a, 'b, 'w> {
pub(super) base: &'a mut TypeChecker<'b, 'w>,
pub struct TupleChecker<'a, 'b> {
pub(super) base: &'a mut TypeChecker<'b>,
driver: &'a mut dyn TupleCheckDriver,
}
impl<'a, 'b, 'w> ApplyChecker for TupleChecker<'a, 'b, 'w> {
impl<'a, 'b> ApplyChecker for TupleChecker<'a, 'b> {
fn apply(&mut self, sig: Sig, _args: &Interned<ArgsTy>, pol: bool) {
self.driver.check(self.base, sig, pol);
}

View file

@ -1,6 +1,6 @@
use super::*;
pub fn term_value(ctx: &mut AnalysisContext, value: &Value) -> Ty {
pub fn term_value(ctx: &Arc<SharedContext>, value: &Value) -> Ty {
match value {
Value::Array(a) => {
let values = a.iter().map(term_value_rec).collect::<Vec<_>>();
@ -12,14 +12,14 @@ pub fn term_value(ctx: &mut AnalysisContext, value: &Value) -> Ty {
// todo: create infer variables for plugin functions
let values = p
.iter()
.map(|k| (k.as_str().into(), Ty::Func(SigTy::any()), Span::detached()))
.map(|k| (k.as_str().into(), Ty::Func(SigTy::any())))
.collect();
Ty::Dict(RecordTy::new(values))
}
Value::Dict(d) => {
let values = d
.iter()
.map(|(k, v)| (k.as_str().into(), term_value_rec(v), Span::detached()))
.map(|(k, v)| (k.as_str().into(), term_value_rec(v)))
.collect();
Ty::Dict(RecordTy::new(values))
}
@ -27,7 +27,7 @@ pub fn term_value(ctx: &mut AnalysisContext, value: &Value) -> Ty {
let values = m
.scope()
.iter()
.map(|(k, v, _)| (k.into(), term_value_rec(v), Span::detached()))
.map(|(k, v, _)| (k.into(), term_value_rec(v)))
.collect();
Ty::Dict(RecordTy::new(values))
}

View file

@ -1,72 +1,19 @@
use std::sync::OnceLock;
use reflexo::TakeAs;
use typst::foundations::{IntoValue, Module, Str, Type};
use super::*;
use crate::{
adt::snapshot_map::SnapshotMap,
docs::{
convert_docs, identify_func_docs, identify_var_docs, DocStringKind, UntypedSymbolDocs,
VarDocsT,
},
syntax::{find_docs_of, get_non_strict_def_target},
};
use crate::syntax::DocString;
const DOC_VARS: u64 = 0;
impl<'a, 'w> TypeChecker<'a, 'w> {
pub fn check_var_docs(&mut self, root: &LinkedNode) -> Option<Arc<DocString>> {
let lb = root.cast::<ast::LetBinding>()?;
let first = lb.kind().bindings();
let documenting_id = first
.first()
.and_then(|n| self.get_def_id(n.span(), &to_ident_ref(root, *n)?))?;
self.check_docstring(root, DocStringKind::Variable, documenting_id)
impl<'a> TypeChecker<'a> {
pub fn check_docstring(&mut self, base_id: &Interned<Decl>) -> Option<Arc<DocString>> {
let docstring = self.ei.docstrings.get(base_id)?.clone();
Some(Arc::new(
docstring.take().rename_based_on(base_id.clone(), self),
))
}
pub fn check_docstring(
&mut self,
root: &LinkedNode,
kind: DocStringKind,
base_id: DefId,
) -> Option<Arc<DocString>> {
// todo: cache docs capture
// use parent of params, todo: reliable way to get the def target
let def = get_non_strict_def_target(root.clone())?;
let docs = find_docs_of(&self.source, def)?;
let docstring = self.ctx.compute_docstring(root.span().id()?, docs, kind)?;
Some(Arc::new(docstring.take().rename_based_on(base_id, self)))
}
}
/// The documentation string of an item
#[derive(Debug, Clone, Default)]
pub struct DocString {
/// The documentation of the item
pub docs: Option<EcoString>,
/// The typing on definitions
pub var_bounds: HashMap<DefId, TypeVarBounds>,
/// The variable doc associated with the item
pub vars: BTreeMap<StrRef, VarDoc>,
/// The type of the resultant type
pub res_ty: Option<Ty>,
}
impl DocString {
/// Get the documentation of a variable associated with the item
pub fn get_var(&self, name: &StrRef) -> Option<&VarDoc> {
self.vars.get(name)
}
/// Get the type of a variable associated with the item
pub fn var_ty(&self, name: &StrRef) -> Option<&Ty> {
self.get_var(name).and_then(|v| v.ty.as_ref())
}
fn rename_based_on(self, documenting_id: DefId, base: &mut TypeChecker) -> DocString {
fn rename_based_on(self, documenting_id: Interned<Decl>, base: &mut TypeChecker) -> DocString {
let DocString {
docs,
var_bounds,
@ -77,7 +24,6 @@ impl DocString {
base,
var_bounds: &var_bounds,
base_id: documenting_id,
offset: DOC_VARS,
};
let mut vars = vars;
for (_name, doc) in vars.iter_mut() {
@ -101,308 +47,19 @@ impl DocString {
}
}
/// The documentation string of a variable associated with some item.
#[derive(Debug, Clone, Default)]
pub struct VarDoc {
/// The documentation of the variable
pub docs: EcoString,
/// The type of the variable
pub ty: Option<Ty>,
struct IdRenamer<'a, 'b> {
base: &'a mut TypeChecker<'b>,
var_bounds: &'a HashMap<DeclExpr, TypeVarBounds>,
base_id: Interned<Decl>,
}
impl VarDoc {
/// Convert the variable doc to an untyped version
pub fn to_untyped(&self) -> Arc<UntypedSymbolDocs> {
Arc::new(UntypedSymbolDocs::Variable(VarDocsT {
docs: self.docs.clone(),
return_ty: (),
def_docs: OnceLock::new(),
}))
}
}
pub(crate) fn compute_docstring(
ctx: &AnalysisContext,
fid: TypstFileId,
docs: String,
kind: DocStringKind,
) -> Option<DocString> {
let checker = DocsChecker {
fid,
ctx,
vars: HashMap::new(),
globals: HashMap::default(),
locals: SnapshotMap::default(),
next_id: 0,
};
match kind {
DocStringKind::Function => checker.check_func_docs(docs),
DocStringKind::Variable => checker.check_var_docs(docs),
DocStringKind::Module => None,
DocStringKind::Constant => None,
DocStringKind::Struct => None,
DocStringKind::Reference => None,
}
}
struct DocsChecker<'a, 'w> {
fid: TypstFileId,
ctx: &'a AnalysisContext<'w>,
/// The typing on definitions
vars: HashMap<DefId, TypeVarBounds>,
globals: HashMap<EcoString, Option<Ty>>,
locals: SnapshotMap<EcoString, Ty>,
next_id: u32,
}
impl<'a, 'w> DocsChecker<'a, 'w> {
pub fn check_func_docs(mut self, docs: String) -> Option<DocString> {
let converted = convert_docs(self.ctx.world(), &docs).ok()?;
let converted = identify_func_docs(&converted).ok()?;
let module = self.ctx.module_by_str(docs)?;
let mut params = BTreeMap::new();
for param in converted.params.into_iter() {
params.insert(
param.name.into(),
VarDoc {
docs: param.docs,
ty: self.check_type_strings(&module, &param.types),
},
);
}
let res_ty = converted
.return_ty
.and_then(|ty| self.check_type_strings(&module, &ty));
Some(DocString {
docs: Some(converted.docs),
var_bounds: self.vars,
vars: params,
res_ty,
})
}
pub fn check_var_docs(mut self, docs: String) -> Option<DocString> {
let converted = convert_docs(self.ctx.world(), &docs).ok()?;
let converted = identify_var_docs(converted).ok()?;
let module = self.ctx.module_by_str(docs)?;
let res_ty = converted
.return_ty
.and_then(|ty| self.check_type_strings(&module, &ty.0));
Some(DocString {
docs: Some(converted.docs),
var_bounds: self.vars,
vars: BTreeMap::new(),
res_ty,
})
}
fn generate_var(&mut self, name: StrRef) -> Ty {
self.next_id += 1;
let encoded = DefId(self.next_id as u64);
log::debug!("generate var {name:?} {encoded:?}");
let bounds = TypeVarBounds::new(TypeVar { name, def: encoded }, TypeBounds::default());
let var = bounds.as_type();
self.vars.insert(encoded, bounds);
var
}
fn check_type_strings(&mut self, m: &Module, strs: &str) -> Option<Ty> {
let mut types = vec![];
for name in strs.split(",").map(|e| e.trim()) {
let Some(ty) = self.check_type_ident(m, name) else {
continue;
};
types.push(ty);
}
Some(Ty::from_types(types.into_iter()))
}
fn check_type_ident(&mut self, m: &Module, name: &str) -> Option<Ty> {
static TYPE_REPRS: LazyLock<HashMap<&'static str, Ty>> = LazyLock::new(|| {
let values = Vec::from_iter(
[
Value::None,
Value::Auto,
// Value::Bool(Default::default()),
Value::Int(Default::default()),
Value::Float(Default::default()),
Value::Length(Default::default()),
Value::Angle(Default::default()),
Value::Ratio(Default::default()),
Value::Relative(Default::default()),
Value::Fraction(Default::default()),
Value::Str(Default::default()),
]
.map(|v| v.ty())
.into_iter()
.chain([
Type::of::<typst::visualize::Color>(),
Type::of::<typst::visualize::Gradient>(),
Type::of::<typst::visualize::Pattern>(),
Type::of::<typst::symbols::Symbol>(),
Type::of::<typst::foundations::Version>(),
Type::of::<typst::foundations::Bytes>(),
Type::of::<typst::foundations::Label>(),
Type::of::<typst::foundations::Datetime>(),
Type::of::<typst::foundations::Duration>(),
Type::of::<typst::foundations::Content>(),
Type::of::<typst::foundations::Styles>(),
Type::of::<typst::foundations::Array>(),
Type::of::<typst::foundations::Dict>(),
Type::of::<typst::foundations::Func>(),
Type::of::<typst::foundations::Args>(),
Type::of::<typst::foundations::Type>(),
Type::of::<typst::foundations::Module>(),
]),
);
let shorts = values
.clone()
.into_iter()
.map(|ty| (ty.short_name(), Ty::Builtin(BuiltinTy::Type(ty))));
let longs = values
.into_iter()
.map(|ty| (ty.long_name(), Ty::Builtin(BuiltinTy::Type(ty))));
let builtins = [
("any", Ty::Any),
("bool", Ty::Boolean(None)),
("boolean", Ty::Boolean(None)),
("false", Ty::Boolean(Some(false))),
("true", Ty::Boolean(Some(true))),
];
HashMap::from_iter(shorts.chain(longs).chain(builtins))
});
let builtin_ty = TYPE_REPRS.get(name).cloned();
builtin_ty
.or_else(|| self.locals.get(name).cloned())
.or_else(|| self.check_type_annotation(m, name))
}
fn check_type_annotation(&mut self, m: &Module, name: &str) -> Option<Ty> {
if let Some(v) = self.globals.get(name) {
return v.clone();
}
let v = m.scope().get(name)?;
log::debug!("check doc type annotation: {name:?}");
if let Value::Content(c) = v {
let annotated = c.clone().unpack::<typst::text::RawElem>().ok()?;
let text = annotated.text().clone().into_value().cast::<Str>().ok()?;
let code = typst::syntax::parse_code(&text.as_str().replace('\'', "θ"));
let mut exprs = code.cast::<ast::Code>()?.exprs();
let ret = self.check_type_expr(m, exprs.next()?);
self.globals.insert(name.into(), ret.clone());
ret
} else {
None
}
}
fn check_type_expr(&mut self, m: &Module, s: ast::Expr) -> Option<Ty> {
log::debug!("check doc type expr: {s:?}");
match s {
ast::Expr::Ident(i) => self.check_type_ident(m, i.get().as_str()),
ast::Expr::FuncCall(c) => match c.callee() {
ast::Expr::Ident(i) => {
let name = i.get().as_str();
match name {
"array" => Some({
let ast::Arg::Pos(pos) = c.args().items().next()? else {
return None;
};
Ty::Array(self.check_type_expr(m, pos)?.into())
}),
"tag" => Some({
let ast::Arg::Pos(ast::Expr::Str(s)) = c.args().items().next()? else {
return None;
};
let pkg_id = PackageId::try_from(self.fid).ok();
Ty::Builtin(BuiltinTy::Tag(Box::new((
s.get().into(),
pkg_id.map(From::from),
))))
}),
_ => None,
}
}
_ => None,
},
ast::Expr::Closure(c) => {
log::debug!("check doc closure annotation: {c:?}");
let mut pos = vec![];
let mut named = BTreeMap::new();
let mut rest = None;
let snap = self.locals.snapshot();
let sig = None.or_else(|| {
for param in c.params().children() {
match param {
ast::Param::Pos(ast::Pattern::Normal(ast::Expr::Ident(i))) => {
let name = i.get().clone();
let base_ty = self.generate_var(name.as_str().into());
self.locals.insert(name, base_ty.clone());
pos.push(base_ty);
}
ast::Param::Pos(_) => {
pos.push(Ty::Any);
}
ast::Param::Named(e) => {
let exp = self.check_type_expr(m, e.expr()).unwrap_or(Ty::Any);
named.insert(e.name().into(), exp);
}
// todo: spread left/right
ast::Param::Spread(s) => {
let Some(i) = s.sink_ident() else {
continue;
};
let name = i.get().clone();
let rest_ty = self.generate_var(name.as_str().into());
self.locals.insert(name, rest_ty.clone());
rest = Some(rest_ty);
}
}
}
let body = self.check_type_expr(m, c.body())?;
let sig = SigTy::new(pos.into_iter(), named, None, rest, Some(body)).into();
Some(Ty::Func(sig))
});
self.locals.rollback_to(snap);
sig
}
ast::Expr::Dict(d) => {
log::debug!("check doc dict annotation: {d:?}");
None
}
_ => None,
}
}
}
struct IdRenamer<'a, 'b, 'w> {
base: &'a mut TypeChecker<'b, 'w>,
var_bounds: &'a HashMap<DefId, TypeVarBounds>,
base_id: DefId,
offset: u64,
}
impl<'a, 'b, 'w> TyMutator for IdRenamer<'a, 'b, 'w> {
impl<'a, 'b> TyMutator for IdRenamer<'a, 'b> {
fn mutate(&mut self, ty: &Ty, pol: bool) -> Option<Ty> {
match ty {
Ty::Var(v) => Some(self.base.copy_based_on(
Ty::Var(v) => Some(self.base.copy_doc_vars(
self.var_bounds.get(&v.def).unwrap(),
self.offset,
self.base_id,
v,
&self.base_id,
)),
ty => self.mutate_rec(ty, pol),
}

View file

@ -5,13 +5,13 @@ use crate::analysis::SelectChecker;
#[derive(BindTyCtx)]
#[bind(base)]
pub struct SelectFieldChecker<'a, 'b, 'w> {
pub(super) base: &'a mut TypeChecker<'b, 'w>,
pub struct SelectFieldChecker<'a, 'b> {
pub(super) base: &'a mut TypeChecker<'b>,
pub select_site: Span,
pub resultant: Vec<Ty>,
}
impl<'a, 'b, 'w> SelectChecker for SelectFieldChecker<'a, 'b, 'w> {
impl<'a, 'b> SelectChecker for SelectFieldChecker<'a, 'b> {
fn select(&mut self, iface: Iface, key: &Interned<str>, pol: bool) {
log::debug!("selecting field: {iface:?} {key:?}");
let _ = pol;

File diff suppressed because it is too large Load diff

View file

@ -32,7 +32,7 @@ fn convert_diagnostic(
let source = ctx.world().source(id)?;
lsp_range = diagnostic_range(&source, span, ctx.position_encoding());
} else {
uri = path_to_url(&ctx.root)?;
uri = path_to_url(&ctx.local.root)?;
lsp_range = LspRange::default();
};

View file

@ -16,6 +16,17 @@ use crate::docs::{file_id_repr, module_docs, symbol_docs, SymbolDocs, SymbolsInf
use crate::ty::Ty;
use crate::AnalysisContext;
/// Check Package.
pub fn check_package(ctx: &mut AnalysisContext, spec: &PackageInfo) -> StrResult<()> {
let toml_id = get_manifest_id(spec)?;
let manifest = get_manifest(ctx.world(), toml_id)?;
let entry_point = toml_id.join(&manifest.package.entrypoint);
ctx.shared_().preload_package(entry_point);
Ok(())
}
/// Generate full documents in markdown format
pub fn package_docs(
ctx: &mut AnalysisContext,
@ -30,6 +41,9 @@ pub fn package_docs(
let for_spec = toml_id.package().unwrap();
let entry_point = toml_id.join(&manifest.package.entrypoint);
ctx.preload_package(entry_point);
let SymbolsInfo { root, module_uses } = module_docs(ctx, entry_point)?;
log::debug!("module_uses: {module_uses:#?}");

View file

@ -7,7 +7,7 @@ use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use tinymist_world::base::{EntryState, ShadowApi, TaskInputs};
use tinymist_world::LspWorld;
use typst::foundations::{Bytes, Func, Value};
use typst::foundations::{Bytes, Value};
use typst::syntax::LinkedNode;
use typst::{
diag::StrResult,
@ -15,7 +15,7 @@ use typst::{
};
use super::tidy::*;
use crate::analysis::{ParamAttrs, ParamSpec};
use crate::analysis::{ParamAttrs, ParamSpec, Signature, ToFunc};
use crate::docs::library;
use crate::ty::{DocSource, Interned};
use crate::upstream::plain_docs_sentence;
@ -101,7 +101,8 @@ pub(crate) fn symbol_docs(
docs: Option<&str>,
doc_ty: Option<ShowTypeRepr>,
) -> Result<SymbolDocs, String> {
let signature = sym_value.and_then(|e| signature_docs(ctx, e, doc_ty));
let signature =
sym_value.and_then(|e| signature_docs(&ctx.signature_dyn(e.to_func()?), doc_ty));
if let Some(signature) = signature {
return Ok(SymbolDocs::Function(Box::new(signature)));
}
@ -344,12 +345,9 @@ pub(crate) fn variable_docs(ctx: &mut AnalysisContext, pos: &LinkedNode) -> Opti
}
pub(crate) fn signature_docs(
ctx: &mut AnalysisContext,
runtime_fn: &Value,
sig: &Signature,
mut doc_ty: Option<ShowTypeRepr>,
) -> Option<SignatureDocs> {
let func = runtime_fn.clone().cast::<Func>().ok()?;
let sig = ctx.signature_dyn(func.clone());
let type_sig = sig.type_sig().clone();
let pos_in = sig

View file

@ -235,7 +235,7 @@ impl<'a, 'w> DocumentMetricsWorker<'a, 'w> {
let font_info = std::mem::take(&mut self.font_info)
.into_iter()
.map(|(font, font_info_value)| {
let extra = self.ctx.resources.font_info(font.clone());
let extra = self.ctx.font_info(font.clone());
let info = &font.info();
DocumentFontInfo {
name: info.family.clone(),

View file

@ -1,6 +1,6 @@
---
source: crates/tinymist-query/src/completion.rs
description: Completion on > (262..263)
description: Completion on > (257..258)
expression: "JsonRepr::new_pure(results)"
input_file: crates/tinymist-query/src/fixtures/completion/bug_cite_function_infer.typ
---

View file

@ -14,6 +14,7 @@ input_file: crates/tinymist-query/src/fixtures/completion/complete_purely_label.
"labelDetails": {
"description": "H2"
},
"sortText": "001",
"textEdit": {
"newText": "test>",
"range": {

View file

@ -1,19 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/base.typ
---
{
"x@33..34@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "x",
"range": "33:34"
},
"refs": [
"x@41..42"
]
}
}

View file

@ -1,17 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/base2.typ
---
{
"y@27..28@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "y",
"range": "27:28"
},
"refs": []
}
}

View file

@ -1,31 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/for_loop.typ
---
{
"i@18..19@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "i",
"range": "18:19"
},
"refs": [
"i@28..29"
]
},
"i@5..6@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "i",
"range": "5:6"
},
"refs": [
"i@40..41"
]
}
}

View file

@ -1,32 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_alias.typ
---
{
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
},
"name": "base",
"range": "8:18"
},
"refs": [
"base@20..24",
"base@52..56"
]
},
"foo@47..50@s0.typ": {
"def": {
"kind": {
"Mod": "ModuleAlias"
},
"name": "foo",
"range": "47:50"
},
"refs": [
"foo@59..62"
]
}
}

View file

@ -1,46 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_alias_both.typ
---
{
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
},
"name": "base",
"range": "8:18"
},
"refs": [
"base@20..24",
"base@62..66"
]
},
"foo@47..50@s0.typ": {
"def": {
"kind": {
"Mod": "ModuleAlias"
},
"name": "foo",
"range": "47:50"
},
"refs": []
},
"foo@57..60@s0.typ": {
"def": {
"kind": {
"Mod": {
"Alias": {
"target": "x@52..53"
}
}
},
"name": "foo",
"range": "57:60"
},
"refs": [
"foo@69..72"
]
}
}

View file

@ -1,32 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_ident.typ
---
{
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
},
"name": "base",
"range": "8:18"
},
"refs": [
"base@20..24",
"base@48..52"
]
},
"x@45..46@s0.typ": {
"def": {
"kind": {
"Mod": "Ident"
},
"name": "x",
"range": "45:46"
},
"refs": [
"x@55..56"
]
}
}

View file

@ -1,36 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_ident_alias.typ
---
{
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
},
"name": "base",
"range": "8:18"
},
"refs": [
"base@20..24",
"base@55..59"
]
},
"foo@50..53@s0.typ": {
"def": {
"kind": {
"Mod": {
"Alias": {
"target": "x@45..46"
}
}
},
"name": "foo",
"range": "50:53"
},
"refs": [
"foo@62..65"
]
}
}

View file

@ -1,32 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_star.typ
---
{
"base@8..18@s1.typ": {
"def": {
"kind": {
"Mod": "PathVar"
},
"name": "base",
"range": "8:18"
},
"refs": [
"base@20..24",
"base@48..52"
]
},
"x@5..6@base.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "x",
"range": "5:6"
},
"refs": [
"x@55..56"
]
}
}

View file

@ -1,31 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_star_recursive.typ
---
{
"x@5..6@base.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "x",
"range": "5:6"
},
"refs": [
"x@24..25"
]
},
"y@27..28@base2.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "y",
"range": "27:28"
},
"refs": [
"y@28..29"
]
}
}

View file

@ -1,31 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/param_scope.typ
---
{
"term@10..14@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "term",
"range": "10:14"
},
"refs": [
"term@18..22"
]
},
"term@5..9@s0.typ": {
"def": {
"kind": {
"Var": "Function"
},
"name": "term",
"range": "5:9"
},
"refs": [
"term@25..29"
]
}
}

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/base.typ
---
= docstings
Pattern(..)@19..20 in /s0.typ -> DocString { docs: Some("This is X."), var_bounds: {}, vars: {}, res_ty: None }

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/blocky.typ
---
= docstings
Pattern(..)@21..22 in /s0.typ -> DocString { docs: Some("This is X"), var_bounds: {}, vars: {}, res_ty: None }

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/blocky2.typ
---
= docstings
Pattern(..)@41..42 in /s0.typ -> DocString { docs: Some("This is X\nNote: This is not Y"), var_bounds: {}, vars: {}, res_ty: None }

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/multiple_line.typ
---
= docstings
Pattern(..)@43..44 in /s0.typ -> DocString { docs: Some("This is X.\nNote: this is not Y."), var_bounds: {}, vars: {}, res_ty: None }

View file

@ -0,0 +1,6 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/no_comment.typ
---
= docstings

View file

@ -1,6 +1,6 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/not_attach.typ
---
<nil>
= docstings

View file

@ -1,6 +1,6 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/not_attach2.typ
---
<nil>
= docstings

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/param.typ
---
= docstings
Func(f)@20..21 in /s0.typ -> DocString { docs: Some("Docs for f."), var_bounds: {}, vars: {}, res_ty: None }

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/param_in_init.typ
---
= docstings
Func(f)@20..21 in /s0.typ -> DocString { docs: Some("Docs for f."), var_bounds: {}, vars: {}, res_ty: None }

View file

@ -0,0 +1,7 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/docs/raw.typ
---
= docstings
Func(f)@109..110 in /s0.typ -> DocString { docs: Some(" Docs for f.\n\n ```typst\n #show raw: it => it {\n it\n }\n ```"), var_bounds: {}, vars: {}, res_ty: None }

View file

@ -1,6 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/docs/base.typ
---
This is X.

View file

@ -1,6 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/docs/blocky.typ
---
This is X

View file

@ -1,7 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/docs/blocky2.typ
---
This is X
Note: This is not Y

View file

@ -1,7 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/docs/multiple_line.typ
---
This is X.
Note: this is not Y.

View file

@ -1,6 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "result.as_deref().unwrap_or(\"<nil>\")"
input_file: crates/tinymist-query/src/fixtures/document/no_comment.typ
---
<nil>

View file

@ -1,6 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/docs/param.typ
---
<nil>

View file

@ -1,6 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/docs/param_in_init.typ
---
<nil>

View file

@ -1,12 +0,0 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: result
input_file: crates/tinymist-query/src/fixtures/docs/raw.typ
---
Docs for f.
```typst
#show raw: it => it {
it
}
```

View file

@ -0,0 +1,2 @@
#let (a, b) = (1, 1);
#let (a, b) = (b, a);

View file

@ -0,0 +1,5 @@
#let z = 1;
#let x = (
y: z,
"1 2": z,
)

View file

@ -0,0 +1,2 @@
#let x = 1;
#let f(a) = a;

View file

@ -0,0 +1,2 @@
#import "base.typ"
#base

View file

@ -0,0 +1,9 @@
// path: base.typ
#let f() = 1;
-----
// path: derive.typ
#import "base.typ"
-----
#import "derive.typ": *
#import base: *
#f()

View file

@ -0,0 +1,2 @@
#let x = 1;
#let x = x;

View file

@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/base.typ
---
= resolves
Var(x)@33..34 in /s0.typ -> Var(x)@33..34 in /s0.typ, root Var(x)@33..34 in /s0.typ, val: None
IdentRef(x)@41..42 in /s0.typ -> Var(x)@33..34 in /s0.typ, root Var(x)@33..34 in /s0.typ, val: None
= exports
x -> Var(x)@33..34 in /s0.typ

View file

@ -0,0 +1,11 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/base2.typ
---
= resolves
ModuleImport(..)@1..21 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
ImportPath(base)@8..18 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
Var(y)@27..28 in /s0.typ -> Var(y)@27..28 in /s0.typ, root Var(y)@27..28 in /s0.typ, val: None
= exports
y -> Var(y)@27..28 in /s0.typ

View file

@ -0,0 +1,15 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/destructing.typ
---
= resolves
Var(a)@6..7 in /s0.typ -> Var(a)@6..7 in /s0.typ, root Var(a)@6..7 in /s0.typ, val: None
Var(a)@28..29 in /s0.typ -> Var(a)@28..29 in /s0.typ, root Var(a)@28..29 in /s0.typ, val: None
IdentRef(a)@40..41 in /s0.typ -> Var(a)@6..7 in /s0.typ, root Var(a)@6..7 in /s0.typ, val: None
Var(b)@9..10 in /s0.typ -> Var(b)@9..10 in /s0.typ, root Var(b)@9..10 in /s0.typ, val: None
Var(b)@31..32 in /s0.typ -> Var(b)@31..32 in /s0.typ, root Var(b)@31..32 in /s0.typ, val: None
IdentRef(b)@37..38 in /s0.typ -> Var(b)@9..10 in /s0.typ, root Var(b)@9..10 in /s0.typ, val: None
= exports
a -> Var(a)@28..29 in /s0.typ
b -> Var(b)@31..32 in /s0.typ

View file

@ -0,0 +1,13 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/dict.typ
---
= resolves
Var(x)@17..18 in /s0.typ -> Var(x)@17..18 in /s0.typ, root Var(x)@17..18 in /s0.typ, val: None
Var(z)@5..6 in /s0.typ -> Var(z)@5..6 in /s0.typ, root Var(z)@5..6 in /s0.typ, val: None
IdentRef(z)@28..29 in /s0.typ -> Var(z)@5..6 in /s0.typ, root Var(z)@5..6 in /s0.typ, val: None
IdentRef(z)@40..41 in /s0.typ -> Var(z)@5..6 in /s0.typ, root Var(z)@5..6 in /s0.typ, val: None
= exports
x -> Var(x)@17..18 in /s0.typ
z -> Var(z)@5..6 in /s0.typ

View file

@ -0,0 +1,11 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/for_loop.typ
---
= resolves
Var(i)@5..6 in /s0.typ -> Var(i)@5..6 in /s0.typ, root Var(i)@5..6 in /s0.typ, val: None
Var(i)@18..19 in /s0.typ -> Var(i)@18..19 in /s0.typ, root Var(i)@18..19 in /s0.typ, val: None
IdentRef(i)@28..29 in /s0.typ -> Var(i)@18..19 in /s0.typ, root Var(i)@18..19 in /s0.typ, val: None
IdentRef(i)@40..41 in /s0.typ -> Var(i)@5..6 in /s0.typ, root Var(i)@5..6 in /s0.typ, val: None
= exports

View file

@ -0,0 +1,13 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/func.typ
---
= resolves
Var(a)@19..20 in /s0.typ -> Var(a)@19..20 in /s0.typ, root Var(a)@19..20 in /s0.typ, val: None
IdentRef(a)@24..25 in /s0.typ -> Var(a)@19..20 in /s0.typ, root Var(a)@19..20 in /s0.typ, val: None
Func(f)@17..18 in /s0.typ -> Func(f)@17..18 in /s0.typ, root Func(f)@17..18 in /s0.typ, val: None
Var(x)@5..6 in /s0.typ -> Var(x)@5..6 in /s0.typ, root Var(x)@5..6 in /s0.typ, val: None
= exports
f -> Func(f)@17..18 in /s0.typ
x -> Var(x)@5..6 in /s0.typ

View file

@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import.typ
---
= resolves
PathStem(base)@8..18 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@20..24 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
= exports
base -> ref(PathStem(base), step = Module(base), root = Module(base))

View file

@ -0,0 +1,15 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import_alias.typ
---
= resolves
PathStem(base)@8..18 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@20..24 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
ImportPath(base)@33..43 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@52..56 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
ModuleAlias(foo)@47..50 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(foo)@59..62 in /s0.typ -> ModuleAlias(foo)@47..50 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
= exports
base -> ref(PathStem(base), step = Module(base), root = Module(base))
foo -> ref(ModuleAlias(foo), step = Module(base), root = Module(base))

View file

@ -0,0 +1,17 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import_alias_both.typ
---
= resolves
PathStem(base)@8..18 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@20..24 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
ImportPath(base)@33..43 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@62..66 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
ModuleAlias(foo)@47..50 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
ImportAlias(foo)@57..60 in /s0.typ -> Import(x)@52..53 in /s0.typ, root (ModuleAlias(foo)).IdentRef(x), val: None
IdentRef(foo)@69..72 in /s0.typ -> ImportAlias(foo)@57..60 in /s0.typ, root (ModuleAlias(foo)).IdentRef(x), val: None
Import(x)@52..53 in /s0.typ -> (ModuleAlias(foo)).IdentRef(x), root (ModuleAlias(foo)).IdentRef(x), val: None
= exports
base -> ref(PathStem(base), step = Module(base), root = Module(base))
foo -> ref(ImportAlias(foo), step = Import(x), root = (ModuleAlias(foo)).IdentRef(x))

View file

@ -0,0 +1,13 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import_by_ident.typ
---
= resolves
ModuleImport(..)@1..23 in /s2.typ -> Module(derive)@0..0 in /derive.typ, root Module(derive)@0..0 in /derive.typ, val: None
ModuleImport(..)@25..39 in /s2.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
ImportPath(derive)@8..20 in /s2.typ -> Module(derive)@0..0 in /derive.typ, root Module(derive)@0..0 in /derive.typ, val: None
IdentRef(f)@41..42 in /s2.typ -> Func(f)@0..0 in /base.typ, root Func(f)@0..0 in /base.typ, val: None
= exports
base -> ref(PathStem(base), step = Module(base), root = Module(base))
f -> Func(f)@0..0 in /base.typ

View file

@ -0,0 +1,16 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import_ident.typ
---
= resolves
ModuleImport(..)@26..46 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
PathStem(base)@8..18 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@20..24 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
ImportPath(base)@33..43 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@48..52 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
Import(x)@45..46 in /s0.typ -> (ModuleImport(..)).IdentRef(x), root (ModuleImport(..)).IdentRef(x), val: None
IdentRef(x)@55..56 in /s0.typ -> Import(x)@45..46 in /s0.typ, root (ModuleImport(..)).IdentRef(x), val: None
= exports
base -> ref(PathStem(base), step = Module(base), root = Module(base))
x -> ref(Import(x), step = (ModuleImport(..)).IdentRef(x), root = (ModuleImport(..)).IdentRef(x))

View file

@ -0,0 +1,17 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import_ident_alias.typ
---
= resolves
ModuleImport(..)@26..53 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
PathStem(base)@8..18 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@20..24 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
ImportPath(base)@33..43 in /s0.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@55..59 in /s0.typ -> PathStem(base)@8..18 in /s0.typ, root Module(base)@0..0 in /base.typ, val: None
ImportAlias(foo)@50..53 in /s0.typ -> Import(x)@45..46 in /s0.typ, root (ModuleImport(..)).IdentRef(x), val: None
IdentRef(foo)@62..65 in /s0.typ -> ImportAlias(foo)@50..53 in /s0.typ, root (ModuleImport(..)).IdentRef(x), val: None
Import(x)@45..46 in /s0.typ -> (ModuleImport(..)).IdentRef(x), root (ModuleImport(..)).IdentRef(x), val: None
= exports
base -> ref(PathStem(base), step = Module(base), root = Module(base))
foo -> ref(ImportAlias(foo), step = Import(x), root = (ModuleImport(..)).IdentRef(x))

View file

@ -0,0 +1,15 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import_star.typ
---
= resolves
ModuleImport(..)@26..46 in /s1.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
PathStem(base)@8..18 in /s1.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@20..24 in /s1.typ -> PathStem(base)@8..18 in /s1.typ, root Module(base)@0..0 in /base.typ, val: None
ImportPath(base)@33..43 in /s1.typ -> Module(base)@0..0 in /base.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(base)@48..52 in /s1.typ -> PathStem(base)@8..18 in /s1.typ, root Module(base)@0..0 in /base.typ, val: None
IdentRef(x)@55..56 in /s1.typ -> Var(x)@0..0 in /base.typ, root Var(x)@0..0 in /base.typ, val: None
= exports
base -> ref(PathStem(base), step = Module(base), root = Module(base))
x -> Var(x)@0..0 in /base.typ

View file

@ -0,0 +1,13 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/import_star_recursive.typ
---
= resolves
ModuleImport(..)@1..22 in /s2.typ -> Module(base2)@0..0 in /base2.typ, root Module(base2)@0..0 in /base2.typ, val: None
ImportPath(base2)@8..19 in /s2.typ -> Module(base2)@0..0 in /base2.typ, root Module(base2)@0..0 in /base2.typ, val: None
IdentRef(x)@24..25 in /s2.typ -> Var(x)@0..0 in /base.typ, root Var(x)@0..0 in /base.typ, val: None
IdentRef(y)@28..29 in /s2.typ -> Var(y)@0..0 in /base2.typ, root Var(y)@0..0 in /base2.typ, val: None
= exports
x -> Var(x)@0..0 in /base.typ
y -> Var(y)@0..0 in /base2.typ

View file

@ -0,0 +1,12 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/param_scope.typ
---
= resolves
Func(term)@5..9 in /s0.typ -> Func(term)@5..9 in /s0.typ, root Func(term)@5..9 in /s0.typ, val: None
Var(term)@10..14 in /s0.typ -> Var(term)@10..14 in /s0.typ, root Var(term)@10..14 in /s0.typ, val: None
IdentRef(term)@18..22 in /s0.typ -> Var(term)@10..14 in /s0.typ, root Var(term)@10..14 in /s0.typ, val: None
IdentRef(term)@25..29 in /s0.typ -> Func(term)@5..9 in /s0.typ, root Func(term)@5..9 in /s0.typ, val: None
= exports
term -> Func(term)@5..9 in /s0.typ

View file

@ -0,0 +1,11 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "snap.join(\"\\n\")"
input_file: crates/tinymist-query/src/fixtures/expr_of/redefine.typ
---
= resolves
Var(x)@5..6 in /s0.typ -> Var(x)@5..6 in /s0.typ, root Var(x)@5..6 in /s0.typ, val: None
Var(x)@17..18 in /s0.typ -> Var(x)@17..18 in /s0.typ, root Var(x)@17..18 in /s0.typ, val: None
IdentRef(x)@21..22 in /s0.typ -> Var(x)@5..6 in /s0.typ, root Var(x)@5..6 in /s0.typ, val: None
= exports
x -> Var(x)@17..18 in /s0.typ

View file

@ -0,0 +1 @@
#(/* position after */ str);

View file

@ -0,0 +1,6 @@
---
source: crates/tinymist-query/src/goto_definition.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/goto_definition/builtin.typ
---
null

View file

@ -6,8 +6,8 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_alias.typ
[
{
"originSelectionRange": "1:23:1:26",
"targetRange": "0:25:0:28",
"targetSelectionRange": "0:25:0:28",
"targetUri": "s1.typ"
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6",
"targetUri": "base.typ"
}
]

View file

@ -6,8 +6,8 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_new_name.t
[
{
"originSelectionRange": "1:23:1:24",
"targetRange": "0:22:0:23",
"targetSelectionRange": "0:22:0:23",
"targetUri": "s1.typ"
"targetRange": "0:0:0:0",
"targetSelectionRange": "0:0:0:0",
"targetUri": "base.typ"
}
]

View file

@ -6,8 +6,8 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_package.ty
[
{
"originSelectionRange": "1:20:1:27",
"targetRange": "0:8:0:32",
"targetSelectionRange": "0:8:0:32",
"targetUri": "base.typ"
"targetRange": "0:0:0:0",
"targetSelectionRange": "0:0:0:0",
"targetUri": "lib.typ"
}
]

View file

@ -6,8 +6,8 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_package_se
[
{
"originSelectionRange": "1:20:1:27",
"targetRange": "0:8:0:32",
"targetSelectionRange": "0:8:0:32",
"targetUri": "base.typ"
"targetRange": "0:0:0:0",
"targetSelectionRange": "0:0:0:0",
"targetUri": "lib.typ"
}
]

View file

@ -6,8 +6,8 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_self.typ
[
{
"originSelectionRange": "1:23:1:27",
"targetRange": "0:8:0:18",
"targetSelectionRange": "0:8:0:18",
"targetUri": "s1.typ"
"targetRange": "0:0:0:0",
"targetSelectionRange": "0:0:0:0",
"targetUri": "base.typ"
}
]

View file

@ -6,8 +6,8 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/inside_import_ali
[
{
"originSelectionRange": "2:43:2:45",
"targetRange": "2:43:2:45",
"targetSelectionRange": "2:43:2:45",
"targetUri": "s1.typ"
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6",
"targetUri": "base.typ"
}
]

View file

@ -3,11 +3,4 @@ source: crates/tinymist-query/src/goto_definition.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/goto_definition/module_select.typ
---
[
{
"originSelectionRange": "1:2:1:12",
"targetRange": "0:6:0:9",
"targetSelectionRange": "0:6:0:9",
"targetUri": "variable.typ"
}
]
null

View file

@ -3,11 +3,4 @@ source: crates/tinymist-query/src/goto_definition.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/goto_definition/module_select_alias.typ
---
[
{
"originSelectionRange": "1:2:1:15",
"targetRange": "0:6:0:9",
"targetSelectionRange": "0:6:0:9",
"targetUri": "variable.typ"
}
]
null

View file

@ -4,4 +4,4 @@ description: "Check on \")\" (69)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/post_type_check/text_stroke2.typ
---
( ⪯ ( ⪰ Any ⪯ Stroke))
( ⪯ ( ⪯ Stroke))

View file

@ -4,4 +4,4 @@ description: "Check on \"(\" (48)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/post_type_check/text_stroke4.typ
---
( ⪯ ( ⪰ Any ⪯ Stroke))
( ⪯ ( ⪯ Stroke))

View file

@ -4,4 +4,4 @@ description: "Check on \",\" (83)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/post_type_check/user_func_pos.typ
---
( ⪯ ( ⪰ Any ⪯ Stroke))
( ⪯ ( ⪯ Stroke))

View file

@ -4,4 +4,4 @@ description: "Check on \")\" (98)"
expression: literal_type
input_file: crates/tinymist-query/src/fixtures/post_type_check/user_named.typ
---
( ⪯ ( ⪰ Any | None) & "font": Any)
( ⪯ None & "font": Any)

Some files were not shown because too many files have changed in this diff Show more