dev: refactor def and use cache for improving definition (#179)

* chore: add a note to do.

* dev: cache def and use information

* dev: move signature cache to analysis level

* dev: refactor a bit for improving definition

* dev: more appropriate definition discover

* fix: clippy error
This commit is contained in:
Myriad-Dreamin 2024-04-11 20:45:02 +08:00 committed by GitHub
parent ebed95cbcd
commit c22f70b49e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 1109 additions and 667 deletions

View file

@ -6,8 +6,15 @@ pub mod color_exprs;
pub use color_exprs::*;
pub mod def_use;
pub use def_use::*;
pub mod import;
pub use import::*;
pub mod linked_def;
pub use linked_def::*;
pub mod signature;
pub use signature::*;
pub mod track_values;
pub use track_values::*;
mod prelude;
mod global;
pub use global::*;
@ -118,13 +125,81 @@ mod document_tests {
#[cfg(test)]
mod lexical_hierarchy_tests {
use def_use::DefUseSnapshot;
use std::collections::HashMap;
use def_use::DefUseInfo;
use lexical_hierarchy::LexicalKind;
use reflexo::path::unix_slash;
use reflexo::vector::ir::DefId;
use crate::analysis::def_use;
// use crate::prelude::*;
use crate::syntax::lexical_hierarchy;
use crate::syntax::{lexical_hierarchy, IdentDef, IdentRef};
use crate::tests::*;
/// A snapshot of the def-use information for testing.
///
/// Serializes as a map keyed by `"<ident>@<unix-style path>"` so snapshot
/// tests get a stable, human-readable view of every definition together with
/// the references that resolve to it.
pub struct DefUseSnapshot<'a>(pub &'a DefUseInfo);
impl<'a> Serialize for DefUseSnapshot<'a> {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeMap;
// HashMap<IdentRef, DefId>
// Invert the reference map so that each definition id lists all
// identifier references pointing at it.
let mut references: HashMap<DefId, Vec<IdentRef>> = {
let mut map = HashMap::new();
for (k, v) in &self.0.ident_refs {
map.entry(*v).or_insert_with(Vec::new).push(k.clone());
}
map
};
// sort, so snapshot output is deterministic across runs
for (_, v) in references.iter_mut() {
v.sort();
}
// One serialized entry: a definition plus the references resolving to it.
#[derive(Serialize)]
struct DefUseEntry<'a> {
def: &'a IdentDef,
refs: &'a Vec<IdentRef>,
}
let mut state = serializer.serialize_map(None)?;
// `ident_defs` is an IndexMap, so the enumeration index is exactly the
// `DefId` that was assigned during collection.
for (k, (ident_ref, ident_def)) in self.0.ident_defs.as_slice().iter().enumerate() {
let id = DefId(k as u64);
let empty_ref = Vec::new();
let entry = DefUseEntry {
def: ident_def,
refs: references.get(&id).unwrap_or(&empty_ref),
};
state.serialize_entry(
&format!(
"{}@{}",
ident_ref.1,
unix_slash(ident_ref.0.vpath().as_rootless_path())
),
&entry,
)?;
}
// References that never resolved to any definition are grouped under a
// synthetic "<nil>" entry at the end of the map.
if !self.0.undefined_refs.is_empty() {
let mut undefined_refs = self.0.undefined_refs.clone();
undefined_refs.sort();
let entry = DefUseEntry {
def: &IdentDef {
name: "<nil>".to_string(),
kind: LexicalKind::Block,
range: 0..0,
},
refs: &undefined_refs,
};
state.serialize_entry("<nil>", &entry)?;
}
state.end()
}
}
#[test]
fn scope() {
snapshot_testing("lexical_hierarchy", &|ctx, path| {

View file

@ -1,14 +1,9 @@
//! Hybrid analysis for function calls.
use core::fmt;
use std::borrow::Cow;
use ecow::{eco_format, eco_vec};
use typst::{
foundations::{Args, CastInfo, Closure},
syntax::SyntaxNode,
util::LazyHash,
};
use ecow::eco_vec;
use typst::{foundations::Args, syntax::SyntaxNode};
use super::{analyze_signature, ParamSpec, Signature};
use crate::prelude::*;
/// Describes kind of a parameter.
@ -44,14 +39,23 @@ pub struct CallInfo {
pub arg_mapping: HashMap<SyntaxNode, CallParamInfo>,
}
// todo: cache call
/// Analyzes a function call.
#[comemo::memoize]
pub fn analyze_call(func: Func, args: ast::Args<'_>) -> Option<Arc<CallInfo>> {
Some(Arc::new(analyze_call_no_cache(func, args)?))
pub fn analyze_call(
ctx: &mut AnalysisContext,
callee_node: LinkedNode,
args: ast::Args<'_>,
) -> Option<Arc<CallInfo>> {
Some(Arc::new(analyze_call_no_cache(ctx, callee_node, args)?))
}
/// Analyzes a function call without caching the result.
pub fn analyze_call_no_cache(func: Func, args: ast::Args<'_>) -> Option<CallInfo> {
pub fn analyze_call_no_cache(
ctx: &mut AnalysisContext,
callee_node: LinkedNode,
args: ast::Args<'_>,
) -> Option<CallInfo> {
let _ = ctx;
#[derive(Debug, Clone)]
enum ArgValue<'a> {
Instance(Args),
@ -60,6 +64,13 @@ pub fn analyze_call_no_cache(func: Func, args: ast::Args<'_>) -> Option<CallInfo
let mut with_args = eco_vec![ArgValue::Instantiating(args)];
let values = analyze_expr(ctx.world(), &callee_node);
let func = values.into_iter().find_map(|v| match v.0 {
Value::Func(f) => Some(f),
_ => None,
})?;
log::debug!("got function {func:?}");
use typst::foundations::func::Repr;
let mut func = func;
while let Repr::With(f) = func.inner() {
@ -67,7 +78,7 @@ pub fn analyze_call_no_cache(func: Func, args: ast::Args<'_>) -> Option<CallInfo
func = f.0.clone();
}
let signature = analyze_signature(func);
let signature = analyze_signature(ctx, func);
trace!("got signature {signature:?}");
let mut info = CallInfo {
@ -210,234 +221,3 @@ pub fn analyze_call_no_cache(func: Func, args: ast::Args<'_>) -> Option<CallInfo
Some(info)
}
/// Describes a function parameter.
#[derive(Debug, Clone)]
pub struct ParamSpec {
/// The parameter's name.
pub name: Cow<'static, str>,
/// Documentation for the parameter.
pub docs: Cow<'static, str>,
/// Describe what values this parameter accepts.
pub input: CastInfo,
/// The parameter's default name as type.
pub type_repr: Option<EcoString>,
/// The parameter's default name as value.
pub expr: Option<EcoString>,
/// Creates an instance of the parameter's default value.
pub default: Option<fn() -> Value>,
/// Is the parameter positional?
pub positional: bool,
/// Is the parameter named?
///
/// Can be true even if `positional` is true if the parameter can be given
/// in both variants.
pub named: bool,
/// Can the parameter be given any number of times?
pub variadic: bool,
/// Is the parameter settable with a set rule?
pub settable: bool,
}
impl ParamSpec {
fn from_static(s: &ParamInfo) -> Arc<Self> {
Arc::new(Self {
name: Cow::Borrowed(s.name),
docs: Cow::Borrowed(s.docs),
input: s.input.clone(),
type_repr: Some(eco_format!("{}", TypeExpr(&s.input))),
expr: None,
default: s.default,
positional: s.positional,
named: s.named,
variadic: s.variadic,
settable: s.settable,
})
}
}
/// Describes a function signature.
#[derive(Debug, Clone)]
pub struct Signature {
/// The positional parameters.
pub pos: Vec<Arc<ParamSpec>>,
/// The named parameters.
pub named: HashMap<Cow<'static, str>, Arc<ParamSpec>>,
/// Whether the function has fill, stroke, or size parameters.
pub has_fill_or_size_or_stroke: bool,
/// The rest parameter.
pub rest: Option<Arc<ParamSpec>>,
_broken: bool,
}
#[comemo::memoize]
pub(crate) fn analyze_signature(func: Func) -> Arc<Signature> {
use typst::foundations::func::Repr;
let params = match func.inner() {
Repr::With(..) => unreachable!(),
Repr::Closure(c) => analyze_closure_signature(c.clone()),
Repr::Element(..) | Repr::Native(..) => {
let params = func.params().unwrap();
params.iter().map(ParamSpec::from_static).collect()
}
};
let mut pos = vec![];
let mut named = HashMap::new();
let mut rest = None;
let mut broken = false;
let mut has_fill = false;
let mut has_stroke = false;
let mut has_size = false;
for param in params.into_iter() {
if param.named {
match param.name.as_ref() {
"fill" => {
has_fill = true;
}
"stroke" => {
has_stroke = true;
}
"size" => {
has_size = true;
}
_ => {}
}
named.insert(param.name.clone(), param.clone());
}
if param.variadic {
if rest.is_some() {
broken = true;
} else {
rest = Some(param.clone());
}
}
if param.positional {
pos.push(param);
}
}
Arc::new(Signature {
pos,
named,
rest,
has_fill_or_size_or_stroke: has_fill || has_stroke || has_size,
_broken: broken,
})
}
fn analyze_closure_signature(c: Arc<LazyHash<Closure>>) -> Vec<Arc<ParamSpec>> {
let mut params = vec![];
trace!("closure signature for: {:?}", c.node.kind());
let closure = &c.node;
let closure_ast = match closure.kind() {
SyntaxKind::Closure => closure.cast::<ast::Closure>().unwrap(),
_ => return params,
};
for param in closure_ast.params().children() {
match param {
ast::Param::Pos(ast::Pattern::Placeholder(..)) => {
params.push(Arc::new(ParamSpec {
name: Cow::Borrowed("_"),
input: CastInfo::Any,
type_repr: None,
expr: None,
default: None,
positional: true,
named: false,
variadic: false,
settable: false,
docs: Cow::Borrowed(""),
}));
}
ast::Param::Pos(e) => {
// todo: destructing
let name = e.bindings();
if name.len() != 1 {
continue;
}
let name = name[0].as_str();
params.push(Arc::new(ParamSpec {
name: Cow::Owned(name.to_owned()),
input: CastInfo::Any,
type_repr: None,
expr: None,
default: None,
positional: true,
named: false,
variadic: false,
settable: false,
docs: Cow::Borrowed(""),
}));
}
// todo: pattern
ast::Param::Named(n) => {
let expr = unwrap_expr(n.expr()).to_untyped().clone().into_text();
params.push(Arc::new(ParamSpec {
name: Cow::Owned(n.name().as_str().to_owned()),
input: CastInfo::Any,
type_repr: Some(expr.clone()),
expr: Some(expr.clone()),
default: None,
positional: false,
named: true,
variadic: false,
settable: true,
docs: Cow::Owned("Default value: ".to_owned() + expr.as_str()),
}));
}
ast::Param::Spread(n) => {
let ident = n.sink_ident().map(|e| e.as_str());
params.push(Arc::new(ParamSpec {
name: Cow::Owned(ident.unwrap_or_default().to_owned()),
input: CastInfo::Any,
type_repr: None,
expr: None,
default: None,
positional: false,
named: false,
variadic: true,
settable: false,
docs: Cow::Borrowed(""),
}));
}
}
}
params
}
/// Peels away any number of parenthesized wrappers around an expression.
fn unwrap_expr(mut e: ast::Expr) -> ast::Expr {
    loop {
        match e {
            ast::Expr::Parenthesized(inner) => e = inner.expr(),
            other => break other,
        }
    }
}
/// Renders a `CastInfo` as a short type expression (e.g. `int | str`).
struct TypeExpr<'a>(&'a CastInfo);

impl<'a> fmt::Display for TypeExpr<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.0 {
            CastInfo::Any => f.write_str("any"),
            CastInfo::Value(.., v) => f.write_str(v),
            CastInfo::Type(ty) => f.write_str(ty.short_name()),
            CastInfo::Union(variants) => {
                // Render each union member recursively and join with " | ".
                let rendered = variants
                    .iter()
                    .map(|v| TypeExpr(v).to_string())
                    .join(" | ");
                f.write_str(&rendered)
            }
        }
    }
}

View file

@ -6,18 +6,12 @@ use std::{
sync::Arc,
};
use ecow::EcoVec;
use log::info;
use reflexo::path::unix_slash;
pub use reflexo::vector::ir::DefId;
use serde::Serialize;
use typst::syntax::FileId as TypstFileId;
use typst::syntax::Source;
use super::SearchCtx;
use crate::syntax::{
find_source_by_import_path, IdentRef, LexicalHierarchy, LexicalKind, LexicalVarKind, ModSrc,
};
use crate::{adt::snapshot_map::SnapshotMap, syntax::LexicalModKind};
use super::{prelude::*, ImportInfo};
use crate::adt::snapshot_map::SnapshotMap;
use crate::syntax::find_source_by_import_path;
/// The type namespace of def-use relations
///
@ -29,28 +23,18 @@ enum Ns {
Value,
}
/// A flat and transient reference to some symbol in a source file.
///
/// See [`IdentRef`] for definition of a "transient" reference.
#[derive(Serialize, Clone)]
pub struct IdentDef {
/// The name of the symbol.
pub name: String,
/// The kind of the symbol.
pub kind: LexicalKind,
/// The byte range of the symbol in the source file.
pub range: Range<usize>,
}
type ExternalRefMap = HashMap<(TypstFileId, Option<String>), Vec<(Option<DefId>, IdentRef)>>;
/// The def-use information of a source file.
#[derive(Default)]
pub struct DefUseInfo {
ident_defs: indexmap::IndexMap<(TypstFileId, IdentRef), IdentDef>,
/// The definitions of symbols.
pub ident_defs: indexmap::IndexMap<(TypstFileId, IdentRef), IdentDef>,
external_refs: ExternalRefMap,
ident_refs: HashMap<IdentRef, DefId>,
undefined_refs: Vec<IdentRef>,
/// The references to defined symbols.
pub ident_refs: HashMap<IdentRef, DefId>,
/// The references to undefined symbols.
pub undefined_refs: Vec<IdentRef>,
exports_refs: Vec<DefId>,
exports_defs: HashMap<String, DefId>,
}
@ -115,20 +99,13 @@ impl DefUseInfo {
}
}
pub(super) fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseInfo>> {
pub(super) fn get_def_use_inner(
ctx: &mut AnalysisContext,
source: Source,
e: EcoVec<LexicalHierarchy>,
_m: Arc<ImportInfo>,
) -> Option<Arc<DefUseInfo>> {
let current_id = source.id();
ctx.ctx.get_mut(current_id);
let c = ctx.ctx.get(current_id).unwrap();
if let Some(info) = c.def_use() {
return Some(info);
}
if !ctx.searched.insert(current_id) {
return None;
}
let e = ctx.ctx.def_use_lexical_hierarchy(source)?;
let mut collector = DefUseCollector {
ctx,
@ -142,16 +119,12 @@ pub(super) fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<A
collector.scan(&e);
collector.calc_exports();
let res = Some(Arc::new(collector.info));
let c = ctx.ctx.get(current_id).unwrap();
// todo: cyclic import cause no any information
c.compute_def_use(|| res.clone());
res
Some(Arc::new(collector.info))
}
struct DefUseCollector<'a, 'b, 'w> {
ctx: &'a mut SearchCtx<'b, 'w>,
struct DefUseCollector<'a, 'w> {
ctx: &'a mut AnalysisContext<'w>,
info: DefUseInfo,
label_scope: SnapshotMap<String, DefId>,
id_scope: SnapshotMap<String, DefId>,
@ -160,7 +133,7 @@ struct DefUseCollector<'a, 'b, 'w> {
ext_src: Option<Source>,
}
impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
impl<'a, 'w> DefUseCollector<'a, 'w> {
fn enter<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
let id_snap = self.id_scope.snapshot();
let res = f(self);
@ -181,8 +154,7 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
let source = self.ext_src.as_ref()?;
log::debug!("import for def use: {:?}, name: {name}", source.id());
let (_, external_info) =
Some(source.id()).zip(get_def_use_inner(self.ctx, source.clone()))?;
let (_, external_info) = Some(source.id()).zip(self.ctx.def_use(source.clone()))?;
let ext_id = external_info.exports_defs.get(name)?;
self.import_from(&external_info, *ext_id);
@ -269,7 +241,7 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
ModSrc::Expr(_) => {}
ModSrc::Path(p) => {
let src = find_source_by_import_path(
self.ctx.ctx.world(),
self.ctx.world(),
self.current_id,
p.deref(),
);
@ -288,7 +260,7 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
if let Some(source) = &self.ext_src {
info!("diving source for def use: {:?}", source.id());
let (_, external_info) =
Some(source.id()).zip(get_def_use_inner(self.ctx, source.clone()))?;
Some(source.id()).zip(self.ctx.def_use(source.clone()))?;
for ext_id in &external_info.exports_refs {
self.import_from(&external_info, *ext_id);
@ -376,66 +348,3 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
);
}
}
/// A snapshot of the def-use information for testing.
pub struct DefUseSnapshot<'a>(pub &'a DefUseInfo);
impl<'a> Serialize for DefUseSnapshot<'a> {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
use serde::ser::SerializeMap;
// HashMap<IdentRef, DefId>
let mut references: HashMap<DefId, Vec<IdentRef>> = {
let mut map = HashMap::new();
for (k, v) in &self.0.ident_refs {
map.entry(*v).or_insert_with(Vec::new).push(k.clone());
}
map
};
// sort
for (_, v) in references.iter_mut() {
v.sort();
}
#[derive(Serialize)]
struct DefUseEntry<'a> {
def: &'a IdentDef,
refs: &'a Vec<IdentRef>,
}
let mut state = serializer.serialize_map(None)?;
for (k, (ident_ref, ident_def)) in self.0.ident_defs.as_slice().iter().enumerate() {
let id = DefId(k as u64);
let empty_ref = Vec::new();
let entry = DefUseEntry {
def: ident_def,
refs: references.get(&id).unwrap_or(&empty_ref),
};
state.serialize_entry(
&format!(
"{}@{}",
ident_ref.1,
unix_slash(ident_ref.0.vpath().as_rootless_path())
),
&entry,
)?;
}
if !self.0.undefined_refs.is_empty() {
let mut undefined_refs = self.0.undefined_refs.clone();
undefined_refs.sort();
let entry = DefUseEntry {
def: &IdentDef {
name: "<nil>".to_string(),
kind: LexicalKind::Block,
range: 0..0,
},
refs: &undefined_refs,
};
state.serialize_entry("<nil>", &entry)?;
}
state.end()
}
}

View file

@ -1,3 +1,4 @@
use std::sync::atomic::AtomicBool;
use std::{
collections::{HashMap, HashSet},
hash::Hash,
@ -7,7 +8,10 @@ use std::{
use ecow::EcoVec;
use once_cell::sync::OnceCell;
use parking_lot::RwLock;
use reflexo::hash::hash128;
use reflexo::{cow_mut::CowMut, debug_loc::DataSource, ImmutPath};
use typst::foundations;
use typst::{
diag::{eco_format, FileError, FileResult, PackageError},
syntax::{package::PackageSpec, Source, Span, VirtualPath},
@ -16,7 +20,7 @@ use typst::{
use typst::{foundations::Value, syntax::ast, text::Font};
use typst::{layout::Position, syntax::FileId as TypstFileId};
use super::{get_def_use_inner, DefUseInfo};
use super::{DefUseInfo, ImportInfo, Signature};
use crate::{
lsp_to_typst,
syntax::{
@ -28,6 +32,7 @@ use crate::{
/// A cache for module-level analysis results of a module.
///
/// You should not holds across requests, because source code may change.
#[derive(Default)]
pub struct ModuleAnalysisCache {
source: OnceCell<FileResult<Source>>,
def_use: OnceCell<Option<Arc<DefUseInfo>>>,
@ -80,17 +85,25 @@ impl Analysis {
.map(|v| {
v.def_use_lexical_hierarchy
.output
.read()
.as_ref()
.map_or(0, |e| e.iter().map(|e| e.estimated_memory()).sum())
})
.sum::<usize>()
}
/// Evicts stale entries from the signature cache.
///
/// `lifetime` is bumped once per analysis-context creation, so entries that
/// were last inserted 30 or more contexts ago are dropped.
fn gc(&mut self) {
self.caches
.signatures
.retain(|_, (l, _, _)| (self.caches.lifetime - *l) < 30);
}
}
struct ComputingNode<Inputs, Output> {
name: &'static str,
inputs: Option<Inputs>,
output: Option<Output>,
computing: AtomicBool,
inputs: RwLock<Option<Inputs>>,
output: RwLock<Option<Output>>,
}
pub(crate) trait ComputeDebug {
@ -102,56 +115,96 @@ impl ComputeDebug for Source {
self.id()
}
}
// Debug representation: the number of hierarchy entries, not the full tree.
impl ComputeDebug for EcoVec<LexicalHierarchy> {
fn compute_debug_repr(&self) -> impl std::fmt::Debug {
self.len()
}
}
// Debug representation: the number of recorded imports.
impl ComputeDebug for Arc<ImportInfo> {
fn compute_debug_repr(&self) -> impl std::fmt::Debug {
self.imports.len()
}
}
// Pairs compose element-wise; used for computing nodes with two inputs.
impl<A, B> ComputeDebug for (A, B)
where
A: ComputeDebug,
B: ComputeDebug,
{
fn compute_debug_repr(&self) -> impl std::fmt::Debug {
(self.0.compute_debug_repr(), self.1.compute_debug_repr())
}
}
impl<Inputs, Output> ComputingNode<Inputs, Output> {
fn new(name: &'static str) -> Self {
Self {
name,
inputs: None,
output: None,
computing: AtomicBool::new(false),
inputs: RwLock::new(None),
output: RwLock::new(None),
}
}
fn compute(
&mut self,
&self,
inputs: Inputs,
compute: impl FnOnce(Option<Inputs>, Inputs) -> Option<Output>,
) -> Option<Output>
) -> Result<Option<Output>, ()>
where
Inputs: ComputeDebug + Hash + Clone,
Output: Clone,
{
match &self.inputs {
if self
.computing
.swap(true, std::sync::atomic::Ordering::SeqCst)
{
return Err(());
}
let input_cmp = self.inputs.read();
let res = Ok(match input_cmp.as_ref() {
Some(s) if reflexo::hash::hash128(&inputs) == reflexo::hash::hash128(&s) => {
log::debug!(
"{}({:?}): hit cache",
self.name,
inputs.compute_debug_repr()
);
self.output.clone()
self.output.read().clone()
}
_ => {
s => {
let s = s.cloned();
drop(input_cmp);
log::info!("{}({:?}): compute", self.name, inputs.compute_debug_repr());
let output = compute(self.inputs.clone(), inputs.clone());
self.output = output.clone();
self.inputs = Some(inputs);
let output = compute(s, inputs.clone());
*self.output.write() = output.clone();
*self.inputs.write() = Some(inputs);
output
}
}
});
self.computing
.store(false, std::sync::atomic::Ordering::SeqCst);
res
}
}
/// A cache for module-level analysis results of a module.
///
/// It should not be held across requests, because the source code may change.
#[allow(clippy::type_complexity)]
pub struct ModuleAnalysisGlobalCache {
// Stage 1: source -> lexical hierarchy of the module.
def_use_lexical_hierarchy: ComputingNode<Source, EcoVec<LexicalHierarchy>>,
// Stage 2: lexical hierarchy -> resolved imports of the module.
import: Arc<ComputingNode<EcoVec<LexicalHierarchy>, Arc<ImportInfo>>>,
// Stage 3: (hierarchy, imports) -> def-use relations of the module.
def_use: Arc<ComputingNode<(EcoVec<LexicalHierarchy>, Arc<ImportInfo>), Arc<DefUseInfo>>>,
}
impl Default for ModuleAnalysisGlobalCache {
fn default() -> Self {
Self {
def_use_lexical_hierarchy: ComputingNode::new("def_use_lexical_hierarchy"),
import: Arc::new(ComputingNode::new("import")),
def_use: Arc::new(ComputingNode::new("def_use")),
}
}
}
@ -160,7 +213,32 @@ impl Default for ModuleAnalysisGlobalCache {
/// of a module.
#[derive(Default)]
pub struct AnalysisGlobalCaches {
lifetime: u64,
modules: HashMap<TypstFileId, ModuleAnalysisGlobalCache>,
signatures: HashMap<u128, (u64, foundations::Func, Arc<Signature>)>,
}
impl AnalysisGlobalCaches {
    /// Gets the cached signature of a function.
    ///
    /// Entries are keyed by `hash128(&func)`; the stored function is compared
    /// as well, so a hash collision cannot return the signature of an
    /// unrelated function.
    pub fn signature(&self, func: foundations::Func) -> Option<Arc<Signature>> {
        self.signatures
            .get(&hash128(&func))
            .and_then(|(_, cached_func, s)| (func == *cached_func).then_some(s.clone()))
    }

    /// Computes the signature of a function, reusing a cached result when
    /// available.
    ///
    /// Unlike a plain `entry().or_insert_with()`, an existing entry is only
    /// reused if it was produced for the *same* function: an entry stemming
    /// from a 128-bit hash collision is recomputed and overwritten instead of
    /// being handed back for the wrong function (mirroring the check in
    /// [`Self::signature`]).
    pub fn compute_signature(
        &mut self,
        func: foundations::Func,
        compute: impl FnOnce() -> Arc<Signature>,
    ) -> Arc<Signature> {
        let key = hash128(&func);
        // Fast path: a valid cache hit for exactly this function.
        if let Some((_, cached_func, s)) = self.signatures.get(&key) {
            if *cached_func == func {
                return s.clone();
            }
        }
        // Miss, or a collision with a different function: compute and store.
        let sig = compute();
        self.signatures.insert(key, (self.lifetime, func, sig.clone()));
        sig
    }
}
/// A cache for all level of analysis results of a module.
@ -219,6 +297,9 @@ impl<'w> AnalysisContext<'w> {
/// Create a new analysis context with borrowing the analysis data.
pub fn new_borrow(resources: &'w dyn AnalysisResources, a: &'w mut Analysis) -> Self {
a.caches.lifetime += 1;
a.gc();
Self {
resources,
analysis: CowMut::Borrowed(a),
@ -300,16 +381,7 @@ impl<'w> AnalysisContext<'w> {
/// Get the module-level analysis cache of a file.
pub fn get_mut(&mut self, file_id: TypstFileId) -> &ModuleAnalysisCache {
self.caches.modules.entry(file_id).or_insert_with(|| {
let source = OnceCell::new();
let def_use = OnceCell::new();
ModuleAnalysisCache { source, def_use }
})
}
/// Get the def-use information of a source file.
pub fn def_use(&mut self, source: Source) -> Option<Arc<DefUseInfo>> {
get_def_use_inner(&mut self.fork_for_search(), source)
self.caches.modules.entry(file_id).or_default()
}
/// Fork a new context for searching in the workspace.
@ -346,19 +418,53 @@ impl<'w> AnalysisContext<'w> {
typst_to_lsp::range(position, src, self.analysis.position_encoding)
}
pub(crate) fn def_use_lexical_hierarchy(
&mut self,
source: Source,
) -> Option<EcoVec<LexicalHierarchy>> {
self.analysis
.caches
.modules
.entry(source.id())
.or_default()
/// Get the def-use information of a source file.
pub fn def_use(&mut self, source: Source) -> Option<Arc<DefUseInfo>> {
let fid = source.id();
if let Some(res) = self.caches.modules.entry(fid).or_default().def_use() {
return Some(res);
}
let cache = self.at_module(fid);
let l = cache
.def_use_lexical_hierarchy
.compute(source, |_before, after| {
.compute(source.clone(), |_before, after| {
crate::syntax::get_lexical_hierarchy(after, crate::syntax::LexicalScopeKind::DefUse)
})
.ok()
.flatten()?;
let source2 = source.clone();
let m = cache
.import
.clone()
.compute(l.clone(), |_before, after| {
crate::analysis::get_import_info(self, source2, after)
})
.ok()
.flatten()?;
let cache = self.at_module(fid);
let res = cache
.def_use
.clone()
.compute((l, m), |_before, after| {
crate::analysis::get_def_use_inner(self, source, after.0, after.1)
})
.ok()
.flatten();
self.caches
.modules
.entry(fid)
.or_default()
.compute_def_use(|| res.clone());
res
}
fn at_module(&mut self, fid: TypstFileId) -> &mut ModuleAnalysisGlobalCache {
self.analysis.caches.modules.entry(fid).or_default()
}
pub(crate) fn mini_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {

View file

@ -0,0 +1,84 @@
//! Import analysis
use ecow::EcoVec;
use crate::syntax::find_source_by_import_path;
pub use super::prelude::*;
/// The import information of a source file.
#[derive(Default)]
pub struct ImportInfo {
/// The source file that this source file imports.
pub imports: indexmap::IndexMap<Range<usize>, Option<Source>>,
}
impl Hash for ImportInfo {
fn hash<H: Hasher>(&self, state: &mut H) {
// `IndexMap` does not implement `Hash` itself, so hash the length plus
// every (range, source) entry. The iteration order is insertion order,
// which follows the lexical scan of the module and is thus stable.
state.write_usize(self.imports.len());
for item in &self.imports {
item.hash(state);
}
}
}
/// Collects the import information of a source file.
///
/// `e` is the lexical hierarchy previously computed for `source`; the result
/// records, for every static import/include site, the resolved source file
/// (if any), keyed by the site's byte range.
pub(super) fn get_import_info(
    ctx: &mut AnalysisContext,
    source: Source,
    e: EcoVec<LexicalHierarchy>,
) -> Option<Arc<ImportInfo>> {
    let mut worker = ImportCollector {
        current_id: source.id(),
        info: ImportInfo::default(),
        ctx,
    };
    worker.scan(&e);
    let ImportCollector { info, .. } = worker;
    Some(Arc::new(info))
}
/// Walks a lexical hierarchy and records the import targets of one module.
struct ImportCollector<'a, 'w> {
// Analysis context used to resolve import paths to source files.
ctx: &'a mut AnalysisContext<'w>,
// The accumulated import information.
info: ImportInfo,
// The file whose imports are being collected.
current_id: TypstFileId,
}
impl<'a, 'w> ImportCollector<'a, 'w> {
/// Recursively scans hierarchy entries, resolving module imports with a
/// concrete path and recording them keyed by the import's byte range.
fn scan(&mut self, e: &'a [LexicalHierarchy]) {
for e in e {
match &e.info.kind {
// Headings are not produced by the def-use lexical scan.
LexicalKind::Heading(..) => unreachable!(),
LexicalKind::Var(..) => {}
// Recurse into nested scopes.
LexicalKind::Block => {
if let Some(e) = &e.children {
self.scan(e.as_slice());
}
}
// Import *items* (aliases, idents, stars, ...) carry no path of
// their own to resolve.
LexicalKind::Mod(
LexicalModKind::PathInclude
| LexicalModKind::PathVar
| LexicalModKind::ModuleAlias
| LexicalModKind::Ident
| LexicalModKind::Alias { .. }
| LexicalModKind::Star,
) => {}
LexicalKind::Mod(LexicalModKind::Module(p)) => match p {
// Dynamic import expressions cannot be resolved statically.
ModSrc::Expr(_) => {}
ModSrc::Path(p) => {
let src = find_source_by_import_path(
self.ctx.world(),
self.current_id,
p.deref(),
);
// `src` is `None` when the path does not resolve; still
// recorded so the range is known to be an import site.
self.info.imports.insert(e.info.range.clone(), src);
}
},
}
}
}
}

View file

@ -0,0 +1,283 @@
//! Linked definition analysis
use std::ops::Range;
use log::debug;
use typst::syntax::FileId as TypstFileId;
use typst::{foundations::Value, syntax::Span};
use super::prelude::*;
use crate::{
prelude::*,
syntax::{
find_source_by_expr, get_deref_target, DerefTarget, IdentRef, LexicalKind, LexicalModKind,
LexicalVarKind,
},
};
/// A linked definition in the source code
pub struct DefinitionLink {
/// The kind of the definition.
pub kind: LexicalKind,
/// A possible instance of the definition.
pub value: Option<Value>,
/// The name of the definition.
pub name: String,
/// The location of the definition.
pub def_at: Option<(TypstFileId, Range<usize>)>,
/// The range of the name of the definition.
pub name_range: Option<Range<usize>>,
}
// todo: field definition
/// Finds the definition of a symbol at the given deref target.
///
/// Import/include paths resolve directly to the target file. Other targets
/// are resolved first syntactically (via the def-use information of
/// `source`), then dynamically (via values observed by the compiler), and
/// finally through the global scope.
pub fn find_definition(
ctx: &mut AnalysisContext<'_>,
source: Source,
deref_target: DerefTarget<'_>,
) -> Option<DefinitionLink> {
let source_id = source.id();
let use_site = match deref_target {
// todo: field access
DerefTarget::VarAccess(node) | DerefTarget::Callee(node) => node,
// todo: better support (rename import path?)
DerefTarget::ImportPath(path) => {
let parent = path.parent()?;
let def_fid = parent.span().id()?;
let import_node = parent.cast::<ast::ModuleImport>()?;
let source = find_source_by_expr(ctx.world(), def_fid, import_node.source())?;
// The "definition" of an import path is the whole imported file.
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathVar),
name: String::new(),
value: None,
def_at: Some((source.id(), LinkedNode::new(source.root()).range())),
name_range: None,
});
}
DerefTarget::IncludePath(path) => {
let parent = path.parent()?;
let def_fid = parent.span().id()?;
let include_node = parent.cast::<ast::ModuleInclude>()?;
let source = find_source_by_expr(ctx.world(), def_fid, include_node.source())?;
// Likewise, an include path points at the whole included file.
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathInclude),
name: String::new(),
value: None,
def_at: Some((source.id(), (LinkedNode::new(source.root())).range())),
name_range: None,
});
}
};
// syntactic definition
let def_use = ctx.def_use(source)?;
let ident_ref = match use_site.cast::<ast::Expr>()? {
ast::Expr::Ident(e) => IdentRef {
name: e.get().to_string(),
range: use_site.range(),
},
ast::Expr::MathIdent(e) => IdentRef {
name: e.get().to_string(),
range: use_site.range(),
},
ast::Expr::FieldAccess(..) => {
debug!("find field access");
return None;
}
_ => {
debug!("unsupported kind {kind:?}", kind = use_site.kind());
return None;
}
};
// Prefer resolving the use site as a reference; fall back to the use site
// being a definition itself.
let def_id = def_use.get_ref(&ident_ref);
let def_id = def_id.or_else(|| Some(def_use.get_def(source_id, &ident_ref)?.0));
let def_info = def_id.and_then(|def_id| def_use.get_def_by_id(def_id));
// dynamic definition: values the compiler observed at the use site
let values = analyze_expr(ctx.world(), &use_site);
for v in values {
// mostly builtin functions
if let Value::Func(f) = v.0 {
use typst::foundations::func::Repr;
match f.inner() {
// The with function should be resolved as the with position
Repr::Closure(..) | Repr::With(..) => continue,
Repr::Native(..) | Repr::Element(..) => {}
}
let name = f
.name()
.or_else(|| def_info.as_ref().map(|(_, r)| r.name.as_str()));
if let Some(name) = name {
let span = f.span();
let fid = span.id()?;
let source = ctx.source_by_id(fid).ok()?;
return Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Function),
name: name.to_owned(),
value: Some(Value::Func(f.clone())),
// value: None,
def_at: Some((fid, source.find(span)?.range())),
name_range: def_info.map(|(_, r)| r.range.clone()),
});
}
}
}
// No syntactic definition found: try resolving through the global scope.
let Some((def_fid, def)) = def_info else {
return resolve_global(ctx, use_site.clone()).and_then(move |f| {
value_to_def(
ctx,
f,
|| Some(use_site.get().clone().into_text().to_string()),
None,
)
});
};
match def.kind {
LexicalKind::Heading(..) | LexicalKind::Block => unreachable!(),
LexicalKind::Var(
LexicalVarKind::Variable
| LexicalVarKind::ValRef
| LexicalVarKind::Label
| LexicalVarKind::LabelRef,
)
| LexicalKind::Mod(
LexicalModKind::Module(..)
| LexicalModKind::PathVar
| LexicalModKind::PathInclude
| LexicalModKind::ModuleAlias
| LexicalModKind::Alias { .. }
| LexicalModKind::Ident,
) => Some(DefinitionLink {
kind: def.kind.clone(),
name: def.name.clone(),
value: None,
def_at: Some((def_fid, def.range.clone())),
name_range: Some(def.range.clone()),
}),
LexicalKind::Var(LexicalVarKind::Function) => {
// Recover the concrete function value at the definition site so
// callers can inspect its signature.
let def_source = ctx.source_by_id(def_fid).ok()?;
let root = LinkedNode::new(def_source.root());
let def_name = root.leaf_at(def.range.start + 1)?;
log::info!("def_name for function: {def_name:?}", def_name = def_name);
let values = analyze_expr(ctx.world(), &def_name);
let func = values.into_iter().find(|v| matches!(v.0, Value::Func(..)));
log::info!("okay for function: {func:?}");
Some(DefinitionLink {
kind: def.kind.clone(),
name: def.name.clone(),
value: func.map(|v| v.0),
// value: None,
def_at: Some((def_fid, def.range.clone())),
name_range: Some(def.range.clone()),
})
}
LexicalKind::Mod(LexicalModKind::Star) => {
log::info!("unimplemented star import {:?}", ident_ref);
None
}
}
}
/// Resolve a callee expression to a function.
///
/// Tries, in order:
/// 1. values the compiler observed for the callee expression itself,
/// 2. the linked definition of the callee (for user-defined functions),
/// 3. a lookup in the global library scope.
pub fn resolve_callee(ctx: &mut AnalysisContext, callee: LinkedNode) -> Option<Func> {
{
let values = analyze_expr(ctx.world(), &callee);
values.into_iter().find_map(|v| match v.0 {
Value::Func(f) => Some(f),
_ => None,
})
}
.or_else(|| {
// Fall back to the definition discovered via def-use analysis.
let source = ctx.source_by_id(callee.span().id()?).ok()?;
let node = source.find(callee.span())?;
let cursor = node.offset();
let deref_target = get_deref_target(node, cursor)?;
let def = find_definition(ctx, source.clone(), deref_target)?;
match def.kind {
LexicalKind::Var(LexicalVarKind::Function) => match def.value {
Some(Value::Func(f)) => Some(f),
_ => None,
},
_ => None,
}
})
.or_else(|| {
// Finally, try the global (standard library) scope.
resolve_global(ctx, callee).and_then(|v| match v {
Value::Func(f) => Some(f),
_ => None,
})
})
}
// todo: math scope
/// Looks up a callee expression in the global library scope.
///
/// Supports plain identifiers (`foo`) and single-level field accesses whose
/// target is a module or function in the global scope (`mod.foo`); any other
/// expression shape resolves to `None`.
fn resolve_global(ctx: &AnalysisContext, callee: LinkedNode) -> Option<Value> {
let lib = ctx.world().library();
let v = match callee.cast::<ast::Expr>()? {
ast::Expr::Ident(ident) => lib.global.scope().get(&ident)?,
ast::Expr::FieldAccess(access) => match access.target() {
ast::Expr::Ident(target) => match lib.global.scope().get(&target)? {
Value::Module(module) => module.field(&access.field()).ok()?,
Value::Func(func) => func.field(&access.field()).ok()?,
_ => return None,
},
_ => return None,
},
_ => return None,
};
Some(v.clone())
}
/// Converts a concrete `Value` into a `DefinitionLink`.
///
/// `name` supplies a fallback name for values that do not carry one (e.g.
/// anonymous functions); `name_range` is passed through unchanged.
fn value_to_def(
ctx: &mut AnalysisContext,
value: Value,
name: impl FnOnce() -> Option<String>,
name_range: Option<Range<usize>>,
) -> Option<DefinitionLink> {
// Maps a span to its file and byte range, when the span belongs to a file.
let mut def_at = |span: Span| {
span.id().and_then(|fid| {
let source = ctx.source_by_id(fid).ok()?;
Some((fid, source.find(span)?.range()))
})
};
Some(match value {
Value::Func(func) => {
let name = func.name().map(|e| e.to_owned()).or_else(name)?;
let span = func.span();
DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Function),
name,
value: Some(Value::Func(func)),
def_at: def_at(span),
name_range,
}
}
Value::Module(module) => {
let name = module.name().to_string();
DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Variable),
name,
value: None,
// A module value carries no single source location to jump to.
def_at: None,
name_range,
}
}
_v => {
// Any other value: use the caller-provided name, no location.
let name = name()?;
DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathVar),
name,
value: None,
def_at: None,
name_range,
}
}
})
}

View file

@ -0,0 +1,18 @@
pub use std::{
collections::HashMap,
hash::{Hash, Hasher},
ops::{Deref, Range},
sync::Arc,
};
pub use reflexo::vector::ir::DefId;
pub use serde::Serialize;
pub use typst::syntax::FileId as TypstFileId;
pub use typst::syntax::Source;
pub use super::AnalysisContext;
pub use super::SearchCtx;
pub use crate::adt::snapshot_map::SnapshotMap;
pub(crate) use crate::syntax::{
IdentDef, IdentRef, LexicalHierarchy, LexicalKind, LexicalModKind, LexicalVarKind, ModSrc,
};

View file

@ -0,0 +1,259 @@
//! Analysis of function signatures.
use core::fmt;
use std::{borrow::Cow, collections::HashMap, sync::Arc};
use ecow::{eco_format, EcoString};
use itertools::Itertools;
use log::trace;
use typst::{
foundations::{CastInfo, Closure, Func, ParamInfo, Value},
syntax::{
ast::{self, AstNode},
SyntaxKind,
},
util::LazyHash,
};
use crate::AnalysisContext;
// pub fn analyze_signature
/// Describes a function parameter.
#[derive(Debug, Clone)]
pub struct ParamSpec {
    /// The parameter's name.
    pub name: Cow<'static, str>,
    /// Documentation for the parameter.
    pub docs: Cow<'static, str>,
    /// Describe what values this parameter accepts.
    pub input: CastInfo,
    /// A rendered representation of the parameter's type, if available.
    pub type_repr: Option<EcoString>,
    /// The source text of the parameter's default value expression, if any.
    pub expr: Option<EcoString>,
    /// Creates an instance of the parameter's default value.
    pub default: Option<fn() -> Value>,
    /// Is the parameter positional?
    pub positional: bool,
    /// Is the parameter named?
    ///
    /// Can be true even if `positional` is true if the parameter can be given
    /// in both variants.
    pub named: bool,
    /// Can the parameter be given any number of times?
    pub variadic: bool,
    /// Is the parameter settable with a set rule?
    pub settable: bool,
}
impl ParamSpec {
    /// Builds a `ParamSpec` from typst's static parameter metadata.
    fn from_static(info: &ParamInfo) -> Arc<Self> {
        // Render the accepted type eagerly; closure parameters have no such
        // information, so only this path fills `type_repr` from `input`.
        let type_repr = Some(eco_format!("{}", TypeExpr(&info.input)));
        Arc::new(Self {
            name: Cow::Borrowed(info.name),
            docs: Cow::Borrowed(info.docs),
            input: info.input.clone(),
            type_repr,
            expr: None,
            default: info.default,
            positional: info.positional,
            named: info.named,
            variadic: info.variadic,
            settable: info.settable,
        })
    }
}
/// Describes a function signature.
#[derive(Debug, Clone)]
pub struct Signature {
    /// The positional parameters.
    pub pos: Vec<Arc<ParamSpec>>,
    /// The named parameters.
    pub named: HashMap<Cow<'static, str>, Arc<ParamSpec>>,
    /// Whether the function has fill, stroke, or size parameters.
    pub has_fill_or_size_or_stroke: bool,
    /// The rest parameter.
    pub rest: Option<Arc<ParamSpec>>,
    /// Whether the signature is ill-formed, e.g. it declares more than one
    /// rest (spread) parameter.
    _broken: bool,
}
// pub enum SignatureTarget<'a> {
// Static(LinkedNode<'a>)
// }
/// Computes the signature of `func`, going through the analysis-level
/// signature cache (`compute_signature`) so repeated queries for the same
/// function reuse the previous result.
pub(crate) fn analyze_signature(ctx: &mut AnalysisContext, func: Func) -> Arc<Signature> {
    ctx.analysis
        .caches
        .compute_signature(func.clone(), || analyze_dyn_signature(func))
}
/// Computes the signature of `func` without consulting any cache.
pub(crate) fn analyze_dyn_signature(func: Func) -> Arc<Signature> {
    use typst::foundations::func::Repr;

    // Collect the raw parameter specs, depending on how the function is
    // defined.
    let params = match func.inner() {
        // `with` wrappers are expected to be unwrapped by callers.
        Repr::With(..) => unreachable!(),
        Repr::Closure(c) => analyze_closure_signature(c.clone()),
        Repr::Element(..) | Repr::Native(..) => {
            // Statically-defined functions always carry parameter metadata.
            let params = func.params().unwrap();
            params.iter().map(ParamSpec::from_static).collect()
        }
    };

    // Partition the specs into positional, named, and rest parameters.
    let mut pos = vec![];
    let mut named = HashMap::new();
    let mut rest = None;
    let mut broken = false;
    let mut has_fill_or_size_or_stroke = false;

    for param in params {
        if param.named {
            if matches!(param.name.as_ref(), "fill" | "stroke" | "size") {
                has_fill_or_size_or_stroke = true;
            }
            named.insert(param.name.clone(), param.clone());
        }
        if param.variadic {
            if rest.is_some() {
                // A second spread parameter makes the signature ill-formed.
                broken = true;
            } else {
                rest = Some(param.clone());
            }
        }
        if param.positional {
            pos.push(param);
        }
    }

    Arc::new(Signature {
        pos,
        named,
        rest,
        has_fill_or_size_or_stroke,
        _broken: broken,
    })
}
/// Extracts parameter specs from a closure's syntax tree.
fn analyze_closure_signature(c: Arc<LazyHash<Closure>>) -> Vec<Arc<ParamSpec>> {
    let mut params = vec![];

    trace!("closure signature for: {:?}", c.node.kind());
    let closure = &c.node;
    let closure_ast = match closure.kind() {
        SyntaxKind::Closure => closure.cast::<ast::Closure>().unwrap(),
        _ => return params,
    };

    // Closure parameters carry no type, default, or docs information; this
    // builds the shared shape for such parameters.
    let untyped = |name: Cow<'static, str>, positional: bool, variadic: bool| {
        Arc::new(ParamSpec {
            name,
            input: CastInfo::Any,
            type_repr: None,
            expr: None,
            default: None,
            positional,
            named: false,
            variadic,
            settable: false,
            docs: Cow::Borrowed(""),
        })
    };

    for param in closure_ast.params().children() {
        match param {
            // An anonymous positional parameter: `_`.
            ast::Param::Pos(ast::Pattern::Placeholder(..)) => {
                params.push(untyped(Cow::Borrowed("_"), true, false));
            }
            ast::Param::Pos(pattern) => {
                // todo: destructuring patterns with more than one binding
                let bindings = pattern.bindings();
                if bindings.len() != 1 {
                    continue;
                }
                let name = bindings[0].as_str().to_owned();
                params.push(untyped(Cow::Owned(name), true, false));
            }
            // todo: pattern
            ast::Param::Named(named) => {
                // Named parameters record their default expression verbatim,
                // reusing it for both the type and value representations.
                let expr = unwrap_expr(named.expr()).to_untyped().clone().into_text();
                params.push(Arc::new(ParamSpec {
                    name: Cow::Owned(named.name().as_str().to_owned()),
                    input: CastInfo::Any,
                    type_repr: Some(expr.clone()),
                    expr: Some(expr.clone()),
                    default: None,
                    positional: false,
                    named: true,
                    variadic: false,
                    settable: true,
                    docs: Cow::Owned("Default value: ".to_owned() + expr.as_str()),
                }));
            }
            // A spread parameter, e.g. `..args`; the sink may be anonymous.
            ast::Param::Spread(spread) => {
                let sink = spread.sink_ident().map_or("", |ident| ident.as_str());
                params.push(untyped(Cow::Owned(sink.to_owned()), false, true));
            }
        }
    }

    params
}
/// Strips any number of surrounding parentheses from an expression.
fn unwrap_expr(e: ast::Expr) -> ast::Expr {
    match e {
        ast::Expr::Parenthesized(inner) => unwrap_expr(inner.expr()),
        other => other,
    }
}
/// Formats a [`CastInfo`] as a human-readable type expression.
struct TypeExpr<'a>(&'a CastInfo);

impl<'a> fmt::Display for TypeExpr<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.0 {
            CastInfo::Any => f.write_str("any"),
            CastInfo::Value(.., s) => f.write_str(s),
            CastInfo::Type(ty) => f.write_str(ty.short_name()),
            // Render unions as ` | `-separated alternatives.
            CastInfo::Union(variants) => {
                let rendered = variants.iter().map(|e| TypeExpr(e).to_string()).join(" | ");
                f.write_str(&rendered)
            }
        }
    }
}

View file

@ -0,0 +1,2 @@
#(/* ident after */ table());

View file

@ -0,0 +1,2 @@
#(/* ident after */ sys);

View file

@ -0,0 +1,2 @@
#(/* ident after */ red);

View file

@ -0,0 +1,2 @@
#(/* ident after */ sys.version);

View file

@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/hover.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/builtin.typ
---
{
"contents": "```typc\nlet table(children, children, align: alignment | auto | array | function, column-gutter: auto | relative | fraction | int | array, columns: auto | relative | fraction | int | array, fill: color | gradient | pattern | none | array | function, gutter: auto | relative | fraction | int | array, inset: relative | dictionary | array | function, row-gutter: auto | relative | fraction | int | array, rows: auto | relative | fraction | int | array, stroke: length | color | gradient | pattern | dictionary | stroke | none | array | function);\n```\n---\n\n\nA table of items.\n\nTables are used to arrange content in cells. Cells can contain arbitrary\ncontent, including multiple paragraphs and are specified in row-major order.\nFor a hands-on explanation of all the ways you can use and customize tables\nin Typst, check out the [table guide]($guides/table-guide).\n\nBecause tables are just grids with different defaults for some cell\nproperties (notably `stroke` and `inset`), refer to the [grid\ndocumentation]($grid) for more information on how to size the table tracks\nand specify the cell appearance properties.\n\nIf you are unsure whether you should be using a table or a grid, consider\nwhether the content you are arranging semantically belongs together as a set\nof related data points or similar or whether you are just want to enhance\nyour presentation by arranging unrelated content in a grid. In the former\ncase, a table is the right choice, while in the latter case, a grid is more\nappropriate. Furthermore, Typst will annotate its output in the future such\nthat screenreaders will annouce content in `table` as tabular while a grid's\ncontent will be announced no different than multiple content blocks in the\ndocument flow.\n\nNote that, to override a particular cell's properties or apply show rules on\ntable cells, you can use the [`table.cell`]($table.cell) element. 
See its\ndocumentation for more information.\n\nAlthough the `table` and the `grid` share most properties, set and show\nrules on one of them do not affect the other.\n\nTo give a table a caption and make it [referenceable]($ref), put it into a\n[figure].\n\n# Example\n\nThe example below demonstrates some of the most common table options.\n```example\n#table(\n columns: (1fr, auto, auto),\n inset: 10pt,\n align: horizon,\n table.header(\n [], [*Area*], [*Parameters*],\n ),\n image(\"cylinder.svg\"),\n $ pi h (D^2 - d^2) / 4 $,\n [\n $h$: height \\\n $D$: outer radius \\\n $d$: inner radius\n ],\n image(\"tetrahedron.svg\"),\n $ sqrt(2) / 12 a^3 $,\n [$a$: edge length]\n)\n```\n\nMuch like with grids, you can use [`table.cell`]($table.cell) to customize\nthe appearance and the position of each cell.\n\n```example\n>>> #set page(width: auto)\n>>> #set text(font: \"IBM Plex Sans\")\n>>> #let gray = rgb(\"#565565\")\n>>>\n#set table(\n stroke: none,\n gutter: 0.2em,\n fill: (x, y) =>\n if x == 0 or y == 0 { gray },\n inset: (right: 1.5em),\n)\n\n#show table.cell: it => {\n if it.x == 0 or it.y == 0 {\n set text(white)\n strong(it)\n } else if it.body == [] {\n // Replace empty cells with 'N/A'\n pad(..it.inset)[_N/A_]\n } else {\n it\n }\n}\n\n#let a = table.cell(\n fill: green.lighten(60%),\n)[A]\n#let b = table.cell(\n fill: aqua.lighten(60%),\n)[B]\n\n#table(\n columns: 4,\n [], [Exam 1], [Exam 2], [Exam 3],\n\n [John], [], a, [],\n [Mary], [], a, a,\n [Robert], b, a, b,\n)\n```",
"range": "0:20:0:25"
}

View file

@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/hover.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/builtin_module.typ
---
{
"contents": "```typc\n// Values\n<module sys>\n```\n---\n```typc\nlet sys;\n```",
"range": "0:20:0:23"
}

View file

@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/hover.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/builtin_var.typ
---
{
"contents": "```typst\nrgb(\"#ff4136\")\n```",
"range": "0:20:0:23"
}

View file

@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/hover.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/builtin_var2.typ
---
{
"contents": "```typc\n// Values\n<module sys>\n```\n---\n```typc\nlet sys;\n```",
"range": "0:20:0:23"
}

View file

@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/hover.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/hover/user.typ
---
{
"contents": "```typc\nlet f();\n```\n---\n\n\nTest",
"range": "3:20:3:21"
}

View file

@ -0,0 +1,5 @@
/// Test
#let f() = 1;
#(/* ident after */ f());

View file

@ -0,0 +1,2 @@
#(/* ident after */ table());

View file

@ -0,0 +1,6 @@
---
source: crates/tinymist-query/src/prepare_rename.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/prepare_rename/builtin.typ
---
null

View file

@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/prepare_rename.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/prepare_rename/user.typ
---
{
"placeholder": "f",
"range": "3:20:3:21"
}

View file

@ -0,0 +1,5 @@
/// Test
#let f() = 1;
#(/* ident after */ f());

View file

@ -1,16 +1,6 @@
use std::ops::Range;
use log::debug;
use typst::syntax::FileId as TypstFileId;
use crate::{
prelude::*,
syntax::{
find_source_by_expr, get_deref_target, DerefTarget, IdentRef, LexicalKind, LexicalModKind,
LexicalVarKind,
},
SemanticRequest,
};
use crate::{analysis::find_definition, prelude::*, syntax::get_deref_target, SemanticRequest};
/// The [`textDocument/definition`] request asks the server for the definition
/// location of a symbol at a given text document position.
@ -52,11 +42,13 @@ impl SemanticRequest for GotoDefinitionRequest {
let def = find_definition(ctx, source.clone(), deref_target)?;
let span_path = ctx.path_for_id(def.fid).ok()?;
let (fid, def_range) = def.def_at?;
let span_path = ctx.path_for_id(fid).ok()?;
let uri = path_to_url(&span_path).ok()?;
let span_source = ctx.source_by_id(def.fid).ok()?;
let range = ctx.to_lsp_range(def.def_range, &span_source);
let span_source = ctx.source_by_id(fid).ok()?;
let range = ctx.to_lsp_range(def_range, &span_source);
let res = Some(GotoDefinitionResponse::Link(vec![LocationLink {
origin_selection_range: Some(origin_selection_range),
@ -65,168 +57,11 @@ impl SemanticRequest for GotoDefinitionRequest {
target_selection_range: range,
}]));
debug!("goto_definition: {:?} {res:?}", def.fid);
debug!("goto_definition: {fid:?} {res:?}");
res
}
}
pub(crate) struct DefinitionLink {
pub kind: LexicalKind,
pub value: Option<Value>,
pub fid: TypstFileId,
pub name: String,
pub def_range: Range<usize>,
pub name_range: Option<Range<usize>>,
}
// todo: field definition
pub(crate) fn find_definition(
ctx: &mut AnalysisContext<'_>,
source: Source,
deref_target: DerefTarget<'_>,
) -> Option<DefinitionLink> {
let source_id = source.id();
let use_site = match deref_target {
        // todo: field access
DerefTarget::VarAccess(node) | DerefTarget::Callee(node) => node,
// todo: better support (rename import path?)
DerefTarget::ImportPath(path) => {
let parent = path.parent()?;
let def_fid = parent.span().id()?;
let import_node = parent.cast::<ast::ModuleImport>()?;
let source = find_source_by_expr(ctx.world(), def_fid, import_node.source())?;
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathVar),
name: String::new(),
value: None,
fid: source.id(),
def_range: (LinkedNode::new(source.root())).range(),
name_range: None,
});
}
DerefTarget::IncludePath(path) => {
let parent = path.parent()?;
let def_fid = parent.span().id()?;
let include_node = parent.cast::<ast::ModuleInclude>()?;
let source = find_source_by_expr(ctx.world(), def_fid, include_node.source())?;
return Some(DefinitionLink {
kind: LexicalKind::Mod(LexicalModKind::PathInclude),
name: String::new(),
value: None,
fid: source.id(),
def_range: (LinkedNode::new(source.root())).range(),
name_range: None,
});
}
};
// syntactic definition
let def_use = ctx.def_use(source)?;
let ident_ref = match use_site.cast::<ast::Expr>()? {
ast::Expr::Ident(e) => IdentRef {
name: e.get().to_string(),
range: use_site.range(),
},
ast::Expr::MathIdent(e) => IdentRef {
name: e.get().to_string(),
range: use_site.range(),
},
ast::Expr::FieldAccess(..) => {
debug!("find field access");
return None;
}
_ => {
debug!("unsupported kind {kind:?}", kind = use_site.kind());
return None;
}
};
let def_id = def_use.get_ref(&ident_ref);
let def_id = def_id.or_else(|| Some(def_use.get_def(source_id, &ident_ref)?.0));
let def_info = def_id.and_then(|def_id| def_use.get_def_by_id(def_id));
let values = analyze_expr(ctx.world(), &use_site);
for v in values {
// mostly builtin functions
if let Value::Func(f) = v.0 {
use typst::foundations::func::Repr;
match f.inner() {
// The with function should be resolved as the with position
Repr::Closure(..) | Repr::With(..) => continue,
Repr::Native(..) | Repr::Element(..) => {}
}
let name = f
.name()
.or_else(|| def_info.as_ref().map(|(_, r)| r.name.as_str()));
if let Some(name) = name {
let span = f.span();
let fid = span.id()?;
let source = ctx.source_by_id(fid).ok()?;
return Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Function),
name: name.to_owned(),
value: Some(Value::Func(f.clone())),
fid,
def_range: source.find(span)?.range(),
name_range: def_info.map(|(_, r)| r.range.clone()),
});
}
}
}
let (def_fid, def) = def_info?;
match def.kind {
LexicalKind::Heading(..) | LexicalKind::Block => unreachable!(),
LexicalKind::Var(
LexicalVarKind::Variable
| LexicalVarKind::ValRef
| LexicalVarKind::Label
| LexicalVarKind::LabelRef,
)
| LexicalKind::Mod(
LexicalModKind::Module(..)
| LexicalModKind::PathVar
| LexicalModKind::PathInclude
| LexicalModKind::ModuleAlias
| LexicalModKind::Alias { .. }
| LexicalModKind::Ident,
) => Some(DefinitionLink {
kind: def.kind.clone(),
name: def.name.clone(),
value: None,
fid: def_fid,
def_range: def.range.clone(),
name_range: Some(def.range.clone()),
}),
LexicalKind::Var(LexicalVarKind::Function) => {
let def_source = ctx.source_by_id(def_fid).ok()?;
let root = LinkedNode::new(def_source.root());
let def_name = root.leaf_at(def.range.start + 1)?;
log::info!("def_name for function: {def_name:?}", def_name = def_name);
let values = analyze_expr(ctx.world(), &def_name);
let func = values.into_iter().find(|v| matches!(v.0, Value::Func(..)));
log::info!("okay for function: {func:?}");
Some(DefinitionLink {
kind: def.kind.clone(),
name: def.name.clone(),
value: func.map(|v| v.0),
fid: def_fid,
def_range: def.range.clone(),
name_range: Some(def.range.clone()),
})
}
LexicalKind::Mod(LexicalModKind::Star) => {
log::info!("unimplemented star import {:?}", ident_ref);
None
}
}
}
#[cfg(test)]
mod tests {
use super::*;

View file

@ -1,12 +1,12 @@
use core::fmt;
use crate::{
analysis::analyze_signature,
find_definition, jump_from_cursor,
analysis::{analyze_signature, find_definition, DefinitionLink, Signature},
jump_from_cursor,
prelude::*,
syntax::{find_document_before, get_deref_target, LexicalKind, LexicalVarKind},
upstream::{expr_tooltip, tooltip, Tooltip},
DefinitionLink, LspHoverContents, StatefulRequest,
LspHoverContents, StatefulRequest,
};
/// The [`textDocument/hover`] request asks the server for hover information at
@ -136,12 +136,17 @@ fn def_tooltip(
| LexicalKind::Block
| LexicalKind::Heading(..) => None,
LexicalKind::Var(LexicalVarKind::Function) => {
let sig = if let Some(Value::Func(func)) = &lnk.value {
Some(analyze_signature(ctx, func.clone()))
} else {
None
};
results.push(MarkedString::LanguageString(LanguageString {
language: "typc".to_owned(),
value: format!(
"let {name}({params});",
name = lnk.name,
params = ParamTooltip(&lnk)
params = ParamTooltip(sig)
),
}));
@ -182,16 +187,13 @@ fn def_tooltip(
}
}
struct ParamTooltip<'a>(&'a DefinitionLink);
struct ParamTooltip(Option<Arc<Signature>>);
impl<'a> fmt::Display for ParamTooltip<'a> {
impl fmt::Display for ParamTooltip {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Some(Value::Func(func)) = &self.0.value else {
let Some(sig) = &self.0 else {
return Ok(());
};
let sig = analyze_signature(func.clone());
let mut is_first = true;
let mut write_sep = |f: &mut fmt::Formatter<'_>| {
if is_first {
@ -250,8 +252,10 @@ impl DocTooltip {
}
};
let src = ctx.source_by_id(lnk.fid).ok()?;
find_document_before(&src, lnk.def_range.start)
let (fid, def_range) = lnk.def_at.clone()?;
let src = ctx.source_by_id(fid).ok()?;
find_document_before(&src, def_range.start)
}
}
@ -279,3 +283,24 @@ impl DocTooltip {
Some(docs)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::*;
#[test]
fn test() {
snapshot_testing("hover", &|world, path| {
let source = world.source_by_path(&path).unwrap();
let request = HoverRequest {
path: path.clone(),
position: find_test_position(&source),
};
let result = request.request(world, None);
assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
});
}
}

View file

@ -68,7 +68,7 @@ impl SemanticRequest for InlayHintRequest {
let source = ctx.source_by_path(&self.path).ok()?;
let range = ctx.to_typst_range(self.range, &source)?;
let hints = inlay_hint(ctx.world(), &source, range, ctx.position_encoding()).ok()?;
let hints = inlay_hint(ctx, &source, range, ctx.position_encoding()).ok()?;
debug!(
"got inlay hints on {source:?} => {hints:?}",
source = source.id(),
@ -84,22 +84,22 @@ impl SemanticRequest for InlayHintRequest {
}
fn inlay_hint(
world: &dyn World,
ctx: &mut AnalysisContext,
source: &Source,
range: Range<usize>,
encoding: PositionEncoding,
) -> FileResult<Vec<InlayHint>> {
const SMART: InlayHintConfig = InlayHintConfig::smart();
struct InlayHintWorker<'a> {
world: &'a dyn World,
struct InlayHintWorker<'a, 'w> {
ctx: &'a mut AnalysisContext<'w>,
source: &'a Source,
range: Range<usize>,
encoding: PositionEncoding,
hints: Vec<InlayHint>,
}
impl InlayHintWorker<'_> {
impl InlayHintWorker<'_, '_> {
fn analyze(&mut self, node: LinkedNode) {
let rng = node.range();
if rng.start >= self.range.end || rng.end <= self.range.start {
@ -148,15 +148,7 @@ fn inlay_hint(
let args = f.args();
let args_node = node.find(args.span())?;
// todo: reduce many such patterns
let values = analyze_expr(self.world, &callee_node);
let func = values.into_iter().find_map(|v| match v.0 {
Value::Func(f) => Some(f),
_ => None,
})?;
log::debug!("got function {func:?}");
let call_info = analyze_call(func, args)?;
let call_info = analyze_call(self.ctx, callee_node, args)?;
log::debug!("got call_info {call_info:?}");
let check_single_pos_arg = || {
@ -298,7 +290,7 @@ fn inlay_hint(
}
let mut worker = InlayHintWorker {
world,
ctx,
source,
range,
encoding,

View file

@ -1,5 +1,8 @@
use crate::{
find_definition, prelude::*, syntax::get_deref_target, DefinitionLink, SemanticRequest,
analysis::{find_definition, DefinitionLink},
prelude::*,
syntax::get_deref_target,
SemanticRequest,
};
use log::debug;
@ -82,14 +85,37 @@ pub(crate) fn validate_renaming_definition(lnk: &DefinitionLink) -> Option<()> {
}
}
if lnk.fid.package().is_some() {
let (fid, _def_range) = lnk.def_at.clone()?;
if fid.package().is_some() {
debug!(
"prepare_rename: {name} is in a package {pkg:?}",
name = lnk.name,
pkg = lnk.fid.package()
pkg = fid.package()
);
return None;
}
Some(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::*;
#[test]
fn test() {
snapshot_testing("prepare_rename", &|world, path| {
let source = world.source_by_path(&path).unwrap();
let request = PrepareRenameRequest {
path: path.clone(),
position: find_test_position(&source),
};
let result = request.request(world);
assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
});
}
}

View file

@ -2,7 +2,7 @@ use log::debug;
use lsp_types::TextEdit;
use crate::{
find_definition, find_references, prelude::*, syntax::get_deref_target,
analysis::find_definition, find_references, prelude::*, syntax::get_deref_target,
validate_renaming_definition, SemanticRequest,
};
@ -44,10 +44,12 @@ impl SemanticRequest for RenameRequest {
let mut editions = HashMap::new();
let def_loc = {
let def_source = ctx.source_by_id(lnk.fid).ok()?;
let (fid, _def_range) = lnk.def_at?;
let span_path = ctx.path_for_id(lnk.fid).ok()?;
let def_loc = {
let def_source = ctx.source_by_id(fid).ok()?;
let span_path = ctx.path_for_id(fid).ok()?;
let uri = path_to_url(&span_path).ok()?;
let Some(range) = lnk.name_range else {

View file

@ -94,3 +94,16 @@ impl<'de> Deserialize<'de> for IdentRef {
})
}
}
/// A flat and transient reference to some symbol in a source file.
///
/// See [`IdentRef`] for definition of a "transient" reference.
#[derive(Serialize, Clone)]
pub struct IdentDef {
/// The name of the symbol.
pub name: String,
/// The kind of the symbol.
pub kind: LexicalKind,
/// The byte range of the symbol in the source file.
pub range: Range<usize>,
}

View file

@ -6,7 +6,7 @@ use std::{
};
use once_cell::sync::Lazy;
use serde::Serialize;
pub use serde::Serialize;
use serde_json::{ser::PrettyFormatter, Serializer, Value};
use typst::syntax::{
ast::{self, AstNode},

View file

@ -2,14 +2,11 @@ use super::{Completion, CompletionContext, CompletionKind};
use std::collections::BTreeMap;
use ecow::{eco_format, EcoString};
use typst::foundations::{Func, Value};
use typst::foundations::Value;
use typst::syntax::ast::AstNode;
use typst::syntax::{ast, SyntaxKind};
use crate::analysis::{analyze_import, analyze_signature};
use crate::find_definition;
use crate::prelude::analyze_expr;
use crate::syntax::{get_deref_target, LexicalKind, LexicalVarKind};
use crate::analysis::{analyze_import, analyze_signature, resolve_callee};
use crate::upstream::plain_docs_sentence;
impl<'a, 'w> CompletionContext<'a, 'w> {
@ -166,7 +163,11 @@ pub fn param_completions<'a>(
set: bool,
args: ast::Args<'a>,
) {
let Some(func) = resolve_callee(ctx, callee) else {
let Some(func) = ctx
.root
.find(callee.span())
.and_then(|callee| resolve_callee(ctx.ctx, callee))
else {
return;
};
@ -178,7 +179,7 @@ pub fn param_completions<'a>(
func = f.0.clone();
}
let signature = analyze_signature(func.clone());
let signature = analyze_signature(ctx.ctx, func.clone());
// Exclude named arguments which are already present.
let exclude: Vec<_> = args
@ -230,7 +231,11 @@ pub fn named_param_value_completions<'a>(
callee: ast::Expr<'a>,
name: &str,
) {
let Some(func) = resolve_callee(ctx, callee) else {
let Some(func) = ctx
.root
.find(callee.span())
.and_then(|callee| resolve_callee(ctx.ctx, callee))
else {
return;
};
@ -242,7 +247,7 @@ pub fn named_param_value_completions<'a>(
func = f.0.clone();
}
let signature = analyze_signature(func.clone());
let signature = analyze_signature(ctx.ctx, func.clone());
let Some(param) = signature.named.get(name) else {
return;
@ -270,60 +275,3 @@ pub fn named_param_value_completions<'a>(
ctx.enrich(" ", "");
}
}
/// Resolve a callee expression to a function.
// todo: fallback to static analysis if we can't resolve the callee
pub fn resolve_callee<'a>(
ctx: &mut CompletionContext<'a, '_>,
callee: ast::Expr<'a>,
) -> Option<Func> {
resolve_global_dyn_callee(ctx, callee)
.or_else(|| {
let source = ctx.ctx.source_by_id(callee.span().id()?).ok()?;
let node = source.find(callee.span())?;
let cursor = node.offset();
let deref_target = get_deref_target(node, cursor)?;
let def = find_definition(ctx.ctx, source.clone(), deref_target)?;
match def.kind {
LexicalKind::Var(LexicalVarKind::Function) => match def.value {
Some(Value::Func(f)) => Some(f),
_ => None,
},
_ => None,
}
})
.or_else(|| {
let lib = ctx.world().library();
let value = match callee {
ast::Expr::Ident(ident) => lib.global.scope().get(&ident)?,
ast::Expr::FieldAccess(access) => match access.target() {
ast::Expr::Ident(target) => match lib.global.scope().get(&target)? {
Value::Module(module) => module.field(&access.field()).ok()?,
Value::Func(func) => func.field(&access.field()).ok()?,
_ => return None,
},
_ => return None,
},
_ => return None,
};
match value {
Value::Func(func) => Some(func.clone()),
_ => None,
}
})
}
/// Resolve a callee expression to a dynamic function.
// todo: fallback to static analysis if we can't resolve the callee
fn resolve_global_dyn_callee<'a>(
ctx: &CompletionContext<'a, '_>,
callee: ast::Expr<'a>,
) -> Option<Func> {
let values = analyze_expr(ctx.world(), &ctx.root.find(callee.span())?);
values.into_iter().find_map(|v| match v.0 {
Value::Func(f) => Some(f),
_ => None,
})
}