dev: reimplements definition analysis (#43)

* dev: reimplements definition based on def use analysis

* dev: reimplements lsp apis based on new definition api

* fix: most cases of references

* fix: scope of params
This commit is contained in:
Myriad-Dreamin 2024-03-15 22:05:53 +08:00 committed by GitHub
parent 5fa4f8f94f
commit da7028f59c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
48 changed files with 746 additions and 1110 deletions

View file

@ -1,5 +1,3 @@
pub mod definition;
pub use definition::*;
pub mod def_use;
pub use def_use::*;
pub mod import;
@ -10,8 +8,6 @@ pub mod matcher;
pub use matcher::*;
pub mod module;
pub use module::*;
pub mod reference;
pub use reference::*;
pub mod track_values;
pub use track_values::*;

View file

@ -64,18 +64,43 @@ pub struct IdentDef {
pub range: Range<usize>,
}
type ExternalRefMap = HashMap<(TypstFileId, Option<String>), Vec<(Option<DefId>, IdentRef)>>;
#[derive(Default)]
pub struct DefUseInfo {
ident_defs: indexmap::IndexMap<(TypstFileId, IdentRef), IdentDef>,
external_refs: HashMap<(TypstFileId, Option<String>), Vec<IdentRef>>,
external_refs: ExternalRefMap,
ident_refs: HashMap<IdentRef, DefId>,
redefine_current: Option<TypstFileId>,
ident_redefines: HashMap<IdentRef, DefId>,
undefined_refs: Vec<IdentRef>,
exports_refs: Vec<DefId>,
pub exports_defs: HashMap<String, DefId>,
}
impl DefUseInfo {
pub fn get_ref(&self, ident: &IdentRef) -> Option<DefId> {
self.ident_refs.get(ident).copied()
}
pub fn get_def_by_id(&self, id: DefId) -> Option<(TypstFileId, &IdentDef)> {
let ((fid, _), def) = self.ident_defs.get_index(id.0 as usize)?;
Some((*fid, def))
}
pub fn get_def(&self, fid: TypstFileId, ident: &IdentRef) -> Option<(DefId, &IdentDef)> {
let (id, _, def) = self.ident_defs.get_full(&(fid, ident.clone()))?;
let (id, _, def) = self
.ident_defs
.get_full(&(fid, ident.clone()))
.or_else(|| {
if self.redefine_current == Some(fid) {
let def_id = self.ident_redefines.get(ident)?;
let kv = self.ident_defs.get_index(def_id.0 as usize)?;
Some((def_id.0 as usize, kv.0, kv.1))
} else {
None
}
})?;
Some((DefId(id as u64), def))
}
@ -89,7 +114,7 @@ impl DefUseInfo {
&self,
ext_id: TypstFileId,
ext_name: Option<String>,
) -> impl Iterator<Item = &IdentRef> {
) -> impl Iterator<Item = &(Option<DefId>, IdentRef)> {
self.external_refs
.get(&(ext_id, ext_name))
.into_iter()
@ -107,10 +132,6 @@ pub fn get_def_use(ctx: &mut AnalysisContext, source: Source) -> Option<Arc<DefU
fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseInfo>> {
let current_id = source.id();
if !ctx.searched.insert(current_id) {
return None;
}
ctx.ctx.get_mut(current_id);
let c = ctx.ctx.get(current_id).unwrap();
@ -118,6 +139,10 @@ fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseIn
return Some(info);
}
if !ctx.searched.insert(current_id) {
return None;
}
let e = get_lexical_hierarchy(source, LexicalScopeKind::DefUse)?;
let mut collector = DefUseCollector {
@ -130,6 +155,7 @@ fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseIn
ext_src: None,
};
collector.info.redefine_current = Some(current_id);
collector.scan(&e);
collector.calc_exports();
let res = Some(Arc::new(collector.info));
@ -160,27 +186,101 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
fn calc_exports(&mut self) {
self.info.exports_refs = self.id_scope.values().copied().collect();
self.info.exports_defs = self
.id_scope
.entries()
.map(|(k, v)| (k.clone(), *v))
.collect();
}
fn import_name(&mut self, name: &str) -> Option<()> {
let source = self.ext_src.as_ref()?;
log::debug!("import for def use: {:?}, name: {name}", source.id());
let (_, external_info) =
Some(source.id()).zip(get_def_use_inner(self.ctx, source.clone()))?;
let ext_id = external_info.exports_defs.get(name)?;
self.import_from(&external_info, *ext_id);
Some(())
}
fn import_from(&mut self, external_info: &DefUseInfo, v: DefId) {
// Use FileId in ident_defs map should lose stacked import
// information, but it is currently
// not a problem.
let ((ext_id, _), ext_sym) = external_info.ident_defs.get_index(v.0 as usize).unwrap();
let name = ext_sym.name.clone();
let ext_ref = IdentRef {
name: name.clone(),
range: ext_sym.range.clone(),
};
let (id, ..) = self
.info
.ident_defs
.insert_full((*ext_id, ext_ref), ext_sym.clone());
let id = DefId(id as u64);
self.id_scope.insert(name, id);
}
fn scan(&mut self, e: &'a [LexicalHierarchy]) -> Option<()> {
for e in e {
match &e.info.kind {
LexicalKind::Heading(..) => unreachable!(),
LexicalKind::Var(LexicalVarKind::Label) => self.insert(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::Label) => {
self.insert(Ns::Label, e);
}
LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_ref(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::Function)
| LexicalKind::Var(LexicalVarKind::Variable) => self.insert(Ns::Value, e),
| LexicalKind::Var(LexicalVarKind::Variable) => {
self.insert(Ns::Value, e);
}
LexicalKind::Mod(super::LexicalModKind::PathVar)
| LexicalKind::Mod(super::LexicalModKind::ModuleAlias) => {
self.insert_module(Ns::Value, e)
}
LexicalKind::Mod(super::LexicalModKind::Ident) => {
self.insert(Ns::Value, e);
self.insert_extern(e.info.name.clone(), e.info.range.clone());
match self.import_name(&e.info.name) {
Some(()) => {
self.insert_ref(Ns::Value, e);
self.insert_redef(e);
}
None => {
let def_id = self.insert(Ns::Value, e);
self.insert_extern(
e.info.name.clone(),
e.info.range.clone(),
Some(def_id),
);
}
}
}
LexicalKind::Mod(super::LexicalModKind::Alias { target }) => {
self.insert(Ns::Value, e);
self.insert_extern(target.name.clone(), target.range.clone());
match self.import_name(&target.name) {
Some(()) => {
self.insert_ident_ref(
Ns::Value,
IdentRef {
name: target.name.clone(),
range: target.range.clone(),
},
);
self.insert(Ns::Value, e);
}
None => {
let def_id = self.insert(Ns::Value, e);
self.insert_extern(
target.name.clone(),
target.range.clone(),
Some(def_id),
);
}
}
}
LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_ref(Ns::Value, e),
LexicalKind::Block => {
@ -214,31 +314,11 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
let (_, external_info) =
Some(source.id()).zip(get_def_use_inner(self.ctx, source.clone()))?;
for v in &external_info.exports_refs {
// Use FileId in ident_defs map should lose stacked import
// information, but it is currently
// not a problem.
let ((ext_id, _), ext_sym) =
external_info.ident_defs.get_index(v.0 as usize).unwrap();
let name = ext_sym.name.clone();
let ext_ref = IdentRef {
name: name.clone(),
range: ext_sym.range.clone(),
};
let (id, ..) = self
.info
.ident_defs
.insert_full((*ext_id, ext_ref), ext_sym.clone());
let id = DefId(id as u64);
self.id_scope.insert(name, id);
for ext_id in &external_info.exports_refs {
self.import_from(&external_info, *ext_id);
}
}
}
LexicalKind::Mod(super::LexicalModKind::ExternResolved { .. }) => {}
}
}
@ -250,24 +330,27 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
if let Some(src) = &self.ext_src {
self.info.external_refs.insert(
(src.id(), None),
vec![IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
}],
vec![(
None,
IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
},
)],
);
}
}
fn insert_extern(&mut self, name: String, range: Range<usize>) {
fn insert_extern(&mut self, name: String, range: Range<usize>, redefine_id: Option<DefId>) {
if let Some(src) = &self.ext_src {
self.info.external_refs.insert(
(src.id(), Some(name.clone())),
vec![IdentRef { name, range }],
vec![(redefine_id, IdentRef { name, range })],
);
}
}
fn insert(&mut self, label: Ns, e: &LexicalHierarchy) {
fn insert(&mut self, label: Ns, e: &LexicalHierarchy) -> DefId {
let snap = match label {
Ns::Label => &mut self.label_scope,
Ns::Value => &mut self.id_scope,
@ -288,20 +371,16 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
let id = DefId(id as u64);
snap.insert(e.info.name.clone(), id);
id
}
fn insert_ref(&mut self, label: Ns, e: &LexicalHierarchy) {
fn insert_ident_ref(&mut self, label: Ns, id_ref: IdentRef) {
let snap = match label {
Ns::Label => &mut self.label_scope,
Ns::Value => &mut self.id_scope,
};
let id_ref = IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
};
match snap.get(&e.info.name) {
match snap.get(&id_ref.name) {
Some(id) => {
self.info.ident_refs.insert(id_ref, *id);
}
@ -310,6 +389,29 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
}
}
}
fn insert_ref(&mut self, label: Ns, e: &LexicalHierarchy) {
self.insert_ident_ref(
label,
IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
},
);
}
fn insert_redef(&mut self, e: &LexicalHierarchy) {
let snap = &mut self.id_scope;
let id_ref = IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
};
if let Some(id) = snap.get(&e.info.name) {
self.info.ident_redefines.insert(id_ref, *id);
}
}
}
pub struct DefUseSnapshot<'a>(pub &'a DefUseInfo);

View file

@ -1,453 +0,0 @@
use std::collections::HashSet;
use std::ops::Deref;
use std::path::Path;
use log::{debug, trace};
use parking_lot::Mutex;
use typst::syntax::ast::Ident;
use typst::World;
use typst::{
foundations::{Func, Value},
syntax::{
ast::{self, AstNode},
LinkedNode, Source, Span, SyntaxKind,
},
};
use typst_ts_core::TypstFileId;
use crate::analysis::{deref_lvalue, find_source_by_import};
use crate::{prelude::*, TypstSpan};
#[derive(Debug, Clone)]
pub struct VariableDefinition<'a> {
pub def_site: LinkedNode<'a>,
pub use_site: LinkedNode<'a>,
pub span: TypstSpan,
}
#[derive(Debug, Clone)]
pub struct FuncDefinition<'a> {
pub value: Func,
pub use_site: LinkedNode<'a>,
pub span: TypstSpan,
}
#[derive(Debug, Clone)]
pub struct ModuleDefinition<'a> {
pub module: TypstFileId,
pub use_site: LinkedNode<'a>,
pub span: TypstSpan,
}
#[derive(Debug, Clone)]
pub struct ExternalDefinition<'a> {
pub use_site: LinkedNode<'a>,
pub span: TypstSpan,
}
#[derive(Debug, Clone)]
pub enum Definition<'a> {
Func(FuncDefinition<'a>),
Var(VariableDefinition<'a>),
Module(ModuleDefinition<'a>),
External(ExternalDefinition<'a>),
}
impl Definition<'_> {
pub fn span(&self) -> TypstSpan {
match self {
Definition::Func(f) => f.span,
Definition::Var(v) => v.span,
Definition::Module(m) => m.span,
Definition::External(s) => s.span,
}
}
pub fn use_site(&self) -> &LinkedNode {
match self {
Definition::Func(f) => &f.use_site,
Definition::Var(v) => &v.use_site,
Definition::Module(m) => &m.use_site,
Definition::External(s) => &s.use_site,
}
}
}
fn advance_prev_adjacent(node: LinkedNode) -> Option<LinkedNode> {
// this is aworkaround for a bug in the parser
if node.len() == 0 {
return None;
}
match node.prev_sibling() {
Some(prev) => Some(prev),
None => {
let parent = node.parent()?;
debug!("no prev sibling parent: {parent:?}");
advance_prev_adjacent(parent.clone())
}
}
}
// #[comemo::memoize]
fn find_definition_in_module<'a>(
search_ctx: &'a SearchCtx<'a>,
source: Source,
name: &str,
) -> Option<Span> {
{
let mut s = search_ctx.searched.lock();
if s.contains(&source.id()) {
return None;
}
s.insert(source.id());
}
let root = source.root();
let node = LinkedNode::new(root);
let last_expr = if let Some(m) = root.cast::<ast::Markup>() {
m.exprs().last()?
} else {
debug!("unexpected root kind {:?}", root.kind());
return None;
};
let last = node.find(last_expr.span())?;
let e = find_syntax_definition(search_ctx, last, name)?;
Some(e.span())
}
enum ImportRef<'a> {
/// `import "foo" as bar;`
/// ^^^
ModuleAs(Ident<'a>),
/// `import "foo.typ"`
/// ^^^
Path(ast::Expr<'a>),
/// `import "foo": bar`
/// ^^^
Ident(Ident<'a>),
/// `import "foo": bar as baz`
/// ^^^
IdentAs(ast::RenamedImportItem<'a>),
/// `import "foo": *`
ExternalResolved(Span),
}
fn find_ref_in_import<'b, 'a>(
ctx: &'b SearchCtx<'b>,
import_node: ast::ModuleImport<'a>,
name: &str,
) -> Option<ImportRef<'a>> {
if let Some(import_node) = import_node.new_name() {
if import_node.get() == name {
return Some(ImportRef::ModuleAs(import_node));
}
}
let Some(imports) = import_node.imports() else {
let v = import_node.source();
match v {
ast::Expr::Str(e) => {
let e = e.get();
let e = Path::new(e.as_ref());
let Some(e) = e.file_name() else {
return None;
};
let e = e.to_string_lossy();
let e = e.as_ref();
let Some(e) = e.strip_suffix(".typ") else {
return None;
};
return (e == name).then_some(ImportRef::Path(v));
}
_ => return None,
}
};
match imports {
ast::Imports::Wildcard => {
let dep = find_source_by_import(ctx.world.deref(), ctx.current, import_node)?;
let res = find_definition_in_module(ctx, dep, name)?;
return Some(ImportRef::ExternalResolved(res));
}
ast::Imports::Items(items) => {
for handle in items.iter() {
match handle {
ast::ImportItem::Simple(e) => {
if e.get() == name {
return Some(ImportRef::Ident(e));
}
}
ast::ImportItem::Renamed(e) => {
let o = e.new_name();
if o.get() == name {
return Some(ImportRef::IdentAs(e));
}
}
}
}
}
}
None
}
fn find_syntax_definition<'b, 'a>(
search_ctx: &'b SearchCtx<'b>,
node: LinkedNode<'a>,
name: &str,
) -> Option<Definition<'a>> {
struct SyntaxDefinitionWorker<'a, 'b, 'c> {
ctx: &'c SearchCtx<'c>,
name: &'b str,
use_site: LinkedNode<'a>,
}
impl<'a, 'b, 'c> SyntaxDefinitionWorker<'a, 'b, 'c> {
fn find(&mut self, mut node: LinkedNode<'a>) -> Option<Definition<'a>> {
loop {
if let Some(def) = self.check(node.clone()) {
return Some(def);
}
let Some(prev) = advance_prev_adjacent(node) else {
debug!("no prev sibling parent");
return None;
};
node = prev;
}
}
fn resolve_as_var(&self, node: LinkedNode<'a>, name: ast::Ident) -> Option<Definition<'a>> {
if name.get() != self.name {
return None;
}
let def_site = node.find(name.span())?;
Some(Definition::Var(VariableDefinition {
def_site,
use_site: self.use_site.clone(),
span: node.span(),
}))
}
fn check(&mut self, node: LinkedNode<'a>) -> Option<Definition<'a>> {
let node = deref_lvalue(node)?;
match node.kind() {
SyntaxKind::LetBinding => {
let binding = node.cast::<ast::LetBinding>()?;
match binding.kind() {
ast::LetBindingKind::Closure(name) => {
if name.get() == self.name {
let values =
analyze_expr(self.ctx.world.deref(), &node.find(name.span())?);
let func = values.into_iter().find_map(|v| match v.0 {
Value::Func(f) => Some(f),
_ => None,
});
let Some(func) = func else {
debug!("no func found... {name:?}");
return None;
};
return Some(Definition::Func(FuncDefinition {
value: func,
use_site: self.use_site.clone(),
span: node.span(),
}));
}
None
}
ast::LetBindingKind::Normal(ast::Pattern::Normal(ast::Expr::Ident(
name,
))) => {
return self.resolve_as_var(node.clone(), name);
}
ast::LetBindingKind::Normal(ast::Pattern::Parenthesized(e)) => {
let e = deref_lvalue(node.find(e.span())?)?;
if let Some(name) = e.cast::<ast::Ident>() {
return self.resolve_as_var(e.clone(), name);
}
None
}
ast::LetBindingKind::Normal(ast::Pattern::Normal(e)) => {
let e = deref_lvalue(node.find(e.span())?)?;
if let Some(name) = e.cast::<ast::Ident>() {
return self.resolve_as_var(e.clone(), name);
}
None
}
ast::LetBindingKind::Normal(ast::Pattern::Destructuring(n)) => {
for i in n.bindings() {
if i.get() == self.name {
return self.resolve_as_var(node.clone(), i);
}
}
None
}
ast::LetBindingKind::Normal(ast::Pattern::Placeholder(..)) => None,
}
}
SyntaxKind::ModuleImport => {
let import_node = node.cast::<ast::ModuleImport>()?;
match find_ref_in_import(self.ctx, import_node, self.name)? {
ImportRef::ModuleAs(ident) => {
let m = find_source_by_import(
self.ctx.world.deref(),
self.ctx.current,
import_node,
)?;
return Some(Definition::Module(ModuleDefinition {
module: m.id(),
use_site: self.use_site.clone(),
span: ident.span(),
}));
}
ImportRef::Path(s) => {
let m = find_source_by_import(
self.ctx.world.deref(),
self.ctx.current,
import_node,
)?;
return Some(Definition::Module(ModuleDefinition {
module: m.id(),
use_site: self.use_site.clone(),
span: s.span(),
}));
}
ImportRef::Ident(ident) => {
return Some(Definition::Var(VariableDefinition {
def_site: node.find(ident.span())?,
use_site: self.use_site.clone(),
span: ident.span(),
}));
}
ImportRef::IdentAs(item) => {
let ident = item.new_name();
return Some(Definition::Var(VariableDefinition {
def_site: node.find(ident.span())?,
use_site: self.use_site.clone(),
span: ident.span(),
}));
}
ImportRef::ExternalResolved(def_span) => {
return Some(Definition::External(ExternalDefinition {
use_site: self.use_site.clone(),
span: def_span,
}));
}
}
}
_ => None,
}
}
}
let mut worker = SyntaxDefinitionWorker {
ctx: search_ctx,
name,
use_site: node.clone(),
};
worker.find(node)
}
struct SearchCtx<'a> {
world: Tracked<'a, dyn World>,
current: TypstFileId,
searched: Mutex<HashSet<TypstFileId>>,
}
// todo: field definition
pub(crate) fn find_definition<'a>(
world: Tracked<'a, dyn World>,
current: TypstFileId,
node: LinkedNode<'a>,
) -> Option<Definition<'a>> {
let mut search_ctx = SearchCtx {
world,
current,
searched: Mutex::new(HashSet::new()),
};
let search_ctx = &mut search_ctx;
search_ctx.searched.lock().insert(current);
let mut ancestor = node;
while !ancestor.is::<ast::Expr>() {
ancestor = ancestor.parent()?.clone();
}
let ancestor = deref_lvalue(ancestor)?;
let may_ident = ancestor.cast::<ast::Expr>()?;
if !may_ident.hash() && !matches!(may_ident, ast::Expr::MathIdent(_)) {
return None;
}
let mut is_ident_only = false;
trace!("got ast_node kind {kind:?}", kind = ancestor.kind());
let ref_node = match may_ident {
// todo: label, reference
// todo: import
// todo: include
ast::Expr::FuncCall(call) => call.callee(),
ast::Expr::Set(set) => set.target(),
ast::Expr::Ident(..) | ast::Expr::MathIdent(..) | ast::Expr::FieldAccess(..) => {
is_ident_only = true;
may_ident
}
ast::Expr::Str(..) => {
if let Some(parent) = ancestor.parent() {
let e = parent.cast::<ast::ModuleImport>()?;
let source = find_source_by_import(world.deref(), current, e)?;
let src = ancestor.find(e.source().span())?;
return Some(Definition::Module(ModuleDefinition {
module: source.id(),
use_site: src,
span: source.root().span(),
}));
}
return None;
}
ast::Expr::Import(..) => {
return None;
}
_ => {
debug!("unsupported kind {kind:?}", kind = ancestor.kind());
return None;
}
};
let use_site = if is_ident_only {
ancestor.clone()
} else {
ancestor.find(ref_node.span())?
};
let values = analyze_expr(world.deref(), &use_site);
let func = values.into_iter().find_map(|v| match &v.0 {
Value::Func(..) => Some(v.0),
_ => None,
});
Some(match func {
Some(Value::Func(f)) => Definition::Func(FuncDefinition {
value: f.clone(),
span: f.span(),
use_site,
}),
_ => {
return match may_ident {
ast::Expr::Ident(e) => find_syntax_definition(search_ctx, use_site, e.get()),
ast::Expr::MathIdent(e) => find_syntax_definition(search_ctx, use_site, e.get()),
ast::Expr::FieldAccess(..) => {
debug!("find field access");
None
}
_ => {
debug!("unsupported kind {kind:?}", kind = ancestor.kind());
None
}
}
}
})
}

View file

@ -40,76 +40,18 @@ pub fn find_source_by_import(
}
}
pub fn find_imports(
source: &Source,
def_id: Option<TypstFileId>,
) -> EcoVec<(VirtualPath, LinkedNode<'_>)> {
#[comemo::memoize]
pub fn find_imports(source: &Source) -> EcoVec<TypstFileId> {
let root = LinkedNode::new(source.root());
if let Some(def_id) = def_id.as_ref() {
debug!("find imports for {def_id:?}");
}
struct ImportWorker<'a> {
current: TypstFileId,
def_id: Option<TypstFileId>,
imports: EcoVec<(VirtualPath, LinkedNode<'a>)>,
}
impl<'a> ImportWorker<'a> {
fn analyze(&mut self, node: LinkedNode<'a>) -> Option<()> {
match node.kind() {
SyntaxKind::ModuleImport => {
let i = node.cast::<ast::ModuleImport>().unwrap();
let src = i.source();
match src {
ast::Expr::Str(s) => {
// todo: source in packages
let s = s.get();
let path = Path::new(s.as_str());
let vpath = if path.is_relative() {
self.current.vpath().join(path)
} else {
VirtualPath::new(path)
};
debug!("found import {vpath:?}");
if self.def_id.is_some_and(|e| e.vpath() != &vpath) {
return None;
}
self.imports.push((vpath, node));
}
// todo: handle dynamic import
ast::Expr::FieldAccess(..) | ast::Expr::Ident(..) => {}
_ => {}
}
return None;
}
SyntaxKind::ModuleInclude => {}
_ => {}
}
for child in node.children() {
self.analyze(child);
}
None
}
}
let mut worker = ImportWorker {
current: source.id(),
def_id,
imports: EcoVec::new(),
};
worker.analyze(root);
let res = worker.imports;
worker.imports
}
#[comemo::memoize]
pub fn find_imports2(source: &Source) -> EcoVec<TypstFileId> {
let res = find_imports(source, None);
let mut res: Vec<TypstFileId> = res
.into_iter()
.map(|(vpath, _)| TypstFileId::new(None, vpath))
@ -118,3 +60,45 @@ pub fn find_imports2(source: &Source) -> EcoVec<TypstFileId> {
res.dedup();
res.into_iter().collect()
}
struct ImportWorker<'a> {
current: TypstFileId,
imports: EcoVec<(VirtualPath, LinkedNode<'a>)>,
}
impl<'a> ImportWorker<'a> {
fn analyze(&mut self, node: LinkedNode<'a>) -> Option<()> {
match node.kind() {
SyntaxKind::ModuleImport => {
let i = node.cast::<ast::ModuleImport>().unwrap();
let src = i.source();
match src {
ast::Expr::Str(s) => {
// todo: source in packages
let s = s.get();
let path = Path::new(s.as_str());
let vpath = if path.is_relative() {
self.current.vpath().join(path)
} else {
VirtualPath::new(path)
};
debug!("found import {vpath:?}");
self.imports.push((vpath, node));
}
// todo: handle dynamic import
ast::Expr::FieldAccess(..) | ast::Expr::Ident(..) => {}
_ => {}
}
return None;
}
SyntaxKind::ModuleInclude => {}
_ => {}
}
for child in node.children() {
self.analyze(child);
}
None
}
}

View file

@ -14,10 +14,7 @@ use typst::{
},
util::LazyHash,
};
use typst_ts_core::{
typst::prelude::{eco_vec, EcoVec},
TypstFileId,
};
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
pub(crate) fn get_lexical_hierarchy(
source: Source,
@ -84,13 +81,6 @@ pub enum LexicalModKind {
/// `import "foo": *`
/// ^
Star,
/// the symbol inside of `import "foo": *`
/// ^
ExternResolved {
#[serde(skip_serializing, skip_deserializing)]
at: Option<TypstFileId>,
in_mod_kind: Box<LexicalModKind>,
},
}
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
@ -421,12 +411,6 @@ impl LexicalHierarchyWorker {
self.get_symbols_with(n, IdentContext::Func)?;
}
}
if self.g == LexicalScopeKind::DefUse {
let param = node.children().find(|n| n.kind() == SyntaxKind::Params);
if let Some(param) = param {
self.get_symbols_with(param, IdentContext::Params)?;
}
}
let body = node
.children()
.rev()
@ -440,6 +424,15 @@ impl LexicalHierarchyWorker {
};
self.stack.push((symbol, eco_vec![]));
let stack_height = self.stack.len();
if self.g == LexicalScopeKind::DefUse {
let param =
node.children().find(|n| n.kind() == SyntaxKind::Params);
if let Some(param) = param {
self.get_symbols_with(param, IdentContext::Params)?;
}
}
self.get_symbols_with(body, IdentContext::Ref)?;
while stack_height <= self.stack.len() {
self.symbreak();

View file

@ -11,6 +11,7 @@ pub fn deref_lvalue(mut node: LinkedNode) -> Option<LinkedNode> {
Some(node)
}
#[derive(Debug, Clone)]
pub enum DerefTarget<'a> {
VarAccess(LinkedNode<'a>),
Callee(LinkedNode<'a>),

View file

@ -2,7 +2,7 @@ use std::{collections::HashMap, sync::Once};
use typst_ts_core::{typst::prelude::EcoVec, TypstFileId};
use super::{find_imports2, AnalysisContext};
use super::{find_imports, AnalysisContext};
pub struct ModuleDependency {
pub dependencies: EcoVec<TypstFileId>,
@ -28,7 +28,7 @@ pub fn construct_module_dependencies(
};
let file_id = source.id();
let deps = find_imports2(&source);
let deps = find_imports(&source);
dependencies
.entry(file_id)
.or_insert_with(|| ModuleDependency {

View file

@ -1,74 +0,0 @@
use typst::syntax::{
ast::{self, AstNode},
LinkedNode, SyntaxKind,
};
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
pub fn find_lexical_references_after<'a, 'b: 'a>(
parent: LinkedNode<'a>,
node: LinkedNode<'a>,
target: &'b str,
) -> EcoVec<LinkedNode<'a>> {
let mut worker = Worker {
idents: eco_vec![],
target,
};
worker.analyze_after(parent, node);
worker.idents
}
struct Worker<'a> {
target: &'a str,
idents: EcoVec<LinkedNode<'a>>,
}
impl<'a> Worker<'a> {
fn analyze_after(&mut self, parent: LinkedNode<'a>, node: LinkedNode<'a>) -> Option<()> {
let mut after_node = false;
for child in parent.children() {
if child.offset() > node.offset() {
after_node = true;
}
if after_node {
self.analyze(child);
}
}
None
}
fn analyze(&mut self, node: LinkedNode<'a>) -> Option<()> {
match node.kind() {
SyntaxKind::LetBinding => {
let lb = node.cast::<ast::LetBinding>().unwrap();
let name = lb.kind().bindings();
for n in name {
if n.get() == self.target {
return None;
}
}
if let Some(init) = lb.init() {
let init_expr = node.find(init.span())?;
self.analyze(init_expr);
}
return None;
}
// todo: analyze import effect
SyntaxKind::Import => {}
SyntaxKind::Ident | SyntaxKind::MathIdent => {
if self.target == node.text() {
self.idents.push(node.clone());
}
}
_ => {}
}
for child in node.children() {
self.analyze(child);
}
None
}
}

View file

@ -0,0 +1,2 @@
#let term(term) = term;
#term(1)

View file

@ -27,7 +27,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_alias_both.typ
},
"refs": []
},
"foo@54..62@s0.typ": {
"foo@59..62@s0.typ": {
"def": {
"kind": {
"Mod": {
@ -40,7 +40,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_alias_both.typ
}
},
"name": "foo",
"range": "54:62"
"range": "59:62"
},
"refs": [
"foo@72..75"

View file

@ -17,7 +17,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_ident_alias.typ
"base@58..62"
]
},
"foo@47..55@s0.typ": {
"foo@52..55@s0.typ": {
"def": {
"kind": {
"Mod": {
@ -30,7 +30,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_ident_alias.typ
}
},
"name": "foo",
"range": "47:55"
"range": "52:55"
},
"refs": [
"foo@65..68"

View file

@ -0,0 +1,31 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/param_scope.typ
---
{
"term@10..14@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "term",
"range": "10:14"
},
"refs": [
"term@18..22"
]
},
"term@5..9@s0.typ": {
"def": {
"kind": {
"Var": "Function"
},
"name": "term",
"range": "5:9"
},
"refs": [
"term@26..30"
]
}
}

View file

@ -0,0 +1,6 @@
// path: base.typ
#let f() = 1;
-----
.
#import "base.typ": /* ident after */ f;

View file

@ -0,0 +1,6 @@
// path: base.typ
#let f() = 1;
-----
.
#import "base.typ": /* ident after */ f as ff;

View file

@ -0,0 +1,6 @@
// path: base.typ
#let f() = 1;
-----
.
#import "base.typ": f as /* ident after */ ff;

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/at_def.typ
[
{
"originSelectionRange": "0:23:0:24",
"targetRange": "0:24:0:26",
"targetSelectionRange": "0:24:0:26"
"targetRange": "0:23:0:24",
"targetSelectionRange": "0:23:0:24"
}
]

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/base.typ
[
{
"originSelectionRange": "1:23:1:24",
"targetRange": "0:6:0:8",
"targetSelectionRange": "0:6:0:8"
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6"
}
]

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_alias.typ
[
{
"originSelectionRange": "1:23:1:26",
"targetRange": "0:6:0:8",
"targetSelectionRange": "0:6:0:8"
"targetRange": "0:25:0:28",
"targetSelectionRange": "0:25:0:28"
}
]

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_ident.typ
[
{
"originSelectionRange": "1:23:1:24",
"targetRange": "0:6:0:8",
"targetSelectionRange": "0:6:0:8"
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6"
}
]

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_star.typ
[
{
"originSelectionRange": "3:23:3:24",
"targetRange": "0:6:0:8",
"targetSelectionRange": "0:6:0:8"
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6"
}
]

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/import_star_varia
[
{
"originSelectionRange": "3:23:3:24",
"targetRange": "0:1:0:10",
"targetSelectionRange": "0:1:0:10"
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6"
}
]

View file

@ -0,0 +1,12 @@
---
source: crates/tinymist-query/src/goto_definition.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/goto_definition/inside_import.typ
---
[
{
"originSelectionRange": "2:38:2:39",
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6"
}
]

View file

@ -0,0 +1,12 @@
---
source: crates/tinymist-query/src/goto_definition.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/goto_definition/inside_import_alias.typ
---
[
{
"originSelectionRange": "2:38:2:39",
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6"
}
]

View file

@ -0,0 +1,12 @@
---
source: crates/tinymist-query/src/goto_definition.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/goto_definition/inside_import_alias2.typ
---
[
{
"originSelectionRange": "2:43:2:45",
"targetRange": "2:43:2:45",
"targetSelectionRange": "2:43:2:45"
}
]

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/paren.typ
[
{
"originSelectionRange": "1:23:1:24",
"targetRange": "0:5:0:14",
"targetSelectionRange": "0:5:0:14"
"targetRange": "0:9:0:10",
"targetSelectionRange": "0:9:0:10"
}
]

View file

@ -6,7 +6,7 @@ input_file: crates/tinymist-query/src/fixtures/goto_definition/variable.typ
[
{
"originSelectionRange": "1:23:1:24",
"targetRange": "0:1:0:10",
"targetSelectionRange": "0:1:0:10"
"targetRange": "0:5:0:6",
"targetSelectionRange": "0:5:0:6"
}
]

View file

@ -18,15 +18,15 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
"name": "f",
"range": "18:19"
},
{
"kind": {
"Var": "Variable"
},
"name": "a",
"range": "20:21"
},
{
"children": [
{
"kind": {
"Var": "Variable"
},
"name": "a",
"range": "20:21"
},
{
"kind": {
"Var": "ValRef"

View file

@ -0,0 +1,9 @@
#import "base2.typ": x
#x
-----
// path: base2.typ
#import "base.typ": x
#x
-----
// path: base.typ
#let /* ident after */ x = 1;

View file

@ -0,0 +1,9 @@
#import "base2.typ": *
#x
-----
// path: base2.typ
#import "base.typ": *
#x
-----
// path: base.typ
#let /* ident after */ x = 1;

View file

@ -0,0 +1,10 @@
// path: basic/writing.typ
#import "mod.typ": *
#exercise()
-----
// path: basic/mod.typ
#import "../mod.typ": exercise
#exercise()
-----
// path: mod.typ
#let /* ident after */ exercise() = [];

View file

@ -1,13 +1,9 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/at_def.typ
---
[
{
"range": "1:2:1:3"
},
{
"range": "2:2:2:3"
}
"/s0.typ@1:2:1:3",
"/s0.typ@2:2:2:3"
]

View file

@ -1,6 +1,8 @@
---
source: crates/tinymist-query/src/goto_declaration.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/goto_declaration/base.typ
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/base.typ
---
null
[
"/s0.typ@1:23:1:24"
]

View file

@ -1,10 +1,8 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module.typ
---
[
{
"range": "1:1:1:2"
}
"/s0.typ@1:1:1:2"
]

View file

@ -1,10 +1,9 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module2.typ
---
[
{
"range": "0:20:0:21"
}
"/s0.typ@0:20:0:21",
"/s0.typ@1:1:1:2"
]

View file

@ -1,10 +1,9 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_absolute.typ
---
[
{
"range": "0:25:0:26"
}
"/out/main.typ@0:25:0:26",
"/out/main.typ@1:1:1:2"
]

View file

@ -1,10 +1,8 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_alias.typ
---
[
{
"range": "1:1:1:3"
}
"/s1.typ@1:1:1:3"
]

View file

@ -1,10 +1,8 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_alias2.typ
---
[
{
"range": "0:20:0:21"
}
"/s0.typ@0:20:0:21"
]

View file

@ -1,10 +1,9 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_relative.typ
---
[
{
"range": "0:20:0:21"
}
"/out/main.typ@0:20:0:21",
"/out/main.typ@1:1:1:2"
]

View file

@ -0,0 +1,11 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/recursive_import.typ
---
[
"/base2.typ@0:20:0:21",
"/s0.typ@0:21:0:22",
"/base2.typ@1:1:1:2",
"/s0.typ@1:1:1:2"
]

View file

@ -0,0 +1,9 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/recursive_import_star.typ
---
[
"/base2.typ@1:1:1:2",
"/s0.typ@1:1:1:2"
]

View file

@ -1,19 +1,11 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/redefine.typ
---
[
{
"range": "3:2:3:3"
},
{
"range": "3:6:3:7"
},
{
"range": "6:12:6:13"
},
{
"range": "8:9:8:10"
}
"/s0.typ@3:2:3:3",
"/s0.typ@3:6:3:7",
"/s0.typ@6:12:6:13",
"/s0.typ@8:9:8:10"
]

View file

@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/rename_issue_exercise.typ
---
[
"/basic/mod.typ@0:22:0:30",
"/basic/mod.typ@1:1:1:9",
"/basic/writing.typ@1:1:1:9"
]

View file

@ -1,8 +1,22 @@
use comemo::Track;
use log::debug;
use lsp_types::LocationLink;
use std::ops::Range;
use crate::{analysis::find_definition, prelude::*};
use log::debug;
use typst::{
foundations::Value,
syntax::{
ast::{self},
LinkedNode, Source,
},
};
use typst_ts_core::TypstFileId;
use crate::{
analysis::{
find_source_by_import, get_def_use, get_deref_target, DerefTarget, IdentRef, LexicalKind,
LexicalModKind, LexicalVarKind,
},
prelude::*,
};
#[derive(Debug, Clone)]
pub struct GotoDefinitionRequest {
@ -13,38 +27,29 @@ pub struct GotoDefinitionRequest {
impl GotoDefinitionRequest {
pub fn request(
self,
world: &TypstSystemWorld,
ctx: &mut AnalysisContext,
position_encoding: PositionEncoding,
) -> Option<GotoDefinitionResponse> {
let source = get_suitable_source_in_workspace(world, &self.path).ok()?;
let source = ctx.source_by_path(&self.path).ok()?;
let offset = lsp_to_typst::position(self.position, position_encoding, &source)?;
let cursor = offset + 1;
let def = {
let ast_node = LinkedNode::new(source.root()).leaf_at(cursor)?;
let t: &dyn World = world;
find_definition(t.track(), source.id(), ast_node)?
};
let (span, use_site) = (def.span(), def.use_site());
if span.is_detached() {
return None;
}
let Some(id) = span.id() else {
return None;
};
let ast_node = LinkedNode::new(source.root()).leaf_at(cursor)?;
debug!("ast_node: {ast_node:?}", ast_node = ast_node);
let deref_target = get_deref_target(ast_node)?;
let use_site = deref_target.node().clone();
let origin_selection_range =
typst_to_lsp::range(use_site.range(), &source, position_encoding);
let span_path = world.path_for_id(id).ok()?;
let span_source = world.source(id).ok()?;
let def_node = span_source.find(span)?;
let typst_range = def_node.range();
let range = typst_to_lsp::range(typst_range, &span_source, position_encoding);
let def = find_definition(ctx, source.clone(), deref_target)?;
let span_path = ctx.world.path_for_id(def.fid).ok()?;
let uri = Url::from_file_path(span_path).ok()?;
let span_source = ctx.source_by_id(def.fid).ok()?;
let range = typst_to_lsp::range(def.def_range, &span_source, position_encoding);
let res = Some(GotoDefinitionResponse::Link(vec![LocationLink {
origin_selection_range: Some(origin_selection_range),
target_uri: uri,
@ -52,11 +57,154 @@ impl GotoDefinitionRequest {
target_selection_range: range,
}]));
debug!("goto_definition: {res:?}");
debug!("goto_definition: {:?} {res:?}", def.fid);
res
}
}
/// A resolved link from a use site to the definition it refers to.
///
/// Produced by [`find_definition`] and consumed by the goto-definition,
/// prepare-rename and rename LSP handlers.
pub(crate) struct DefinitionLink {
    /// The runtime value of the definition, when value analysis could
    /// recover one (currently only set for functions); `None` for purely
    /// syntactic definitions such as variables and module imports.
    pub value: Option<Value>,
    /// The file in which the definition lives.
    pub fid: TypstFileId,
    /// The identifier name of the definition; empty for an import-path
    /// target, where there is no identifier to name.
    pub name: String,
    /// Byte range of the whole definition node inside `fid`.
    pub def_range: Range<usize>,
    /// Byte range of just the defined name, when known; `None` when the
    /// target is a whole file (import path) rather than a named item.
    pub name_range: Option<Range<usize>>,
}
// todo: field definition
/// Resolves the definition referenced by a dereference target under the
/// cursor.
///
/// Resolution proceeds in stages:
/// 1. An import-path target short-circuits to the imported file itself.
/// 2. The identifier at the use site is looked up in the def-use analysis
///    of `source` (reference first, then a definition at the same spot).
/// 3. Value analysis of the use site is consulted; a native/element
///    function found there wins and is returned with its span's location.
/// 4. Otherwise the syntactic definition from step 2 is classified by its
///    lexical kind and returned.
///
/// Returns `None` when the target cannot be resolved (e.g. field accesses,
/// star imports — both still todo per the comments below).
// todo: field definition
pub(crate) fn find_definition(
    ctx: &mut AnalysisContext<'_>,
    source: Source,
    deref_target: DerefTarget<'_>,
) -> Option<DefinitionLink> {
    let source_id = source.id();
    let use_site = match deref_target {
        // todo: field access
        DerefTarget::VarAccess(node) | DerefTarget::Callee(node) => node,
        // todo: better support (rename import path?)
        DerefTarget::ImportPath(path) => {
            // An import path "defines" the imported module: point at the
            // whole root node of the resolved source file.
            let parent = path.parent()?;
            let def_fid = parent.span().id()?;
            let e = parent.cast::<ast::ModuleImport>()?;
            let source = find_source_by_import(ctx.world, def_fid, e)?;
            return Some(DefinitionLink {
                name: String::new(),
                value: None,
                fid: source.id(),
                def_range: (LinkedNode::new(source.root())).range(),
                name_range: None,
            });
        }
    };
    // syntactic definition
    let def_use = get_def_use(ctx, source)?;
    let ident_ref = match use_site.cast::<ast::Expr>()? {
        ast::Expr::Ident(e) => IdentRef {
            name: e.get().to_string(),
            range: use_site.range(),
        },
        ast::Expr::MathIdent(e) => IdentRef {
            name: e.get().to_string(),
            range: use_site.range(),
        },
        ast::Expr::FieldAccess(..) => {
            debug!("find field access");
            return None;
        }
        _ => {
            debug!("unsupported kind {kind:?}", kind = use_site.kind());
            return None;
        }
    };
    // Prefer an existing reference; fall back to the cursor sitting on the
    // definition itself (e.g. goto-definition invoked at the def site).
    let def_id = def_use.get_ref(&ident_ref);
    let def_id = def_id.or_else(|| Some(def_use.get_def(source_id, &ident_ref)?.0));
    let def_info = def_id.and_then(|def_id| def_use.get_def_by_id(def_id));
    let values = analyze_expr(ctx.world, &use_site);
    for v in values {
        // mostly builtin functions
        if let Value::Func(f) = v.0 {
            use typst::foundations::func::Repr;
            match f.inner() {
                // The with function should be resolved as the with position
                Repr::Closure(..) | Repr::With(..) => continue,
                Repr::Native(..) | Repr::Element(..) => {}
            }
            // Native functions may be anonymous at the value level; recover
            // the name from the syntactic definition when available.
            let name = f
                .name()
                .or_else(|| def_info.as_ref().map(|(_, r)| r.name.as_str()));
            if let Some(name) = name {
                let span = f.span();
                let fid = span.id()?;
                let source = ctx.source_by_id(fid).ok()?;
                return Some(DefinitionLink {
                    name: name.to_owned(),
                    value: Some(Value::Func(f.clone())),
                    fid,
                    def_range: source.find(span)?.range(),
                    name_range: def_info.map(|(_, r)| r.range.clone()),
                });
            }
        }
    }
    // No value-level hit: fall back to the purely syntactic definition.
    let (def_fid, def) = def_info?;
    match def.kind {
        // Headings and blocks are never produced as identifier defs, so a
        // def-use lookup can't yield them here.
        LexicalKind::Heading(..) | LexicalKind::Block => unreachable!(),
        LexicalKind::Var(
            LexicalVarKind::Variable
            | LexicalVarKind::ValRef
            | LexicalVarKind::Label
            | LexicalVarKind::LabelRef,
        )
        | LexicalKind::Mod(
            LexicalModKind::Module(..)
            | LexicalModKind::PathVar
            | LexicalModKind::ModuleAlias
            | LexicalModKind::Alias { .. }
            | LexicalModKind::Ident,
        ) => Some(DefinitionLink {
            name: def.name.clone(),
            value: None,
            fid: def_fid,
            def_range: def.range.clone(),
            name_range: Some(def.range.clone()),
        }),
        LexicalKind::Var(LexicalVarKind::Function) => {
            // For functions, additionally recover the runtime func value at
            // the definition's name so callers (e.g. rename) can inspect it.
            let def_source = ctx.source_by_id(def_fid).ok()?;
            let root = LinkedNode::new(def_source.root());
            // +1 so the offset lands inside the name token rather than at
            // its left edge.
            let def_name = root.leaf_at(def.range.start + 1)?;
            log::info!("def_name for function: {def_name:?}", def_name = def_name);
            let values = analyze_expr(ctx.world, &def_name);
            let Some(func) = values.into_iter().find_map(|v| match v.0 {
                Value::Func(f) => Some(f),
                _ => None,
            }) else {
                log::info!("no func found... {:?}", def.name);
                return None;
            };
            log::info!("okay for function: {func:?}");
            Some(DefinitionLink {
                name: def.name.clone(),
                value: Some(Value::Func(func.clone())),
                fid: def_fid,
                def_range: def.range.clone(),
                name_range: Some(def.range.clone()),
            })
        }
        LexicalKind::Mod(LexicalModKind::Star) => {
            log::info!("unimplemented star import {:?}", ident_ref);
            None
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
@ -64,9 +212,8 @@ mod tests {
#[test]
fn test() {
// goto_definition
snapshot_testing("goto_definition", &|world, path| {
let source = get_suitable_source_in_workspace(world, &path).unwrap();
snapshot_testing2("goto_definition", &|world, path| {
let source = world.source_by_path(&path).unwrap();
let request = GotoDefinitionRequest {
path: path.clone(),

View file

@ -5,16 +5,15 @@ pub use std::{
sync::Arc,
};
pub use comemo::{Track, Tracked};
pub use itertools::{Format, Itertools};
pub use log::{error, trace};
pub use lsp_types::{
request::GotoDeclarationResponse, CodeLens, CompletionResponse, DiagnosticRelatedInformation,
DocumentSymbol, DocumentSymbolResponse, Documentation, FoldingRange, GotoDefinitionResponse,
Hover, InlayHint, Location as LspLocation, MarkupContent, MarkupKind, Position as LspPosition,
PrepareRenameResponse, SelectionRange, SemanticTokens, SemanticTokensDelta,
SemanticTokensFullDeltaResult, SemanticTokensResult, SignatureHelp, SignatureInformation,
SymbolInformation, Url, WorkspaceEdit,
Hover, InlayHint, Location as LspLocation, LocationLink, MarkupContent, MarkupKind,
Position as LspPosition, PrepareRenameResponse, SelectionRange, SemanticTokens,
SemanticTokensDelta, SemanticTokensFullDeltaResult, SemanticTokensResult, SignatureHelp,
SignatureInformation, SymbolInformation, Url, WorkspaceEdit,
};
pub use serde_json::Value as JsonValue;
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};

View file

@ -1,7 +1,4 @@
use crate::{
analysis::{find_definition, Definition},
prelude::*,
};
use crate::{analysis::get_deref_target, find_definition, prelude::*, DefinitionLink};
use log::debug;
#[derive(Debug, Clone)]
@ -10,6 +7,8 @@ pub struct PrepareRenameRequest {
pub position: LspPosition,
}
// todo: rename alias
// todo: rename import path?
impl PrepareRenameRequest {
/// See <https://github.com/microsoft/vscode-go/issues/2714>.
/// The prepareRename feature is sent before a rename request. If the user
@ -18,54 +17,68 @@ impl PrepareRenameRequest {
/// show the rename pop-up.
pub fn request(
self,
world: &TypstSystemWorld,
ctx: &mut AnalysisContext,
position_encoding: PositionEncoding,
) -> Option<PrepareRenameResponse> {
let source = get_suitable_source_in_workspace(world, &self.path).ok()?;
let typst_offset = lsp_to_typst::position(self.position, position_encoding, &source)?;
let source = ctx.source_by_path(&self.path).ok()?;
let ast_node = LinkedNode::new(source.root()).leaf_at(typst_offset + 1)?;
let offset = lsp_to_typst::position(self.position, position_encoding, &source)?;
let cursor = offset + 1;
let t: &dyn World = world;
let Definition::Func(func) = find_definition(t.track(), source.id(), ast_node)? else {
// todo: handle other definitions
return None;
};
let ast_node = LinkedNode::new(source.root()).leaf_at(cursor)?;
debug!("ast_node: {ast_node:?}", ast_node = ast_node);
let deref_target = get_deref_target(ast_node)?;
let use_site = deref_target.node().clone();
let origin_selection_range =
typst_to_lsp::range(use_site.range(), &source, position_encoding);
let lnk = find_definition(ctx, source.clone(), deref_target)?;
validate_renaming_definition(&lnk)?;
debug!("prepare_rename: {}", lnk.name);
Some(PrepareRenameResponse::RangeWithPlaceholder {
range: origin_selection_range,
placeholder: lnk.name,
})
}
}
pub(crate) fn validate_renaming_definition(lnk: &DefinitionLink) -> Option<()> {
'check_func: {
use typst::foundations::func::Repr;
let mut f = func.value.clone();
let mut f = match &lnk.value {
Some(Value::Func(f)) => f,
Some(..) => {
log::info!(
"prepare_rename: not a function on function definition site: {:?}",
lnk.value
);
return None;
}
None => {
break 'check_func;
}
};
loop {
match f.inner() {
// native functions can't be renamed
Repr::Native(..) | Repr::Element(..) => return None,
// todo: rename with site
Repr::With(w) => f = w.0.clone(),
Repr::With(w) => f = &w.0,
Repr::Closure(..) => break,
}
}
// todo: unwrap parentheses
let ident = match func.use_site.kind() {
SyntaxKind::Ident | SyntaxKind::MathIdent => func.use_site.text(),
_ => return None,
};
debug!("prepare_rename: {ident}");
let id = func.span.id()?;
if id.package().is_some() {
debug!(
"prepare_rename: {ident} is in a package {pkg:?}",
pkg = id.package()
);
return None;
}
let origin_selection_range =
typst_to_lsp::range(func.use_site.range(), &source, position_encoding);
Some(PrepareRenameResponse::RangeWithPlaceholder {
range: origin_selection_range,
placeholder: ident.to_string(),
})
}
if lnk.fid.package().is_some() {
debug!(
"prepare_rename: {name} is in a package {pkg:?}",
name = lnk.name,
pkg = lnk.fid.package()
);
return None;
}
Some(())
}

View file

@ -1,4 +1,5 @@
use log::debug;
use typst_ts_core::vector::ir::DefId;
use crate::{
analysis::{get_def_use, get_deref_target, DerefTarget, IdentRef},
@ -33,7 +34,7 @@ impl ReferencesRequest {
}
}
fn find_references(
pub(crate) fn find_references(
ctx: &mut AnalysisContext<'_>,
def_use: Arc<crate::analysis::DefUseInfo>,
deref_target: DerefTarget<'_>,
@ -76,17 +77,45 @@ fn find_references(
name: name.clone(),
range: ident.range(),
};
let def_fid = ident.span().id()?;
let cur_fid = ident.span().id()?;
let def_id = def_use.get_ref(&ident_ref);
let def_id = def_id.or_else(|| Some(def_use.get_def(cur_fid, &ident_ref)?.0));
let (def_fid, def) = def_id.and_then(|def_id| def_use.get_def_by_id(def_id))?;
let def_ident = IdentRef {
name: def.name.clone(),
range: def.range.clone(),
};
let (id, _) = def_use.get_def(def_fid, &ident_ref)?;
let def_source = ctx.source_by_id(def_fid).ok()?;
let root_def_use = get_def_use(ctx, def_source)?;
let root_def_id = root_def_use.get_def(def_fid, &def_ident)?.0;
find_references_root(
ctx,
root_def_use,
def_fid,
root_def_id,
def_ident,
position_encoding,
)
}
pub(crate) fn find_references_root(
ctx: &mut AnalysisContext<'_>,
def_use: Arc<crate::analysis::DefUseInfo>,
def_fid: TypstFileId,
def_id: DefId,
def_ident: IdentRef,
position_encoding: PositionEncoding,
) -> Option<Vec<LspLocation>> {
let def_source = ctx.source_by_id(def_fid).ok()?;
let def_path = ctx.world.path_for_id(def_fid).ok()?;
let uri = Url::from_file_path(def_path).ok()?;
// todo: reuse uri, range to location
let mut references = def_use
.get_refs(id)
.get_refs(def_id)
.map(|r| {
let range = typst_to_lsp::range(r.range.clone(), &def_source, position_encoding);
@ -97,7 +126,7 @@ fn find_references(
})
.collect::<Vec<_>>();
if def_use.is_exported(id) {
if def_use.is_exported(def_id) {
// Find dependents
let mut ctx = ctx.fork_for_search();
ctx.push_dependents(def_fid);
@ -105,10 +134,13 @@ fn find_references(
let ref_source = ctx.ctx.source_by_id(ref_fid).ok()?;
let def_use = get_def_use(ctx.ctx, ref_source.clone())?;
log::info!("def_use for {ref_fid:?} => {:?}", def_use.exports_defs);
let uri = ctx.ctx.world.path_for_id(ref_fid).ok()?;
let uri = Url::from_file_path(uri).ok()?;
if let Some((id, _def)) = def_use.get_def(def_fid, &ident_ref) {
let mut redefines = vec![];
if let Some((id, _def)) = def_use.get_def(def_fid, &def_ident) {
references.extend(def_use.get_refs(id).map(|r| {
let range =
typst_to_lsp::range(r.range.clone(), &ref_source, position_encoding);
@ -118,25 +150,12 @@ fn find_references(
range,
}
}));
redefines.push(id);
if def_use.is_exported(id) {
ctx.push_dependents(ref_fid);
}
};
references.extend(
def_use
.get_external_refs(def_fid, Some(name.clone()))
.map(|r| {
let range =
typst_to_lsp::range(r.range.clone(), &ref_source, position_encoding);
LspLocation {
uri: uri.clone(),
range,
}
}),
);
if def_use.is_exported(id) {
ctx.push_dependents(ref_fid);
}
}
}
@ -145,6 +164,8 @@ fn find_references(
#[cfg(test)]
mod tests {
use typst_ts_core::path::unix_slash;
use super::*;
use crate::tests::*;
@ -168,7 +189,24 @@ mod tests {
});
e
});
assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
let result = result.map(|v| {
v.into_iter()
.map(|l| {
let fp = unix_slash(&l.uri.to_file_path().unwrap());
let fp = fp.strip_prefix("C:").unwrap_or(&fp);
format!(
"{fp}@{}:{}:{}:{}",
l.range.start.line,
l.range.start.character,
l.range.end.line,
l.range.end.character
)
})
.collect::<Vec<_>>()
});
assert_snapshot!(JsonRepr::new_pure(result));
});
}
}

View file

@ -1,11 +1,11 @@
use std::collections::HashSet;
use log::{debug, warn};
use log::debug;
use lsp_types::TextEdit;
use crate::{
analysis::{find_definition, find_imports, find_lexical_references_after, Definition},
analysis::{get_def_use, get_deref_target},
find_definition, find_references,
prelude::*,
validate_renaming_definition,
};
#[derive(Debug, Clone)]
@ -18,295 +18,59 @@ pub struct RenameRequest {
impl RenameRequest {
pub fn request(
self,
world: &TypstSystemWorld,
ctx: &mut AnalysisContext,
position_encoding: PositionEncoding,
) -> Option<WorkspaceEdit> {
let source = get_suitable_source_in_workspace(world, &self.path).ok()?;
let typst_offset = lsp_to_typst::position(self.position, position_encoding, &source)?;
let source = ctx.source_by_path(&self.path).ok()?;
let ast_node = LinkedNode::new(source.root()).leaf_at(typst_offset + 1)?;
let offset = lsp_to_typst::position(self.position, position_encoding, &source)?;
let cursor = offset + 1;
let t: &dyn World = world;
let Definition::Func(func) = find_definition(t.track(), source.id(), ast_node)? else {
// todo: handle other definitions
return None;
};
let ast_node = LinkedNode::new(source.root()).leaf_at(cursor)?;
debug!("ast_node: {ast_node:?}", ast_node = ast_node);
// todo: unwrap parentheses
let deref_target = get_deref_target(ast_node)?;
let ident = match func.use_site.kind() {
SyntaxKind::Ident | SyntaxKind::MathIdent => func.use_site.text(),
_ => return None,
};
debug!("prepare_rename: {ident}");
let lnk = find_definition(ctx, source.clone(), deref_target.clone())?;
let def_id = func.span.id()?;
if def_id.package().is_some() {
debug!(
"prepare_rename: {ident} is in a package {pkg:?}",
pkg = def_id.package()
);
return None;
}
validate_renaming_definition(&lnk)?;
let def_use = get_def_use(ctx, source.clone())?;
let references = find_references(ctx, def_use, deref_target, position_encoding)?;
let mut editions = HashMap::new();
let def_source = world.source(def_id).ok()?;
let def_id = def_source.id();
let def_path = world.path_for_id(def_id).ok()?;
let def_node = def_source.find(func.span)?;
let mut def_node = &def_node;
loop {
if def_node.kind() == SyntaxKind::LetBinding {
break;
let def_loc = {
let def_source = ctx.source_by_id(lnk.fid).ok()?;
let span_path = ctx.world.path_for_id(lnk.fid).ok()?;
let uri = Url::from_file_path(span_path).ok()?;
let Some(range) = lnk.name_range else {
log::warn!("rename: no name range");
return None;
};
LspLocation {
uri,
range: typst_to_lsp::range(range, &def_source, position_encoding),
}
def_node = def_node.parent()?;
}
};
debug!(
"rename: def_node found: {def_node:?} in {path}",
path = def_path.display()
);
let def_func = def_node.cast::<ast::LetBinding>()?;
let def_names = def_func.kind().bindings();
if def_names.len() != 1 {
return None;
}
let def_name = def_names.first().unwrap();
let def_name_node = def_node.find(def_name.span())?;
// find after function definition
let def_root = LinkedNode::new(def_source.root());
let parent = def_node.parent().unwrap_or(&def_root).clone();
let idents = find_lexical_references_after(parent, def_node.clone(), ident);
debug!("rename: in file idents found: {idents:?}");
let def_uri = Url::from_file_path(def_path).unwrap();
for i in (Some(def_name_node).into_iter()).chain(idents) {
let range = typst_to_lsp::range(i.range(), &def_source, position_encoding);
editions.insert(
def_uri.clone(),
vec![TextEdit {
range,
new_text: self.new_name.clone(),
}],
);
}
// check whether it is in a sub scope
if is_rooted_definition(def_node) {
let mut wq = WorkQueue::default();
wq.push(def_id);
while let Some(id) = wq.pop() {
search_in_workspace(
world,
id,
ident,
&self.new_name,
&mut editions,
&mut wq,
position_encoding,
)?;
}
for i in (Some(def_loc).into_iter()).chain(references) {
let uri = i.uri;
let range = i.range;
let edits = editions.entry(uri).or_insert_with(Vec::new);
edits.push(TextEdit {
range,
new_text: self.new_name.clone(),
});
}
// todo: conflict analysis
Some(WorkspaceEdit {
changes: Some(editions),
..Default::default()
})
}
}
#[derive(Debug, Clone, Default)]
struct WorkQueue {
searched: HashSet<TypstFileId>,
queue: Vec<TypstFileId>,
}
impl WorkQueue {
fn push(&mut self, id: TypstFileId) {
if self.searched.contains(&id) {
return;
}
self.searched.insert(id);
self.queue.push(id);
}
fn pop(&mut self) -> Option<TypstFileId> {
let id = self.queue.pop()?;
Some(id)
}
}
fn is_rooted_definition(node: &LinkedNode) -> bool {
// check whether it is in a sub scope
let mut parent_has_block = false;
let mut parent = node.parent();
while let Some(p) = parent {
if matches!(p.kind(), SyntaxKind::CodeBlock | SyntaxKind::ContentBlock) {
parent_has_block = true;
break;
}
parent = p.parent();
}
!parent_has_block
}
fn search_in_workspace(
world: &TypstSystemWorld,
def_id: TypstFileId,
ident: &str,
new_name: &str,
editions: &mut HashMap<Url, Vec<TextEdit>>,
wq: &mut WorkQueue,
position_encoding: PositionEncoding,
) -> Option<()> {
for path in walkdir::WalkDir::new(world.root.clone())
.follow_links(false)
.into_iter()
{
let Ok(de) = path else {
continue;
};
if !de.file_type().is_file() {
continue;
}
if !de
.path()
.extension()
.is_some_and(|e| e == "typ" || e == "typc")
{
continue;
}
let Ok(source) = get_suitable_source_in_workspace(world, de.path()) else {
warn!("rename: failed to get source for {}", de.path().display());
return None;
};
let use_id = source.id();
// todo: whether we can rename identifiers in packages?
if use_id.package().is_some() || wq.searched.contains(&use_id) {
continue;
}
// todo: find dynamically
let mut res = vec![];
if def_id != use_id {
// find import statement
let imports = find_imports(&source, Some(def_id));
debug!("rename: imports found: {imports:?}");
// todo: precise import analysis
if imports.is_empty() {
continue;
}
let root = LinkedNode::new(source.root());
for i in imports {
let stack_store = i.1.clone();
let Some(import_node) = stack_store.cast::<ast::ModuleImport>() else {
continue;
};
// todo: don't ignore import node
if import_node.new_name().is_some() {
continue;
}
let Some(imports) = import_node.imports() else {
continue;
};
let mut found = false;
let mut found_ident = None;
match imports {
ast::Imports::Wildcard => found = true,
ast::Imports::Items(items) => {
for handle in items.iter() {
match handle {
ast::ImportItem::Simple(e) => {
if e.get() == ident {
found = true;
found_ident = Some((e, false));
break;
}
}
ast::ImportItem::Renamed(e) => {
let o = e.original_name();
if o.get() == ident {
found = true;
found_ident = Some((o, true));
break;
}
}
}
}
}
}
if !found {
continue;
}
debug!("rename: import ident found in {:?}", de.path().display());
let is_renamed = found_ident.as_ref().map(|e| e.1).unwrap_or(false);
let found_ident = found_ident.map(|e| e.0);
if !is_renamed && is_rooted_definition(&i.1) {
wq.push(use_id);
debug!("rename: push {use_id:?} to work queue");
}
let idents = if !is_renamed {
let parent = i.1.parent().unwrap_or(&root).clone();
Some(find_lexical_references_after(parent, i.1.clone(), ident))
} else {
None
};
debug!("rename: idents found: {idents:?}");
let found_ident = found_ident.map(|found_ident| {
let Some(found_ident) = i.1.find(found_ident.span()) else {
warn!(
"rename: found_ident not found: {found_ident:?} in {:?} in {}",
i.1,
de.path().display()
);
return None;
};
Some(found_ident)
});
// we do early return because there may be some unreliability during
// analysis
if found_ident.as_ref().is_some_and(Option::is_none) {
return None;
}
let found_ident = found_ident.flatten();
for i in idents.into_iter().flatten().chain(found_ident.into_iter()) {
let range = typst_to_lsp::range(i.range(), &source, position_encoding);
res.push(TextEdit {
range,
new_text: new_name.to_owned(),
});
}
}
}
if !res.is_empty() {
let use_path = world.path_for_id(use_id).unwrap();
let uri = Url::from_file_path(use_path).unwrap();
editions.insert(uri, res);
}
}
Some(())
}