feat: support query of cross-module references (#42)

* feat: support cross-module references by wild-card import

* feat: support cross-module references by ident import

* feat: support cross-module references by alias import

* docs: update readme

* dev: remove two debug prints
Myriad-Dreamin 2024-03-15 15:43:42 +08:00 committed by GitHub
parent fe25933d0e
commit 5fa4f8f94f
19 changed files with 360 additions and 136 deletions

View file

@@ -1,12 +1,11 @@
use core::fmt;
use std::{
collections::{HashMap, HashSet},
collections::HashMap,
ops::{Deref, Range},
sync::Arc,
};
use log::info;
use parking_lot::Mutex;
use serde::Serialize;
use typst::syntax::Source;
use typst_ts_core::{path::unix_slash, TypstFileId};
@@ -15,7 +14,7 @@ use crate::{adt::snapshot_map::SnapshotMap, analysis::find_source_by_import_path
use super::{
get_lexical_hierarchy, AnalysisContext, LexicalHierarchy, LexicalKind, LexicalScopeKind,
LexicalVarKind, ModSrc,
LexicalVarKind, ModSrc, SearchCtx,
};
pub use typst_ts_core::vector::ir::DefId;
@@ -60,14 +59,15 @@ impl Serialize for IdentRef {
#[derive(Serialize, Clone)]
pub struct IdentDef {
name: String,
kind: LexicalKind,
range: Range<usize>,
pub name: String,
pub kind: LexicalKind,
pub range: Range<usize>,
}
#[derive(Default)]
pub struct DefUseInfo {
ident_defs: indexmap::IndexMap<(TypstFileId, IdentRef), IdentDef>,
external_refs: HashMap<(TypstFileId, Option<String>), Vec<IdentRef>>,
ident_refs: HashMap<IdentRef, DefId>,
undefined_refs: Vec<IdentRef>,
exports_refs: Vec<DefId>,
@@ -84,28 +84,30 @@ impl DefUseInfo {
.iter()
.filter_map(move |(k, v)| if *v == id { Some(k) } else { None })
}
pub fn get_external_refs(
&self,
ext_id: TypstFileId,
ext_name: Option<String>,
) -> impl Iterator<Item = &IdentRef> {
self.external_refs
.get(&(ext_id, ext_name))
.into_iter()
.flatten()
}
pub fn is_exported(&self, id: DefId) -> bool {
self.exports_refs.contains(&id)
}
}
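// A minimal usage sketch of the new accessors; `info: &DefUseInfo`,
// `base_id: TypstFileId` (the imported module), and `def_id: DefId` are
// hypothetical bindings, not introduced by this change:
let module_refs: Vec<&IdentRef> = info.get_external_refs(base_id, None).collect();
let x_refs: Vec<&IdentRef> =
    info.get_external_refs(base_id, Some("x".to_string())).collect();
let searchable = info.is_exported(def_id); // gates the cross-file search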
pub fn get_def_use<'a>(
world: &'a mut AnalysisContext<'a>,
source: Source,
) -> Option<Arc<DefUseInfo>> {
let mut ctx = SearchCtx {
ctx: world,
searched: Default::default(),
};
get_def_use_inner(&mut ctx, source)
}
struct SearchCtx<'w> {
ctx: &'w mut AnalysisContext<'w>,
searched: Mutex<HashSet<TypstFileId>>,
pub fn get_def_use(ctx: &mut AnalysisContext, source: Source) -> Option<Arc<DefUseInfo>> {
get_def_use_inner(&mut ctx.fork_for_search(), source)
}
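// A sketch of calling the reworked entry point; `world: &TypstSystemWorld`
// and `source: Source` are assumed bindings, inside a function returning Option:
let mut ctx = AnalysisContext::new(world);
let info: Arc<DefUseInfo> = get_def_use(&mut ctx, source)?;
// each call forks its own SearchCtx, so the `searched` cycle guard is
// scoped to a single query rather than shared through a Mutex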
fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseInfo>> {
let current_id = source.id();
if !ctx.searched.lock().insert(current_id) {
if !ctx.searched.insert(current_id) {
return None;
}
@@ -125,7 +127,7 @@ fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseIn
label_scope: SnapshotMap::default(),
current_id,
current_path: None,
ext_src: None,
};
collector.scan(&e);
@@ -138,17 +140,17 @@ fn get_def_use_inner(ctx: &mut SearchCtx, source: Source) -> Option<Arc<DefUseIn
res
}
struct DefUseCollector<'a, 'w> {
ctx: &'a mut SearchCtx<'w>,
struct DefUseCollector<'a, 'b, 'w> {
ctx: &'a mut SearchCtx<'b, 'w>,
info: DefUseInfo,
label_scope: SnapshotMap<String, DefId>,
id_scope: SnapshotMap<String, DefId>,
current_id: TypstFileId,
current_path: Option<&'a str>,
ext_src: Option<Source>,
}
impl<'a, 'w> DefUseCollector<'a, 'w> {
impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
fn enter<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
let id_snap = self.id_scope.snapshot();
let res = f(self);
@@ -167,12 +169,18 @@ impl<'a, 'w> DefUseCollector<'a, 'w> {
LexicalKind::Var(LexicalVarKind::Label) => self.insert(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_ref(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::Function)
| LexicalKind::Var(LexicalVarKind::Variable)
| LexicalKind::Mod(super::LexicalModKind::PathVar)
| LexicalKind::Mod(super::LexicalModKind::ModuleAlias)
| LexicalKind::Mod(super::LexicalModKind::Ident)
| LexicalKind::Mod(super::LexicalModKind::Alias { .. }) => {
self.insert(Ns::Value, e)
| LexicalKind::Var(LexicalVarKind::Variable) => self.insert(Ns::Value, e),
LexicalKind::Mod(super::LexicalModKind::PathVar)
| LexicalKind::Mod(super::LexicalModKind::ModuleAlias) => {
self.insert_module(Ns::Value, e)
}
LexicalKind::Mod(super::LexicalModKind::Ident) => {
self.insert(Ns::Value, e);
self.insert_extern(e.info.name.clone(), e.info.range.clone());
}
LexicalKind::Mod(super::LexicalModKind::Alias { target }) => {
self.insert(Ns::Value, e);
self.insert_extern(target.name.clone(), target.range.clone());
}
LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_ref(Ns::Value, e),
LexicalKind::Block => {
@@ -184,7 +192,12 @@ impl<'a, 'w> DefUseCollector<'a, 'w> {
match p {
ModSrc::Expr(_) => {}
ModSrc::Path(p) => {
self.current_path = Some(p.deref());
let src = find_source_by_import_path(
self.ctx.ctx.world,
self.current_id,
p.deref(),
);
self.ext_src = src;
}
}
@@ -193,40 +206,35 @@ impl<'a, 'w> DefUseCollector<'a, 'w> {
self.scan(e.as_slice())?;
}
self.current_path = None;
self.ext_src = None;
}
LexicalKind::Mod(super::LexicalModKind::Star) => {
if let Some(path) = self.current_path {
let external_info =
find_source_by_import_path(self.ctx.ctx.world, self.current_id, path)
.and_then(|source| {
info!("diving source for def use: {:?}", source.id());
Some(source.id()).zip(get_def_use_inner(self.ctx, source))
});
if let Some(source) = &self.ext_src {
info!("diving source for def use: {:?}", source.id());
let (_, external_info) =
Some(source.id()).zip(get_def_use_inner(self.ctx, source.clone()))?;
if let Some((_, external_info)) = external_info {
for v in &external_info.exports_refs {
// Keying the ident_defs map by FileId loses stacked import
// information, but that is currently
// not a problem.
let ((ext_id, _), ext_sym) =
external_info.ident_defs.get_index(v.0 as usize).unwrap();
for v in &external_info.exports_refs {
// Keying the ident_defs map by FileId loses stacked import
// information, but that is currently
// not a problem.
let ((ext_id, _), ext_sym) =
external_info.ident_defs.get_index(v.0 as usize).unwrap();
let name = ext_sym.name.clone();
let name = ext_sym.name.clone();
let ext_ref = IdentRef {
name: name.clone(),
range: ext_sym.range.clone(),
};
let ext_ref = IdentRef {
name: name.clone(),
range: ext_sym.range.clone(),
};
let (id, ..) = self
.info
.ident_defs
.insert_full((*ext_id, ext_ref), ext_sym.clone());
let (id, ..) = self
.info
.ident_defs
.insert_full((*ext_id, ext_ref), ext_sym.clone());
let id = DefId(id as u64);
self.id_scope.insert(name, id);
}
let id = DefId(id as u64);
self.id_scope.insert(name, id);
}
}
}
@@ -237,6 +245,28 @@ impl<'a, 'w> DefUseCollector<'a, 'w> {
Some(())
}
fn insert_module(&mut self, label: Ns, e: &LexicalHierarchy) {
self.insert(label, e);
if let Some(src) = &self.ext_src {
self.info.external_refs.insert(
(src.id(), None),
vec![IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
}],
);
}
}
fn insert_extern(&mut self, name: String, range: Range<usize>) {
if let Some(src) = &self.ext_src {
self.info.external_refs.insert(
(src.id(), Some(name.clone())),
vec![IdentRef { name, range }],
);
}
}
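// Roughly, each import form now lands in `external_refs` as follows,
// assuming "base.typ" resolves to a hypothetical file id `base_id`:
//   #import "base.typ" as b     => insert_module: key (base_id, None)
//   #import "base.typ": x       => insert_extern: key (base_id, Some("x"))
//   #import "base.typ": x as ff => insert_extern: key (base_id, Some("x")),
//                                  ref placed at the original name's range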
fn insert(&mut self, label: Ns, e: &LexicalHierarchy) {
let snap = match label {
Ns::Label => &mut self.label_scope,

View file

@@ -1,4 +1,8 @@
use std::{collections::HashMap, path::Path, sync::Arc};
use std::{
collections::{HashMap, HashSet},
path::Path,
sync::Arc,
};
use once_cell::sync::OnceCell;
use typst::{
@@ -9,7 +13,7 @@ use typst::{
use typst_ts_compiler::{service::WorkspaceProvider, TypstSystemWorld};
use typst_ts_core::{cow_mut::CowMut, ImmutPath, TypstFileId};
use super::DefUseInfo;
use super::{construct_module_dependencies, DefUseInfo, ModuleDependency};
pub struct ModuleAnalysisCache {
source: OnceCell<FileResult<Source>>,
@@ -42,27 +46,17 @@ pub struct Analysis {
pub struct AnalysisCaches {
modules: HashMap<TypstFileId, ModuleAnalysisCache>,
root_files: OnceCell<Vec<TypstFileId>>,
module_deps: OnceCell<HashMap<TypstFileId, ModuleDependency>>,
}
// fn search_in_workspace(
// world: &TypstSystemWorld,
// def_id: TypstFileId,
// ident: &str,
// new_name: &str,
// editions: &mut HashMap<Url, Vec<TextEdit>>,
// wq: &mut WorkQueue,
// position_encoding: PositionEncoding,
// ) -> Option<()> {
// }
pub struct AnalysisContext<'a> {
pub world: &'a TypstSystemWorld,
pub analysis: CowMut<'a, Analysis>,
caches: AnalysisCaches,
}
impl<'a> AnalysisContext<'a> {
pub fn new(world: &'a TypstSystemWorld) -> Self {
impl<'w> AnalysisContext<'w> {
pub fn new(world: &'w TypstSystemWorld) -> Self {
Self {
world,
analysis: CowMut::Owned(Analysis {
@@ -71,6 +65,7 @@ impl<'a> AnalysisContext<'a> {
caches: AnalysisCaches {
modules: HashMap::new(),
root_files: OnceCell::new(),
module_deps: OnceCell::new(),
},
}
}
@@ -84,6 +79,25 @@ impl<'a> AnalysisContext<'a> {
self.caches.root_files.get_or_init(|| self.search_files())
}
pub fn module_dependencies(&mut self) -> &HashMap<TypstFileId, ModuleDependency> {
if self.caches.module_deps.get().is_some() {
return self.caches.module_deps.get().unwrap();
} else {
// may be calculated more than once, but that is fine because we hold a
// mutable reference to self.
let deps = construct_module_dependencies(self);
self.caches.module_deps.get_or_init(|| deps)
}
}
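// A minimal read of the cached graph; `ctx` and `fid` are assumed bindings,
// and the `dependents` field is the one push_dependents below relies on:
if let Some(dep) = ctx.module_dependencies().get(&fid) {
    for dependent in dep.dependents.clone() {
        log::info!("{dependent:?} imports {fid:?}");
    }
}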
pub fn fork_for_search<'s>(&'s mut self) -> SearchCtx<'s, 'w> {
SearchCtx {
ctx: self,
searched: Default::default(),
worklist: Default::default(),
}
}
pub fn get_mut(&mut self, file_id: TypstFileId) -> &ModuleAnalysisCache {
self.caches.modules.entry(file_id).or_insert_with(|| {
let source = OnceCell::new();
@@ -148,3 +162,28 @@ impl<'a> AnalysisContext<'a> {
res
}
}
pub struct SearchCtx<'b, 'w> {
pub ctx: &'b mut AnalysisContext<'w>,
pub searched: HashSet<TypstFileId>,
pub worklist: Vec<TypstFileId>,
}
impl SearchCtx<'_, '_> {
pub fn push(&mut self, id: TypstFileId) -> bool {
if self.searched.insert(id) {
self.worklist.push(id);
true
} else {
false
}
}
pub fn push_dependents(&mut self, id: TypstFileId) {
let deps = self.ctx.module_dependencies().get(&id);
let dependents = deps.map(|e| e.dependents.clone()).into_iter().flatten();
for dep in dependents {
self.push(dep);
}
}
}
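// A condensed sketch of the worklist pattern that find_references drives;
// `ctx: &mut AnalysisContext` and `start: TypstFileId` are assumed bindings,
// inside a function returning Option:
let mut search = ctx.fork_for_search();
search.push_dependents(start);
while let Some(fid) = search.worklist.pop() {
    let src = search.ctx.source_by_id(fid).ok()?;
    // analyze `src`; call search.push_dependents(fid) again when its
    // definitions are re-exported, so transitive importers get visited
    let _ = src;
}
// `searched` guarantees each file is processed at most once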

View file

@@ -113,7 +113,7 @@ pub enum LexicalVarKind {
}
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) enum LexicalKind {
pub enum LexicalKind {
Heading(i16),
Var(LexicalVarKind),
Mod(LexicalModKind),
@@ -449,6 +449,26 @@ impl LexicalHierarchyWorker {
}
}
}
SyntaxKind::RenamedImportItem if self.g.affect_import() => {
let src = node
.cast::<ast::RenamedImportItem>()
.ok_or_else(|| anyhow!("cast to renamed import item failed: {:?}", node))?;
let origin_name = src.new_name();
let origin_name_node = node.find(origin_name.span()).context("no pos")?;
let target_name = src.original_name();
let target_name_node = node.find(target_name.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: origin_name.get().to_string(),
kind: LexicalKind::module_import_alias(ImportAlias {
name: target_name.get().to_string(),
range: target_name_node.range(),
}),
range: origin_name_node.range(),
});
}
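// Illustration: for `#import "base.typ": x as ff` this arm pushes a leaf
// shaped like the following (ranges are made-up offsets):
//   LexicalInfo {
//       name: "ff".into(), // the newly bound name
//       kind: LexicalKind::module_import_alias(ImportAlias {
//           name: "x".into(), // the original exported name
//           range: 20..21,    // span of `x` in the import item
//       }),
//       range: 25..27, // span of `ff`
//   }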
SyntaxKind::FieldAccess => {
self.get_symbols_in_first_expr(node.children())?;
}
@@ -529,23 +549,6 @@ impl LexicalHierarchyWorker {
(name, kind)
}
SyntaxKind::RenamedImportItem if self.g.affect_import() => {
let src = node
.cast::<ast::RenamedImportItem>()
.ok_or_else(|| anyhow!("cast to renamed import item failed: {:?}", node))?;
let name = src.new_name().get().to_string();
let target_name = src.original_name();
let target_name_node = node.find(target_name.span()).context("no pos")?;
(
name,
LexicalKind::module_import_alias(ImportAlias {
name: target_name.get().to_string(),
range: target_name_node.range(),
}),
)
}
SyntaxKind::Equation | SyntaxKind::Raw | SyntaxKind::BlockComment
if self.g.affect_markup() =>
{

View file

@@ -0,0 +1,5 @@
#import "base.typ": *
#x
-----
// path: base.typ
#let /* ident after */ x = 1;

View file

@@ -0,0 +1,5 @@
#import "base.typ": x
#x
-----
// path: base.typ
#let /* ident after */ x = 1;

View file

@@ -0,0 +1,6 @@
// path: /out/main.typ
#import "/out/base.typ": x
#x
-----
// path: /out/base.typ
#let /* ident after */ x = 1;

View file

@@ -0,0 +1,5 @@
// path: base.typ
#let x = 1;
-----
#import "base.typ": x as /* ident after */ ff
#ff

View file

@@ -0,0 +1,5 @@
#import "base.typ": x as ff
#ff
-----
// path: base.typ
#let /* ident after */ x = 1;

View file

@@ -0,0 +1,6 @@
// path: /out/main.typ
#import "base.typ": x
#x
-----
// path: /out/base.typ
#let /* ident after */ x = 1;

View file

@@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module.typ
---
[
{
"range": "1:1:1:2"
}
]

View file

@@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module2.typ
---
[
{
"range": "0:20:0:21"
}
]

View file

@@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_absolute.typ
---
[
{
"range": "0:25:0:26"
}
]

View file

@@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_alias.typ
---
[
{
"range": "1:1:1:3"
}
]

View file

@@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_alias2.typ
---
[
{
"range": "0:20:0:21"
}
]

View file

@@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/references/cross_module_relative.typ
---
[
{
"range": "0:20:0:21"
}
]

View file

@@ -140,10 +140,10 @@ fn inlay_hint(
Value::Func(f) => Some(f),
_ => None,
})?;
log::info!("got function {func:?}");
log::debug!("got function {func:?}");
let call_info = analyze_call(func, args)?;
log::info!("got call_info {call_info:?}");
log::debug!("got call_info {call_info:?}");
let check_single_pos_arg = || {
let mut pos = 0;

View file

@@ -1,5 +1,3 @@
use std::ops::Range;
use log::debug;
use crate::{
@@ -16,47 +14,31 @@ pub struct ReferencesRequest {
impl ReferencesRequest {
pub fn request(
self,
ctx: &TypstSystemWorld,
ctx: &mut AnalysisContext,
position_encoding: PositionEncoding,
) -> Option<Vec<LspLocation>> {
let mut ctx = AnalysisContext::new(ctx);
let world = ctx.world;
let source = ctx.source_by_path(&self.path).ok()?;
let offset = lsp_to_typst::position(self.position, position_encoding, &source)?;
let cursor = offset + 1;
let w: &dyn World = world;
let ast_node = LinkedNode::new(source.root()).leaf_at(cursor)?;
debug!("ast_node: {ast_node:?}", ast_node = ast_node);
let deref_target = get_deref_target(ast_node)?;
let def_use = get_def_use(&mut ctx, source.clone())?;
let ref_spans = find_declarations(w, def_use, deref_target)?;
let mut locations = vec![];
for ref_range in ref_spans {
let ref_id = source.id();
let ref_source = &source;
let span_path = world.path_for_id(ref_id).ok()?;
let range = typst_to_lsp::range(ref_range, ref_source, position_encoding);
let uri = Url::from_file_path(span_path).ok()?;
locations.push(LspLocation { uri, range });
}
let def_use = get_def_use(ctx, source.clone())?;
let locations = find_references(ctx, def_use, deref_target, position_encoding)?;
debug!("references: {locations:?}");
Some(locations)
}
}
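// A sketch of issuing the reworked request; `ctx`, `path`, and `position`
// are hypothetical bindings, and the UTF-16 encoding variant is assumed:
let req = ReferencesRequest { path, position };
let locations = req.request(&mut ctx, PositionEncoding::Utf16);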
fn find_declarations(
_w: &dyn World,
fn find_references(
ctx: &mut AnalysisContext<'_>,
def_use: Arc<crate::analysis::DefUseInfo>,
deref_target: DerefTarget<'_>,
) -> Option<Vec<Range<usize>>> {
position_encoding: PositionEncoding,
) -> Option<Vec<LspLocation>> {
let node = match deref_target {
DerefTarget::VarAccess(node) => node,
DerefTarget::Callee(node) => node,
@@ -91,17 +73,74 @@ fn find_declarations(
// todo: if it is exported, find all the references in the workspace
let ident_ref = IdentRef {
name,
name: name.clone(),
range: ident.range(),
};
let def_fid = ident.span().id()?;
let (id, _) = def_use.get_def(ident.span().id()?, &ident_ref)?;
Some(
def_use
.get_refs(id)
.map(|r| r.range.clone())
.collect::<Vec<_>>(),
)
let (id, _) = def_use.get_def(def_fid, &ident_ref)?;
let def_source = ctx.source_by_id(def_fid).ok()?;
let def_path = ctx.world.path_for_id(def_fid).ok()?;
let uri = Url::from_file_path(def_path).ok()?;
// todo: reuse uri, range to location
let mut references = def_use
.get_refs(id)
.map(|r| {
let range = typst_to_lsp::range(r.range.clone(), &def_source, position_encoding);
LspLocation {
uri: uri.clone(),
range,
}
})
.collect::<Vec<_>>();
if def_use.is_exported(id) {
// Find dependents
let mut ctx = ctx.fork_for_search();
ctx.push_dependents(def_fid);
while let Some(ref_fid) = ctx.worklist.pop() {
let ref_source = ctx.ctx.source_by_id(ref_fid).ok()?;
let def_use = get_def_use(ctx.ctx, ref_source.clone())?;
let uri = ctx.ctx.world.path_for_id(ref_fid).ok()?;
let uri = Url::from_file_path(uri).ok()?;
if let Some((id, _def)) = def_use.get_def(def_fid, &ident_ref) {
references.extend(def_use.get_refs(id).map(|r| {
let range =
typst_to_lsp::range(r.range.clone(), &ref_source, position_encoding);
LspLocation {
uri: uri.clone(),
range,
}
}));
};
references.extend(
def_use
.get_external_refs(def_fid, Some(name.clone()))
.map(|r| {
let range =
typst_to_lsp::range(r.range.clone(), &ref_source, position_encoding);
LspLocation {
uri: uri.clone(),
range,
}
}),
);
if def_use.is_exported(id) {
ctx.push_dependents(ref_fid);
}
}
}
Some(references)
}
#[cfg(test)]
@@ -112,8 +151,8 @@ mod tests {
#[test]
fn test() {
// goto_definition
snapshot_testing("references", &|world, path| {
let source = get_suitable_source_in_workspace(world, &path).unwrap();
snapshot_testing2("references", &|world, path| {
let source = world.source_by_path(&path).unwrap();
let request = ReferencesRequest {
path: path.clone(),