feat: add def use analysis for external items (#26)

* feat: add def use analysis for external items

* fix: recursive import wildcard
This commit is contained in:
Myriad-Dreamin 2024-03-13 15:05:52 +08:00 committed by GitHub
parent 1f5f38895d
commit f7860bd4b7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 378 additions and 165 deletions

View file

@ -71,6 +71,14 @@ where
self.undo_log.clear();
}
/// Iterates over all `(key, value)` pairs currently stored in the map.
pub fn entries(&self) -> impl Iterator<Item = (&K, &V)> {
self.map.borrow().iter()
}
/// Iterates over all values currently stored in the map.
pub fn values(&self) -> impl Iterator<Item = &V> {
self.map.borrow().values()
}
pub fn insert(&mut self, key: K, value: V) -> bool {
match self.map.borrow_mut().insert(key.clone(), value) {
None => {

View file

@ -41,8 +41,9 @@ mod lexical_hierarchy_tests {
snapshot_testing(set, &|world, path| {
let source = get_suitable_source_in_workspace(world, &path).unwrap();
let result = get_def_use(source);
let result = result.as_ref().map(DefUseSnapshot);
let world: &dyn World = world;
let result = get_def_use(world.track(), source);
let result = result.as_deref().map(DefUseSnapshot);
assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
});

View file

@ -1,13 +1,20 @@
use core::fmt;
use std::{collections::HashMap, ops::Range};
use std::{
collections::{HashMap, HashSet},
ops::{Deref, Range},
sync::Arc,
};
use comemo::Tracked;
use parking_lot::Mutex;
use serde::Serialize;
use typst::syntax::Source;
use typst::{syntax::Source, World};
use typst_ts_core::{path::unix_slash, TypstFileId};
use crate::adt::snapshot_map::SnapshotMap;
use crate::{adt::snapshot_map::SnapshotMap, analysis::find_source_by_import_path};
use super::{
get_lexical_hierarchy, LexicalHierarchy, LexicalKind, LexicalScopeKind, LexicalVarKind,
get_lexical_hierarchy, LexicalHierarchy, LexicalKind, LexicalScopeKind, LexicalVarKind, ModSrc,
};
pub use typst_ts_core::vector::ir::DefId;
@ -50,7 +57,7 @@ impl Serialize for IdentRef {
}
}
#[derive(Serialize)]
#[derive(Serialize, Clone)]
pub struct IdentDef {
name: String,
kind: LexicalKind,
@ -59,9 +66,190 @@ pub struct IdentDef {
// NOTE(review): this span is a diff rendering — both the pre-change and the
// post-change declaration of `ident_defs` appear below; only the
// `(TypstFileId, IdentRef)`-keyed one is the field after this commit.
/// Result of def-use analysis over one source file (plus wildcard imports).
#[derive(Default)]
pub struct DefUseInfo {
ident_defs: indexmap::IndexMap<IdentRef, IdentDef>,
// definitions, keyed by the file they live in plus their ident reference
ident_defs: indexmap::IndexMap<(TypstFileId, IdentRef), IdentDef>,
// references that resolved to a definition id
ident_refs: HashMap<IdentRef, DefId>,
// references that matched no visible definition
undefined_refs: Vec<IdentRef>,
// ids of definitions still in scope at end of module (its exports)
exports_refs: Vec<DefId>,
}
/// Computes def-use information for `source`, following `import "...": *`
/// re-exports through `world`.
///
/// Returns `None` when the lexical hierarchy cannot be built (see
/// `get_def_use_inner`, which also cuts off recursive import cycles).
pub fn get_def_use(world: Tracked<'_, dyn World>, source: Source) -> Option<Arc<DefUseInfo>> {
let ctx = SearchCtx {
world,
searched: Default::default(),
};
get_def_use_inner(&ctx, source)
}
/// Shared state of one def-use search that may span multiple files.
struct SearchCtx<'a> {
/// Access to sources of the compiler world (needed to resolve imports).
world: Tracked<'a, dyn World>,
/// File ids already analyzed; guards against recursive wildcard imports.
searched: Mutex<HashSet<TypstFileId>>,
}
/// Analyzes a single file within the search context `ctx`.
///
/// Returns `None` if this file was already visited in the current search
/// (import cycle) or if its lexical hierarchy cannot be produced.
fn get_def_use_inner<'w>(ctx: &'w SearchCtx<'w>, source: Source) -> Option<Arc<DefUseInfo>> {
let current_id = source.id();
// `insert` returning false means the file is already being processed —
// bail out to break recursive `import ...: *` chains.
if !ctx.searched.lock().insert(current_id) {
return None;
}
let e = get_lexical_hierarchy(source, LexicalScopeKind::DefUse)?;
let mut collector = DefUseCollector {
ctx,
info: DefUseInfo::default(),
id_scope: SnapshotMap::default(),
label_scope: SnapshotMap::default(),
current_id,
current_path: None,
};
collector.scan(&e);
// record whatever is still bound in the top-level value scope as exports
collector.calc_exports();
Some(Arc::new(collector.info))
}
/// Walks a lexical hierarchy and accumulates `DefUseInfo`.
struct DefUseCollector<'a, 'w> {
/// The cross-file search context (world access + visited set).
ctx: &'w SearchCtx<'w>,
/// The information being collected.
info: DefUseInfo,
/// Scope for `<label>` definitions/references.
label_scope: SnapshotMap<String, DefId>,
/// Scope for value (variable/function/module) bindings.
id_scope: SnapshotMap<String, DefId>,
/// Id of the file currently being scanned.
current_id: TypstFileId,
/// Import path of the enclosing `import "…"` item, if any; consumed by a
/// nested wildcard (`*`) item to resolve the external module.
current_path: Option<&'a str>,
}
impl<'a, 'w> DefUseCollector<'a, 'w> {
/// Runs `f` in a child value scope: bindings made inside are rolled back
/// afterwards, mirroring lexical block scoping.
fn enter<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
let id_snap = self.id_scope.snapshot();
let res = f(self);
self.id_scope.rollback_to(id_snap);
res
}
/// Records every name still bound in the top-level value scope as an
/// export of the module being scanned.
fn calc_exports(&mut self) {
self.info.exports_refs = self.id_scope.values().copied().collect();
}
/// Recursively scans the hierarchy, registering definitions and
/// references. `Option` is only used for early exit via `?`.
fn scan(&mut self, e: &'a [LexicalHierarchy]) -> Option<()> {
for e in e {
match &e.info.kind {
// a DefUse hierarchy is expected to contain no headings
LexicalKind::Heading(..) => unreachable!(),
LexicalKind::Var(LexicalVarKind::Label) => self.insert(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_ref(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::Function)
| LexicalKind::Var(LexicalVarKind::Variable)
| LexicalKind::Mod(super::LexicalModKind::PathVar)
| LexicalKind::Mod(super::LexicalModKind::ModuleAlias)
| LexicalKind::Mod(super::LexicalModKind::Ident)
| LexicalKind::Mod(super::LexicalModKind::Alias { .. }) => {
self.insert(Ns::Value, e)
}
LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_ref(Ns::Value, e),
LexicalKind::Block => {
if let Some(e) = &e.children {
// child bindings must not leak out of the block
self.enter(|this| this.scan(e.as_slice()))?;
}
}
LexicalKind::Mod(super::LexicalModKind::Module(p)) => {
// remember the import path so a nested `*` item can
// resolve the external module
match p {
ModSrc::Expr(_) => {}
ModSrc::Path(p) => {
self.current_path = Some(p.deref());
}
}
// todo: process import star
if let Some(e) = &e.children {
self.scan(e.as_slice())?;
}
self.current_path = None;
}
LexicalKind::Mod(super::LexicalModKind::Star) => {
// wildcard import: pull every export of the target module
// into the current value scope
if let Some(path) = self.current_path {
let external_info =
find_source_by_import_path(self.ctx.world, self.current_id, path)
.and_then(|source| {
Some(source.id()).zip(get_def_use_inner(self.ctx, source))
});
if let Some((_, external_info)) = external_info {
for v in &external_info.exports_refs {
// Re-keying by the external FileId loses stacked
// (re-export chain) import information, but that
// is currently not a problem.
let ((ext_id, _), ext_sym) =
external_info.ident_defs.get_index(v.0 as usize).unwrap();
let name = ext_sym.name.clone();
let ext_ref = IdentRef {
name: name.clone(),
range: ext_sym.range.clone(),
};
let (id, ..) = self
.info
.ident_defs
.insert_full((*ext_id, ext_ref), ext_sym.clone());
let id = DefId(id as u64);
self.id_scope.insert(name, id);
}
}
}
}
LexicalKind::Mod(super::LexicalModKind::ExternResolved { .. }) => {}
}
}
Some(())
}
/// Registers a definition in namespace `label` and binds its name in the
/// matching scope.
fn insert(&mut self, label: Ns, e: &LexicalHierarchy) {
let snap = match label {
Ns::Label => &mut self.label_scope,
Ns::Value => &mut self.id_scope,
};
let id_ref = IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
};
// definitions are keyed by (file, ident) so imported symbols from
// different files cannot collide
let (id, ..) = self.info.ident_defs.insert_full(
(self.current_id, id_ref.clone()),
IdentDef {
name: e.info.name.clone(),
kind: e.info.kind.clone(),
range: e.info.range.clone(),
},
);
let id = DefId(id as u64);
snap.insert(e.info.name.clone(), id);
}
/// Resolves a reference against the current scopes; names that resolve go
/// to `ident_refs`, unresolved ones are collected in `undefined_refs`.
fn insert_ref(&mut self, label: Ns, e: &LexicalHierarchy) {
let snap = match label {
Ns::Label => &mut self.label_scope,
Ns::Value => &mut self.id_scope,
};
let id_ref = IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
};
match snap.get(&e.info.name) {
Some(id) => {
self.info.ident_refs.insert(id_ref, *id);
}
None => {
self.info.undefined_refs.push(id_ref);
}
}
}
}
pub struct DefUseSnapshot<'a>(pub &'a DefUseInfo);
@ -98,7 +286,14 @@ impl<'a> Serialize for DefUseSnapshot<'a> {
refs: references.get(&id).unwrap_or(&empty_ref),
};
state.serialize_entry(&ident_ref.to_string(), &entry)?;
state.serialize_entry(
&format!(
"{}@{}",
ident_ref.1,
unix_slash(ident_ref.0.vpath().as_rootless_path())
),
&entry,
)?;
}
if !self.0.undefined_refs.is_empty() {
@ -118,111 +313,3 @@ impl<'a> Serialize for DefUseSnapshot<'a> {
state.end()
}
}
// NOTE(review): this is the removed (pre-commit) single-file variant — it
// takes no `World` and therefore cannot resolve external wildcard imports.
/// Computes def-use information for `source` alone. Returns `None` when the
/// lexical hierarchy cannot be built.
pub fn get_def_use(source: Source) -> Option<DefUseInfo> {
let e = get_lexical_hierarchy(source, LexicalScopeKind::DefUse)?;
let mut collector = DefUseCollector {
info: DefUseInfo::default(),
id_scope: SnapshotMap::default(),
label_scope: SnapshotMap::default(),
};
collector.scan(&e);
Some(collector.info)
}
// NOTE(review): removed (pre-commit) collector — no world access, no
// current-file/current-path tracking.
/// Walks a lexical hierarchy of a single file and accumulates `DefUseInfo`.
struct DefUseCollector {
/// The information being collected.
info: DefUseInfo,
/// Scope for `<label>` definitions/references.
label_scope: SnapshotMap<String, DefId>,
/// Scope for value (variable/function/module) bindings.
id_scope: SnapshotMap<String, DefId>,
}
// NOTE(review): removed (pre-commit) impl — wildcard imports (`Star`) are
// ignored here; the replacement above resolves them through the world.
impl DefUseCollector {
/// Runs `f` in a child value scope: bindings made inside are rolled back
/// afterwards, mirroring lexical block scoping.
fn enter<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
let id_snap = self.id_scope.snapshot();
let res = f(self);
self.id_scope.rollback_to(id_snap);
res
}
/// Recursively scans the hierarchy, registering definitions and
/// references. `Option` is only used for early exit via `?`.
fn scan(&mut self, e: &[LexicalHierarchy]) -> Option<()> {
for e in e {
match &e.info.kind {
// a DefUse hierarchy is expected to contain no headings
LexicalKind::Heading(..) => unreachable!(),
LexicalKind::Var(LexicalVarKind::Label) => self.insert(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_ref(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::Function)
| LexicalKind::Var(LexicalVarKind::Variable)
| LexicalKind::Mod(super::LexicalModKind::PathVar)
| LexicalKind::Mod(super::LexicalModKind::ModuleAlias)
| LexicalKind::Mod(super::LexicalModKind::Ident)
| LexicalKind::Mod(super::LexicalModKind::Alias { .. }) => {
self.insert(Ns::Value, e)
}
LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_ref(Ns::Value, e),
LexicalKind::Block => {
if let Some(e) = &e.children {
// child bindings must not leak out of the block
self.enter(|this| this.scan(e.as_slice()))?;
}
}
LexicalKind::Mod(super::LexicalModKind::Module(..)) => {
// todo: process import star
if let Some(e) = &e.children {
self.scan(e.as_slice())?;
}
}
LexicalKind::Mod(super::LexicalModKind::Star) => {}
}
}
Some(())
}
/// Registers a definition in namespace `label` and binds its name in the
/// matching scope.
fn insert(&mut self, label: Ns, e: &LexicalHierarchy) {
let snap = match label {
Ns::Label => &mut self.label_scope,
Ns::Value => &mut self.id_scope,
};
let id_ref = IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
};
let (id, old_def) = self.info.ident_defs.insert_full(
id_ref.clone(),
IdentDef {
name: e.info.name.clone(),
kind: e.info.kind.clone(),
range: e.info.range.clone(),
},
);
// re-inserting the same (name, range) must not change the kind
if let Some(old_def) = old_def {
assert_eq!(old_def.kind, e.info.kind);
}
let id = DefId(id as u64);
snap.insert(e.info.name.clone(), id);
}
/// Resolves a reference against the current scopes; names that resolve go
/// to `ident_refs`, unresolved ones are collected in `undefined_refs`.
fn insert_ref(&mut self, label: Ns, e: &LexicalHierarchy) {
let snap = match label {
Ns::Label => &mut self.label_scope,
Ns::Value => &mut self.id_scope,
};
let id_ref = IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
};
match snap.get(&e.info.name) {
Some(id) => {
self.info.ident_refs.insert(id_ref, *id);
}
None => {
self.info.undefined_refs.push(id_ref);
}
}
}
}

View file

@ -6,6 +6,27 @@ use typst_ts_core::{typst::prelude::EcoVec, TypstFileId};
use crate::prelude::*;
/// Resolves an import path string to a `Source` in `world`.
///
/// Relative paths are joined onto the virtual path of `current`; absolute
/// paths are taken as-is within the same package. Returns `None` for package
/// specifiers (`@…`, not yet supported) or when the file cannot be loaded.
pub fn find_source_by_import_path(
world: Tracked<'_, dyn World>,
current: TypstFileId,
import_path: &str,
) -> Option<Source> {
if import_path.starts_with('@') {
// todo: import from package
return None;
}
let path = Path::new(import_path);
let vpath = if path.is_relative() {
current.vpath().join(path)
} else {
VirtualPath::new(path)
};
// stay inside the package of the importing file
let id = TypstFileId::new(current.package().cloned(), vpath);
world.source(id).ok()
}
// NOTE(review): this span is a diff rendering — the hunk marker below elides
// part of the signature, and both the removed inline resolution branch and
// its replacement (the call to `find_source_by_import_path`) are shown.
/// Resolves the source of an import node when its path is a string literal.
pub fn find_source_by_import(
world: Tracked<'_, dyn World>,
current: TypstFileId,
@ -14,24 +35,7 @@ pub fn find_source_by_import(
// todo: this could be valid: import("path.typ"), where v is parenthesized
let v = import_node.source();
match v {
ast::Expr::Str(s) => {
let s = s.get();
if s.starts_with('@') {
// todo: import from package
return None;
}
let path = Path::new(s.as_str());
let vpath = if path.is_relative() {
current.vpath().join(path)
} else {
VirtualPath::new(path)
};
let id = TypstFileId::new(current.package().cloned(), vpath);
world.source(id).ok()
}
ast::Expr::Str(s) => find_source_by_import_path(world, current, s.get().as_str()),
// non-literal sources (expressions) are not resolved here
_ => None,
}
}

View file

@ -14,7 +14,10 @@ use typst::{
},
util::LazyHash,
};
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
use typst_ts_core::{
typst::prelude::{eco_vec, EcoVec},
TypstFileId,
};
pub(crate) fn get_lexical_hierarchy(
source: Source,
@ -59,7 +62,7 @@ pub enum ModSrc {
Expr(Box<ImportAlias>),
/// `import "" ...`
/// ^^^^^^^^^^^^^
Path(Box<Path>),
Path(Box<str>),
}
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
@ -81,6 +84,13 @@ pub enum LexicalModKind {
/// `import "foo": *`
/// ^
Star,
/// the symbol inside of `import "foo": *`
/// ^
ExternResolved {
#[serde(skip_serializing, skip_deserializing)]
at: Option<TypstFileId>,
in_mod_kind: Box<LexicalModKind>,
},
}
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
@ -143,11 +153,15 @@ impl LexicalKind {
LexicalKind::Mod(LexicalModKind::Ident)
}
/// Kind for the wildcard marker of `import "foo": *`.
const fn module_star() -> LexicalKind {
LexicalKind::Mod(LexicalModKind::Star)
}
/// Kind for an import whose module source is an expression (alias form).
fn module_expr(path: Box<ImportAlias>) -> LexicalKind {
LexicalKind::Mod(LexicalModKind::Module(ModSrc::Expr(path)))
}
fn module(path: Box<Path>) -> LexicalKind {
fn module(path: Box<str>) -> LexicalKind {
LexicalKind::Mod(LexicalModKind::Module(ModSrc::Path(path)))
}
@ -558,8 +572,7 @@ impl LexicalHierarchyWorker {
match src {
ast::Expr::Str(e) => {
let e = e.get();
let e = Path::new(e.as_ref());
(String::new(), LexicalKind::module(e.into()))
(String::new(), LexicalKind::module(e.as_ref().into()))
}
src => {
let e = node
@ -669,7 +682,7 @@ impl LexicalHierarchyWorker {
let v = node.find(wildcard.span()).context("no pos")?;
self.push_leaf(LexicalInfo {
name: "*".to_string(),
kind: LexicalKind::module_path(),
kind: LexicalKind::module_star(),
range: v.range(),
});
}

View file

@ -0,0 +1,2 @@
#import "base.typ": *
#let y = 2;

View file

@ -0,0 +1,8 @@
// path: base.typ
#let x = 1;
#x
-----
#import "base.typ"
#base
#import "base.typ": *
#base, #x

View file

@ -0,0 +1,10 @@
// path: base.typ
#let x = 1;
#x
-----
// path: base2.typ
#import "base.typ": *
#let y = 2;
-----
#import "base2.typ": *
#x, #y

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/base.typ
---
{
"x@5..6": {
"x@5..6@s0.typ": {
"def": {
"kind": {
"Var": "Variable"

View file

@ -0,0 +1,17 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/base2.typ
---
{
"y@28..29@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "y",
"range": "28:29"
},
"refs": []
}
}

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_alias.typ
---
{
"base@8..18": {
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
@ -17,7 +17,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_alias.typ
"base@55..59"
]
},
"foo@49..52": {
"foo@49..52@s0.typ": {
"def": {
"kind": {
"Mod": "ModuleAlias"

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_alias_both.typ
---
{
"base@8..18": {
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
@ -17,7 +17,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_alias_both.typ
"base@65..69"
]
},
"foo@49..52": {
"foo@49..52@s0.typ": {
"def": {
"kind": {
"Mod": "ModuleAlias"
@ -27,7 +27,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_alias_both.typ
},
"refs": []
},
"foo@54..62": {
"foo@54..62@s0.typ": {
"def": {
"kind": {
"Mod": {

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_ident.typ
---
{
"base@8..18": {
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
@ -17,7 +17,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_ident.typ
"base@51..55"
]
},
"x@47..48": {
"x@47..48@s0.typ": {
"def": {
"kind": {
"Mod": "Ident"

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_ident_alias.typ
---
{
"base@8..18": {
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"
@ -17,7 +17,7 @@ input_file: crates/tinymist-query/src/fixtures/def_use/import_ident_alias.typ
"base@58..62"
]
},
"foo@47..55": {
"foo@47..55@s0.typ": {
"def": {
"kind": {
"Mod": {

View file

@ -0,0 +1,32 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_star.typ
---
{
"base@8..18@s1.typ": {
"def": {
"kind": {
"Mod": "PathVar"
},
"name": "base",
"range": "8:18"
},
"refs": [
"base@21..25",
"base@51..55"
]
},
"x@5..6@base.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "x",
"range": "5:6"
},
"refs": [
"x@58..59"
]
}
}

View file

@ -0,0 +1,31 @@
---
source: crates/tinymist-query/src/analysis.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/def_use/import_star_recursive.typ
---
{
"x@5..6@base.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "x",
"range": "5:6"
},
"refs": [
"x@25..26"
]
},
"y@28..29@base2.typ": {
"def": {
"kind": {
"Var": "Variable"
},
"name": "y",
"range": "28:29"
},
"refs": [
"y@29..30"
]
}
}

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/base.typ
---
{
"x@5..6": {
"x@5..6@s0.typ": {
"def": {
"kind": {
"Var": "Variable"

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
---
{
"a@29..30": {
"a@29..30@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
@ -14,7 +14,7 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
},
"refs": []
},
"a@6..7": {
"a@6..7@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
@ -26,7 +26,7 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
"a@41..42"
]
},
"b@32..33": {
"b@32..33@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
@ -36,7 +36,7 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
},
"refs": []
},
"b@9..10": {
"b@9..10@s0.typ": {
"def": {
"kind": {
"Var": "Variable"

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/dict.typ
---
{
"x@18..19": {
"x@18..19@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
@ -14,7 +14,7 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/dict.typ
},
"refs": []
},
"z@5..6": {
"z@5..6@s0.typ": {
"def": {
"kind": {
"Var": "Variable"

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
---
{
"a@20..21": {
"a@20..21@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
@ -16,7 +16,7 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
"a@25..26"
]
},
"f@18..19": {
"f@18..19@s0.typ": {
"def": {
"kind": {
"Var": "Function"
@ -26,7 +26,7 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
},
"refs": []
},
"x@5..6": {
"x@5..6@s0.typ": {
"def": {
"kind": {
"Var": "Variable"

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/import.typ
---
{
"base@8..18": {
"base@8..18@s0.typ": {
"def": {
"kind": {
"Mod": "PathVar"

View file

@ -4,7 +4,7 @@ expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/redefine.typ
---
{
"x@18..19": {
"x@18..19@s0.typ": {
"def": {
"kind": {
"Var": "Variable"
@ -14,7 +14,7 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/redefine.typ
},
"refs": []
},
"x@5..6": {
"x@5..6@s0.typ": {
"def": {
"kind": {
"Var": "Variable"

View file

@ -220,7 +220,7 @@ mod tests {
path = content.strip_prefix("path:").map(|e| e.trim().to_owned())
};
let path = path.unwrap_or_else(|| format!("/source{i}.typ"));
let path = path.unwrap_or_else(|| format!("/s{i}.typ"));
let pw = root.join(Path::new(&path));
world