mirror of https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-07-19 18:55:01 +00:00

feat: add def use analysis for module items (#25)

* feat: analyze lexical hierarchy for import items
* feat: add def use analysis for module items

parent 32d74194c0
commit 1f5f38895d

28 changed files with 666 additions and 69 deletions
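The two bullets above summarize the change: the lexical hierarchy pass now records import items, and a def-use pass built on top of it resolves references against those definitions. As orientation only, here is a minimal, self-contained sketch of the bookkeeping idea behind the DefUseCollector hunks below; the names (MiniCollector, insert, insert_ref) are invented for illustration and are not the crate's API.

    // Sketch only: definitions get fresh ids, references resolve to the most
    // recent definition of the same name, anything unresolved is kept aside
    // (it surfaces under the "<nil>" entry in the snapshots of this commit).
    use std::collections::HashMap;

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct DefId(usize);

    #[derive(Default)]
    struct MiniCollector {
        scope: HashMap<String, DefId>,     // visible definitions by name
        refs: HashMap<DefId, Vec<String>>, // resolved references per definition
        undefined: Vec<String>,            // references with no visible definition
        next: usize,
    }

    impl MiniCollector {
        fn insert(&mut self, name: &str) -> DefId {
            let id = DefId(self.next);
            self.next += 1;
            self.scope.insert(name.to_string(), id);
            id
        }

        fn insert_ref(&mut self, name: &str) {
            match self.scope.get(name) {
                Some(id) => self.refs.entry(*id).or_default().push(name.to_string()),
                None => self.undefined.push(name.to_string()),
            }
        }
    }

    fn main() {
        // Roughly what the def_use/base.typ fixture exercises: `#let x = 1; #x`
        let mut c = MiniCollector::default();
        c.insert("x");
        c.insert_ref("x");
        c.insert_ref("y"); // would land in the undefined bucket
        assert_eq!(c.undefined, vec!["y".to_string()]);
    }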
@@ -13,6 +13,7 @@ pub use def_use::*;
 
 #[cfg(test)]
 mod lexical_hierarchy_tests {
+    use def_use::get_def_use;
     use def_use::DefUseSnapshot;
 
     use crate::analysis::def_use;
@@ -35,14 +36,19 @@ mod lexical_hierarchy_tests {
     }
 
     #[test]
-    fn def_use() {
-        snapshot_testing("lexical_hierarchy", &|world, path| {
-            let source = get_suitable_source_in_workspace(world, &path).unwrap();
+    fn test_def_use() {
+        fn def_use(set: &str) {
+            snapshot_testing(set, &|world, path| {
+                let source = get_suitable_source_in_workspace(world, &path).unwrap();
 
-            let result = def_use::get_def_use(source);
-            let result = result.as_ref().map(DefUseSnapshot);
+                let result = get_def_use(source);
+                let result = result.as_ref().map(DefUseSnapshot);
 
-            assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
-        });
+                assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
+            });
+        }
+
+        def_use("lexical_hierarchy");
+        def_use("def_use");
     }
 }

@@ -6,7 +6,9 @@ use typst::syntax::Source;
 
 use crate::adt::snapshot_map::SnapshotMap;
 
-use super::{get_lexical_hierarchy, LexicalHierarchy, LexicalKind, LexicalScopeKind};
+use super::{
+    get_lexical_hierarchy, LexicalHierarchy, LexicalKind, LexicalScopeKind, LexicalVarKind,
+};
 
 pub use typst_ts_core::vector::ir::DefId;
 
@@ -21,6 +23,20 @@ pub struct IdentRef {
     range: Range<usize>,
 }
 
+impl PartialOrd for IdentRef {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for IdentRef {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.name
+            .cmp(&other.name)
+            .then_with(|| self.range.start.cmp(&other.range.start))
+    }
+}
+
 impl fmt::Display for IdentRef {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{}@{:?}", self.name, self.range)
@@ -54,13 +70,17 @@ impl<'a> Serialize for DefUseSnapshot<'a> {
     fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
         use serde::ser::SerializeMap;
         // HashMap<IdentRef, DefId>
-        let references: HashMap<DefId, Vec<IdentRef>> = {
+        let mut references: HashMap<DefId, Vec<IdentRef>> = {
             let mut map = HashMap::new();
             for (k, v) in &self.0.ident_refs {
                 map.entry(*v).or_insert_with(Vec::new).push(k.clone());
             }
             map
         };
+        // sort
+        for (_, v) in references.iter_mut() {
+            v.sort();
+        }
 
         #[derive(Serialize)]
         struct DefUseEntry<'a> {
@@ -82,13 +102,15 @@ impl<'a> Serialize for DefUseSnapshot<'a> {
         }
 
         if !self.0.undefined_refs.is_empty() {
+            let mut undefined_refs = self.0.undefined_refs.clone();
+            undefined_refs.sort();
             let entry = DefUseEntry {
                 def: &IdentDef {
                     name: "<nil>".to_string(),
                     kind: LexicalKind::Block,
                     range: 0..0,
                 },
-                refs: &self.0.undefined_refs,
+                refs: &undefined_refs,
             };
             state.serialize_entry("<nil>", &entry)?;
         }
@@ -126,17 +148,31 @@ impl DefUseCollector {
 
     fn scan(&mut self, e: &[LexicalHierarchy]) -> Option<()> {
         for e in e {
-            match e.info.kind {
+            match &e.info.kind {
                 LexicalKind::Heading(..) => unreachable!(),
-                LexicalKind::Label => self.insert(Ns::Label, e),
-                LexicalKind::LabelRef => self.insert_ref(Ns::Label, e),
-                LexicalKind::Function | LexicalKind::Variable => self.insert(Ns::Value, e),
-                LexicalKind::ValRef => self.insert_ref(Ns::Value, e),
+                LexicalKind::Var(LexicalVarKind::Label) => self.insert(Ns::Label, e),
+                LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_ref(Ns::Label, e),
+                LexicalKind::Var(LexicalVarKind::Function)
+                | LexicalKind::Var(LexicalVarKind::Variable)
+                | LexicalKind::Mod(super::LexicalModKind::PathVar)
+                | LexicalKind::Mod(super::LexicalModKind::ModuleAlias)
+                | LexicalKind::Mod(super::LexicalModKind::Ident)
+                | LexicalKind::Mod(super::LexicalModKind::Alias { .. }) => {
+                    self.insert(Ns::Value, e)
+                }
+                LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_ref(Ns::Value, e),
                 LexicalKind::Block => {
                     if let Some(e) = &e.children {
                         self.enter(|this| this.scan(e.as_slice()))?;
                     }
                 }
+                LexicalKind::Mod(super::LexicalModKind::Module(..)) => {
+                    // todo: process import star
+                    if let Some(e) = &e.children {
+                        self.scan(e.as_slice())?;
+                    }
+                }
+                LexicalKind::Mod(super::LexicalModKind::Star) => {}
             }
         }
 
@@ -157,7 +193,7 @@ impl DefUseCollector {
             id_ref.clone(),
             IdentDef {
                 name: e.info.name.clone(),
-                kind: e.info.kind,
+                kind: e.info.kind.clone(),
                 range: e.info.range.clone(),
             },
         );

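The new Ord implementation for IdentRef (name first, then range start), together with the sorting added to the snapshot serializer above, makes the serialized def-use output deterministic. A self-contained illustration of that ordering, using a standalone copy of the type purely for demonstration:

    use std::cmp::Ordering;
    use std::ops::Range;

    #[derive(Debug, Clone, PartialEq, Eq)]
    struct IdentRefDemo {
        name: String,
        range: Range<usize>,
    }

    impl PartialOrd for IdentRefDemo {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    impl Ord for IdentRefDemo {
        // Same comparison as the IdentRef impl in this commit.
        fn cmp(&self, other: &Self) -> Ordering {
            self.name
                .cmp(&other.name)
                .then_with(|| self.range.start.cmp(&other.range.start))
        }
    }

    fn main() {
        let mut refs = vec![
            IdentRefDemo { name: "x".into(), range: 14..15 },
            IdentRefDemo { name: "base".into(), range: 21..25 },
            IdentRefDemo { name: "x".into(), range: 5..6 },
        ];
        refs.sort();
        // Prints base@21..25, x@5..6, x@14..15, matching the `{}@{:?}` Display format.
        for r in &refs {
            println!("{}@{:?}", r.name, r.range);
        }
    }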
@@ -1,11 +1,17 @@
-use std::ops::{Deref, Range};
+use std::{
+    ops::{Deref, Range},
+    path::Path,
+};
 
-use anyhow::anyhow;
+use anyhow::{anyhow, Context};
 use log::info;
 use lsp_types::SymbolKind;
 use serde::{Deserialize, Serialize};
 use typst::{
-    syntax::{ast, LinkedNode, Source, SyntaxKind},
+    syntax::{
+        ast::{self, AstNode},
+        LinkedNode, Source, SyntaxKind,
+    },
     util::LazyHash,
 };
 use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
@@ -40,27 +46,128 @@ pub(crate) fn get_lexical_hierarchy(
     res.map(|_| worker.stack.pop().unwrap().1)
 }
 
-#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+pub struct ImportAlias {
+    pub name: String,
+    pub range: Range<usize>,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+pub enum ModSrc {
+    /// `import cetz.draw ...`
+    ///  ^^^^^^^^^^^^^^^^^^^^
+    Expr(Box<ImportAlias>),
+    /// `import "" ...`
+    ///  ^^^^^^^^^^^^^
+    Path(Box<Path>),
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+pub enum LexicalModKind {
+    /// See [`ModSrc`]
+    Module(ModSrc),
+    /// `import "foo" as bar;`
+    ///                   ^^^
+    ModuleAlias,
+    /// `import "foo.typ"`
+    ///          ^^^
+    PathVar,
+    /// `import "foo": bar`
+    ///                ^^^
+    Ident,
+    /// `import "foo": bar as baz`
+    ///                ^^^^^^^^^^
+    Alias { target: Box<ImportAlias> },
+    /// `import "foo": *`
+    ///                ^
+    Star,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+pub enum LexicalVarKind {
+    /// `#foo`
+    ///   ^^^
+    ValRef,
+    /// `@foo`
+    ///   ^^^
+    LabelRef,
+    /// `<foo>`
+    ///   ^^^
+    Label,
+    /// `let foo`
+    ///      ^^^
+    Variable,
+    /// `let foo()`
+    ///      ^^^
+    Function,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
 pub(crate) enum LexicalKind {
     Heading(i16),
-    ValRef,
-    LabelRef,
-    Variable,
-    Function,
-    Label,
+    Var(LexicalVarKind),
+    Mod(LexicalModKind),
     Block,
 }
 
+impl LexicalKind {
+    const fn label() -> LexicalKind {
+        LexicalKind::Var(LexicalVarKind::Label)
+    }
+
+    const fn label_ref() -> LexicalKind {
+        LexicalKind::Var(LexicalVarKind::LabelRef)
+    }
+
+    const fn val_ref() -> LexicalKind {
+        LexicalKind::Var(LexicalVarKind::ValRef)
+    }
+
+    const fn function() -> LexicalKind {
+        LexicalKind::Var(LexicalVarKind::Function)
+    }
+
+    const fn variable() -> LexicalKind {
+        LexicalKind::Var(LexicalVarKind::Variable)
+    }
+
+    const fn module_as() -> LexicalKind {
+        LexicalKind::Mod(LexicalModKind::ModuleAlias)
+    }
+
+    const fn module_path() -> LexicalKind {
+        LexicalKind::Mod(LexicalModKind::PathVar)
+    }
+
+    const fn module_import() -> LexicalKind {
+        LexicalKind::Mod(LexicalModKind::Ident)
+    }
+
+    fn module_expr(path: Box<ImportAlias>) -> LexicalKind {
+        LexicalKind::Mod(LexicalModKind::Module(ModSrc::Expr(path)))
+    }
+
+    fn module(path: Box<Path>) -> LexicalKind {
+        LexicalKind::Mod(LexicalModKind::Module(ModSrc::Path(path)))
+    }
+
+    fn module_import_alias(alias: ImportAlias) -> LexicalKind {
+        LexicalKind::Mod(LexicalModKind::Alias {
+            target: Box::new(alias),
+        })
+    }
+}
+
 impl TryFrom<LexicalKind> for SymbolKind {
     type Error = ();
 
     fn try_from(value: LexicalKind) -> Result<Self, Self::Error> {
         match value {
             LexicalKind::Heading(..) => Ok(SymbolKind::NAMESPACE),
-            LexicalKind::Variable => Ok(SymbolKind::VARIABLE),
-            LexicalKind::Function => Ok(SymbolKind::FUNCTION),
-            LexicalKind::Label => Ok(SymbolKind::CONSTANT),
-            LexicalKind::ValRef | LexicalKind::LabelRef | LexicalKind::Block => Err(()),
+            LexicalKind::Var(LexicalVarKind::Variable) => Ok(SymbolKind::VARIABLE),
+            LexicalKind::Var(LexicalVarKind::Function) => Ok(SymbolKind::FUNCTION),
+            LexicalKind::Var(LexicalVarKind::Label) => Ok(SymbolKind::CONSTANT),
+            LexicalKind::Var(..) | LexicalKind::Mod(..) | LexicalKind::Block => Err(()),
         }
     }
 }
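The doc comments above pin each LexicalModKind variant to a span of Typst's import syntax, and the def_use fixtures added later in this commit exercise each form: a bare `#import "base.typ"` yields a PathVar definition named base, `#import "base.typ" as foo` a ModuleAlias definition, `#import "base.typ": x` an Ident definition, and `#import "base.typ": x as foo` an Alias definition whose target records x (see the import_* snapshots below). A standalone sketch of those shapes, with a pared-down stand-in for the real enum:

    // Illustration only; ModKindDemo is a simplified stand-in, not the crate's enum.
    #[allow(dead_code)]
    #[derive(Debug)]
    enum ModKindDemo {
        PathVar,                  // `import "foo.typ"`         -> binds `foo`
        ModuleAlias,              // `import "foo" as bar`      -> binds `bar`
        Ident,                    // `import "foo": bar`        -> binds `bar`
        Alias { target: String }, // `import "foo": bar as baz` -> binds `baz`, target `bar`
        Star,                     // `import "foo": *`
    }

    fn main() {
        // `#import "base.typ"` followed by `#import "base.typ": x as foo`,
        // as in the import_ident_alias fixture:
        let bindings = vec![
            ("base", ModKindDemo::PathVar),
            ("foo", ModKindDemo::Alias { target: "x".to_string() }),
        ];
        for (name, kind) in &bindings {
            println!("{name}: {kind:?}");
        }
    }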
@@ -82,6 +189,10 @@ impl LexicalScopeKind {
         matches!(self, Self::DefUse)
     }
 
+    fn affect_import(&self) -> bool {
+        matches!(self, Self::DefUse)
+    }
+
     fn affect_markup(&self) -> bool {
         matches!(self, Self::Braced)
     }
@@ -175,6 +286,7 @@ enum IdentContext {
     Ref,
     Func,
     Var,
+    ModImport,
     Params,
 }
 
@@ -186,6 +298,14 @@ struct LexicalHierarchyWorker {
 }
 
 impl LexicalHierarchyWorker {
+    fn push_leaf(&mut self, symbol: LexicalInfo) {
+        let current = &mut self.stack.last_mut().unwrap().1;
+        current.push(LexicalHierarchy {
+            info: symbol,
+            children: None,
+        });
+    }
+
     fn symbreak(&mut self) {
         let (symbol, children) = self.stack.pop().unwrap();
         let current = &mut self.stack.last_mut().unwrap().1;
@@ -235,8 +355,12 @@ impl LexicalHierarchyWorker {
         self.stack.push((symbol, eco_vec![]));
         let stack_height = self.stack.len();
 
-        for child in node.children() {
-            self.get_symbols(child)?;
+        if node.kind() == SyntaxKind::ModuleImport {
+            self.get_symbols_in_import(node)?;
+        } else {
+            for child in node.children() {
+                self.get_symbols(child)?;
+            }
         }
 
         if is_heading {
@@ -370,11 +494,11 @@ impl LexicalHierarchyWorker {
                     .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
                 let name = ast_node.get().to_string();
 
-                (name, LexicalKind::Label)
+                (name, LexicalKind::label())
             }
             SyntaxKind::RefMarker if self.g.affect_ref() => {
                 let name = node.text().trim_start_matches('@').to_owned();
-                (name, LexicalKind::LabelRef)
+                (name, LexicalKind::label_ref())
             }
             SyntaxKind::Ident if self.g.affect_symbol() => {
                 let ast_node = node
@@ -382,14 +506,32 @@ impl LexicalHierarchyWorker {
                    .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
                 let name = ast_node.get().to_string();
                 let kind = match self.ident_context {
-                    IdentContext::Ref if self.g.affect_ref() => LexicalKind::ValRef,
-                    IdentContext::Func => LexicalKind::Function,
-                    IdentContext::Var | IdentContext::Params => LexicalKind::Variable,
+                    IdentContext::Ref if self.g.affect_ref() => LexicalKind::val_ref(),
+                    IdentContext::Func => LexicalKind::function(),
+                    IdentContext::Var | IdentContext::Params => LexicalKind::variable(),
+                    IdentContext::ModImport => LexicalKind::module_import(),
                     _ => return Ok(None),
                 };
 
                 (name, kind)
             }
+            SyntaxKind::RenamedImportItem if self.g.affect_import() => {
+                let src = node
+                    .cast::<ast::RenamedImportItem>()
+                    .ok_or_else(|| anyhow!("cast to renamed import item failed: {:?}", node))?;
+
+                let name = src.new_name().get().to_string();
+
+                let target_name = src.original_name();
+                let target_name_node = node.find(target_name.span()).context("no pos")?;
+                (
+                    name,
+                    LexicalKind::module_import_alias(ImportAlias {
+                        name: target_name.get().to_string(),
+                        range: target_name_node.range(),
+                    }),
+                )
+            }
             SyntaxKind::Equation | SyntaxKind::Raw | SyntaxKind::BlockComment
                 if self.g.affect_markup() =>
             {
@@ -407,6 +549,31 @@ impl LexicalHierarchyWorker {
             {
                 (String::new(), LexicalKind::Block)
             }
+            SyntaxKind::ModuleImport if self.g.affect_import() => {
+                let src = node
+                    .cast::<ast::ModuleImport>()
+                    .ok_or_else(|| anyhow!("cast to module import failed: {:?}", node))?
+                    .source();
+
+                match src {
+                    ast::Expr::Str(e) => {
+                        let e = e.get();
+                        let e = Path::new(e.as_ref());
+                        (String::new(), LexicalKind::module(e.into()))
+                    }
+                    src => {
+                        let e = node
+                            .find(src.span())
+                            .ok_or_else(|| anyhow!("find expression failed: {:?}", src))?;
+                        let e = ImportAlias {
+                            name: String::new(),
+                            range: e.range(),
+                        };
+
+                        (String::new(), LexicalKind::module_expr(e.into()))
+                    }
+                }
+            }
             SyntaxKind::Markup => {
                 let name = node.get().to_owned().into_text().to_string();
                 if name.is_empty() {
@@ -433,6 +600,87 @@ impl LexicalHierarchyWorker {
             range: node.range(),
         }))
     }
 
+    fn get_symbols_in_import(&mut self, node: LinkedNode) -> anyhow::Result<()> {
+        // todo: other kind
+        if self.g != LexicalScopeKind::DefUse {
+            return Ok(());
+        }
+
+        let import_node = node.cast::<ast::ModuleImport>().context("not a import")?;
+        let v = import_node.source();
+        let v_linked = node.find(v.span()).context("no source pos")?;
+        match v {
+            ast::Expr::Str(..) => {}
+            _ => {
+                self.get_symbols_with(v_linked.clone(), IdentContext::Ref)?;
+            }
+        }
+
+        let imports = import_node.imports();
+        if let Some(name) = import_node.new_name() {
+            // push `import "foo" as bar;`
+            //                       ^^^
+            let import_node = node.find(name.span()).context("no pos")?;
+            self.push_leaf(LexicalInfo {
+                name: name.get().to_string(),
+                kind: LexicalKind::module_as(),
+                range: import_node.range(),
+            });
+
+            // note: we can have both:
+            // `import "foo" as bar;`
+            // `import "foo": bar as baz;`
+        } else if imports.is_none() {
+            let v = import_node.source();
+            match v {
+                ast::Expr::Str(e) => {
+                    let e = e.get();
+                    let e = Path::new(e.as_ref())
+                        .file_name()
+                        .context("no file name")?
+                        .to_string_lossy();
+                    let e = e.as_ref();
+                    let e = e.strip_suffix(".typ").context("no suffix")?;
+                    // return (e == name).then_some(ImportRef::Path(v));
+                    self.push_leaf(LexicalInfo {
+                        name: e.to_string(),
+                        kind: LexicalKind::module_path(),
+                        range: v_linked.range(),
+                    });
+                }
+                _ => {
+                    // todo: import expr?
+                }
+            }
+            return Ok(());
+        };
+
+        let Some(imports) = imports else {
+            return Ok(());
+        };
+
+        match imports {
+            ast::Imports::Wildcard => {
+                let wildcard = node
+                    .children()
+                    .find(|node| node.kind() == SyntaxKind::Star)
+                    .context("no star")?;
+                let v = node.find(wildcard.span()).context("no pos")?;
+                self.push_leaf(LexicalInfo {
+                    name: "*".to_string(),
+                    kind: LexicalKind::module_path(),
+                    range: v.range(),
+                });
+            }
+            ast::Imports::Items(items) => {
+                let n = node.find(items.span()).context("no pos")?;
+                self.get_symbols_with(n, IdentContext::ModImport)?;
+            }
+        }
+
+        Ok(())
+    }
 }
 
 fn symbreak(sym: LexicalInfo, curr: EcoVec<LexicalHierarchy>) -> LexicalHierarchy {

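In the bare-import branch above, the bound variable name is derived from the path string: take the file name and strip the `.typ` suffix, which is why `#import "base.typ"` defines base in the fixtures and snapshots. A self-contained sketch of just that derivation (the helper name is invented):

    use std::path::Path;

    // Mirrors the file_name + strip_suffix(".typ") steps in the branch above.
    fn module_name_of(import_path: &str) -> Option<String> {
        let file_name = Path::new(import_path).file_name()?.to_string_lossy();
        let name = file_name.strip_suffix(".typ")?;
        Some(name.to_string())
    }

    fn main() {
        assert_eq!(module_name_of("base.typ").as_deref(), Some("base"));
        assert_eq!(module_name_of("foo/bar.typ").as_deref(), Some("bar"));
        assert_eq!(module_name_of("no_suffix"), None);
    }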
@@ -35,7 +35,7 @@ fn filter_document_symbols(
             DocumentSymbol {
                 name: e.info.name.clone(),
                 detail: None,
-                kind: e.info.kind.try_into().unwrap(),
+                kind: e.info.kind.clone().try_into().unwrap(),
                 tags: None,
                 deprecated: None,
                 range: rng,

crates/tinymist-query/src/fixtures/def_use/base.typ (new file, +3)
@@ -0,0 +1,3 @@
+// most simple def-use case
+#let x = 1;
+#x

@@ -0,0 +1,4 @@
+#import "base.typ"
+#base
+#import "base.typ" as foo
+#base, #foo

@@ -0,0 +1,4 @@
+#import "base.typ"
+#base
+#import "base.typ" as foo: x as foo
+#base, #foo

@@ -0,0 +1,4 @@
+#import "base.typ"
+#base
+#import "base.typ": x
+#base, #x

@@ -0,0 +1,4 @@
+#import "base.typ"
+#base
+#import "base.typ": x as foo
+#base, #foo

@@ -0,0 +1,19 @@
+---
+source: crates/tinymist-query/src/analysis.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/def_use/base.typ
+---
+{
+  "x@5..6": {
+    "def": {
+      "kind": {
+        "Var": "Variable"
+      },
+      "name": "x",
+      "range": "5:6"
+    },
+    "refs": [
+      "x@14..15"
+    ]
+  }
+}

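The keys in this snapshot come from IdentRef's Display implementation earlier in the diff (`{}@{:?}` over the name and byte range): `x@5..6` is the identifier x at offsets 5..6 of the fixture, and its single reference `x@14..15` is the `#x` usage. A tiny reproduction of the key format (helper name invented):

    fn ident_key(name: &str, range: std::ops::Range<usize>) -> String {
        format!("{}@{:?}", name, range)
    }

    fn main() {
        assert_eq!(ident_key("x", 5..6), "x@5..6");
    }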
@@ -0,0 +1,32 @@
+---
+source: crates/tinymist-query/src/analysis.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/def_use/import_alias.typ
+---
+{
+  "base@8..18": {
+    "def": {
+      "kind": {
+        "Mod": "PathVar"
+      },
+      "name": "base",
+      "range": "8:18"
+    },
+    "refs": [
+      "base@21..25",
+      "base@55..59"
+    ]
+  },
+  "foo@49..52": {
+    "def": {
+      "kind": {
+        "Mod": "ModuleAlias"
+      },
+      "name": "foo",
+      "range": "49:52"
+    },
+    "refs": [
+      "foo@62..65"
+    ]
+  }
+}

@@ -0,0 +1,49 @@
+---
+source: crates/tinymist-query/src/analysis.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/def_use/import_alias_both.typ
+---
+{
+  "base@8..18": {
+    "def": {
+      "kind": {
+        "Mod": "PathVar"
+      },
+      "name": "base",
+      "range": "8:18"
+    },
+    "refs": [
+      "base@21..25",
+      "base@65..69"
+    ]
+  },
+  "foo@49..52": {
+    "def": {
+      "kind": {
+        "Mod": "ModuleAlias"
+      },
+      "name": "foo",
+      "range": "49:52"
+    },
+    "refs": []
+  },
+  "foo@54..62": {
+    "def": {
+      "kind": {
+        "Mod": {
+          "Alias": {
+            "target": {
+              "name": "x",
+              "range": "54:55"
+            }
+          }
+        }
+      },
+      "name": "foo",
+      "range": "54:62"
+    },
+    "refs": [
+      "foo@72..75"
+    ]
+  }
+}

@@ -0,0 +1,32 @@
+---
+source: crates/tinymist-query/src/analysis.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/def_use/import_ident.typ
+---
+{
+  "base@8..18": {
+    "def": {
+      "kind": {
+        "Mod": "PathVar"
+      },
+      "name": "base",
+      "range": "8:18"
+    },
+    "refs": [
+      "base@21..25",
+      "base@51..55"
+    ]
+  },
+  "x@47..48": {
+    "def": {
+      "kind": {
+        "Mod": "Ident"
+      },
+      "name": "x",
+      "range": "47:48"
+    },
+    "refs": [
+      "x@58..59"
+    ]
+  }
+}

@@ -0,0 +1,39 @@
+---
+source: crates/tinymist-query/src/analysis.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/def_use/import_ident_alias.typ
+---
+{
+  "base@8..18": {
+    "def": {
+      "kind": {
+        "Mod": "PathVar"
+      },
+      "name": "base",
+      "range": "8:18"
+    },
+    "refs": [
+      "base@21..25",
+      "base@58..62"
+    ]
+  },
+  "foo@47..55": {
+    "def": {
+      "kind": {
+        "Mod": {
+          "Alias": {
+            "target": {
+              "name": "x",
+              "range": "47:48"
+            }
+          }
+        }
+      },
+      "name": "foo",
+      "range": "47:55"
+    },
+    "refs": [
+      "foo@65..68"
+    ]
+  }
+}

@@ -0,0 +1,2 @@
+#import "base.typ"
+#base

@@ -6,7 +6,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/base.typ
 {
   "x@5..6": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "x",
       "range": "5:6"
     },

@@ -6,7 +6,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
 {
   "a@29..30": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "a",
       "range": "29:30"
     },
@@ -14,7 +16,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
   },
   "a@6..7": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "a",
       "range": "6:7"
     },
@@ -24,7 +28,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
   },
   "b@32..33": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "b",
       "range": "32:33"
     },
@@ -32,7 +38,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
   },
   "b@9..10": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "b",
       "range": "9:10"
     },

@@ -6,7 +6,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/dict.typ
 {
   "x@18..19": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "x",
       "range": "18:19"
     },
@@ -14,7 +16,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/dict.typ
   },
   "z@5..6": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "z",
       "range": "5:6"
     },

@@ -6,7 +6,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
 {
   "a@20..21": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "a",
       "range": "20:21"
     },
@@ -16,7 +18,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
   },
   "f@18..19": {
     "def": {
-      "kind": "Function",
+      "kind": {
+        "Var": "Function"
+      },
       "name": "f",
       "range": "18:19"
     },
@@ -24,7 +28,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
   },
   "x@5..6": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "x",
       "range": "5:6"
     },

@@ -0,0 +1,19 @@
+---
+source: crates/tinymist-query/src/analysis.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/import.typ
+---
+{
+  "base@8..18": {
+    "def": {
+      "kind": {
+        "Mod": "PathVar"
+      },
+      "name": "base",
+      "range": "8:18"
+    },
+    "refs": [
+      "base@21..25"
+    ]
+  }
+}

@@ -6,7 +6,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/redefine.typ
 {
   "x@18..19": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "x",
       "range": "18:19"
     },
@@ -14,7 +16,9 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/redefine.typ
   },
   "x@5..6": {
     "def": {
-      "kind": "Variable",
+      "kind": {
+        "Var": "Variable"
+      },
       "name": "x",
       "range": "5:6"
     },

@@ -5,12 +5,16 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/base.typ
 ---
 [
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "x",
     "range": "5:6"
   },
   {
-    "kind": "ValRef",
+    "kind": {
+      "Var": "ValRef"
+    },
     "name": "x",
     "range": "14:15"
   }

@@ -5,32 +5,44 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/destructing.typ
 ---
 [
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "a",
     "range": "6:7"
   },
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "b",
     "range": "9:10"
   },
   {
-    "kind": "ValRef",
+    "kind": {
+      "Var": "ValRef"
+    },
     "name": "b",
     "range": "38:39"
   },
   {
-    "kind": "ValRef",
+    "kind": {
+      "Var": "ValRef"
+    },
     "name": "a",
     "range": "41:42"
   },
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "a",
     "range": "29:30"
   },
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "b",
     "range": "32:33"
   }

@@ -5,22 +5,30 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/dict.typ
 ---
 [
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "z",
     "range": "5:6"
   },
   {
-    "kind": "ValRef",
+    "kind": {
+      "Var": "ValRef"
+    },
     "name": "z",
     "range": "30:31"
   },
   {
-    "kind": "ValRef",
+    "kind": {
+      "Var": "ValRef"
+    },
     "name": "z",
     "range": "43:44"
   },
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "x",
     "range": "18:19"
   }

@@ -5,24 +5,32 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/func.typ
 ---
 [
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "x",
     "range": "5:6"
   },
   {
-    "kind": "Function",
+    "kind": {
+      "Var": "Function"
+    },
     "name": "f",
     "range": "18:19"
   },
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "a",
     "range": "20:21"
   },
   {
     "children": [
       {
-        "kind": "ValRef",
+        "kind": {
+          "Var": "ValRef"
+        },
         "name": "a",
         "range": "25:26"
       }

@@ -0,0 +1,34 @@
+---
+source: crates/tinymist-query/src/analysis.rs
+expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
+input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/import.typ
+---
+[
+  {
+    "children": [
+      {
+        "kind": {
+          "Mod": "PathVar"
+        },
+        "name": "base",
+        "range": "8:18"
+      }
+    ],
+    "kind": {
+      "Mod": {
+        "Module": {
+          "Path": "base.typ"
+        }
+      }
+    },
+    "name": "",
+    "range": "1:18"
+  },
+  {
+    "kind": {
+      "Var": "ValRef"
+    },
+    "name": "base",
+    "range": "21:25"
+  }
+]

@@ -5,17 +5,23 @@ input_file: crates/tinymist-query/src/fixtures/lexical_hierarchy/redefine.typ
 ---
 [
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "x",
     "range": "5:6"
   },
   {
-    "kind": "ValRef",
+    "kind": {
+      "Var": "ValRef"
+    },
     "name": "x",
     "range": "22:23"
   },
   {
-    "kind": "Variable",
+    "kind": {
+      "Var": "Variable"
+    },
     "name": "x",
     "range": "18:19"
   }

@@ -62,7 +62,7 @@ fn filter_document_symbols(
 
         SymbolInformation {
             name: e.info.name.clone(),
-            kind: e.info.kind.try_into().unwrap(),
+            kind: e.info.kind.clone().try_into().unwrap(),
             tags: None,
            deprecated: None,
            location: LspLocation {