Mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-07-24 13:13:43 +00:00)
feat: find references for Ref and Label (#527)

* add test for `goto_definition` with label reference
* simplify test for `goto_definition` with reference
* abstract compile doc for test
* add snap for goto_definition label reference
* basic goto_reference with simple test
* basic find_reference for Ref
* fix: a bug in linked def
* Remove unwanted compile directive compile: true
* simply compile and get depended files; still fails for cross_file_ref_label
* update ref reference but still fail to get source from path
* fix: reuse find definition effort and handle undefined references for labels
* dev: update test case
* fix: label reference analysis
* fix: rust analyzer doing bad
* dev: update snapshot
* dev: lift common pattern
* fix: unstable snapshot

Co-authored-by: Myriad-Dreamin <camiyoru@gmail.com>
Parent: 0fce237d57
Commit: 1295c8754a
17 changed files with 249 additions and 169 deletions
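For context, this feature operates on Typst's label and reference syntax. The sketch below is illustrative only (it is not part of this commit; it merely mirrors the fixtures added further down): a find-references request on either the `<intro>` label or any `@intro` reference should report every `@intro` site.

```typst
= Introduction <intro>  // a Label attached to the heading

See @intro for details. // a Ref pointing at that label
As shown in @intro, ... // another Ref; find-references should list both
```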

@@ -28,6 +28,8 @@ pub struct DefUseInfo {
external_refs: ExternalRefMap,
/// The references to defined symbols.
pub ident_refs: HashMap<IdentRef, DefId>,
/// The references of labels.
pub label_refs: HashMap<String, Vec<Range<usize>>>,
/// The references to undefined symbols.
pub undefined_refs: Vec<IdentRef>,
exports_refs: Vec<DefId>,

@@ -56,8 +58,9 @@ impl DefUseInfo {
+ 32)
+ self.ident_refs.capacity()
* (std::mem::size_of::<IdentRef>() + std::mem::size_of::<DefId>() + 32)
+ (self.undefined_refs.capacity() * std::mem::size_of::<IdentRef>() + 32)
+ (self.exports_refs.capacity() * std::mem::size_of::<DefId>() + 32)
+ self.label_refs.capacity() * (std::mem::size_of::<Range<usize>>() + 32)
+ self.undefined_refs.capacity() * (std::mem::size_of::<IdentRef>() + 32)
+ self.exports_refs.capacity() * (std::mem::size_of::<DefId>() + 32)
+ self.exports_defs.capacity()
* (std::mem::size_of::<String>() + std::mem::size_of::<DefId>() + 32)
}

@@ -226,12 +229,12 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
LexicalKind::Var(LexicalVarKind::Label) => {
self.insert(Ns::Label, e);
}
LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_ref(Ns::Label, e),
LexicalKind::Var(LexicalVarKind::LabelRef) => self.insert_label_ref(e),
LexicalKind::Var(LexicalVarKind::Function)
| LexicalKind::Var(LexicalVarKind::Variable) => {
self.insert(Ns::Value, e);
}
LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_ref(Ns::Value, e),
LexicalKind::Var(LexicalVarKind::ValRef) => self.insert_value_ref(e),
LexicalKind::Block => {
if let Some(e) = &e.children {
self.enter(|this| this.scan(e.as_slice()))?;

@@ -266,7 +269,7 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
| LexicalKind::Mod(LexicalModKind::ModuleAlias) => self.insert_module(Ns::Value, e),
LexicalKind::Mod(LexicalModKind::Ident) => match self.import_name(&e.info.name) {
Some(()) => {
self.insert_ref(Ns::Value, e);
self.insert_value_ref(e);
}
None => {
let def_id = self.insert(Ns::Value, e);

@@ -276,13 +279,10 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
LexicalKind::Mod(LexicalModKind::Alias { target }) => {
match self.import_name(&target.name) {
Some(()) => {
self.insert_ident_ref(
Ns::Value,
IdentRef {
name: target.name.clone(),
range: target.range.clone(),
},
);
self.insert_value_ref_(IdentRef {
name: target.name.clone(),
range: target.range.clone(),
});
self.insert(Ns::Value, e);
}
None => {

@@ -350,13 +350,8 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
id
}

fn insert_ident_ref(&mut self, label: Ns, id_ref: IdentRef) {
let snap = match label {
Ns::Label => &mut self.label_scope,
Ns::Value => &mut self.id_scope,
};

match snap.get(&id_ref.name) {
fn insert_value_ref_(&mut self, id_ref: IdentRef) {
match self.id_scope.get(&id_ref.name) {
Some(id) => {
self.info.ident_refs.insert(id_ref, *id);
}

@@ -366,13 +361,15 @@ impl<'a, 'b, 'w> DefUseCollector<'a, 'b, 'w> {
}
}

fn insert_ref(&mut self, label: Ns, e: &LexicalHierarchy) {
self.insert_ident_ref(
label,
IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
},
);
fn insert_value_ref(&mut self, e: &LexicalHierarchy) {
self.insert_value_ref_(IdentRef {
name: e.info.name.clone(),
range: e.info.range.clone(),
});
}

fn insert_label_ref(&mut self, e: &LexicalHierarchy) {
let refs = self.info.label_refs.entry(e.info.name.clone()).or_default();
refs.push(e.info.range.clone());
}
}

@@ -14,7 +14,7 @@ use reflexo::hash::{hash128, FxDashMap};
use reflexo::{debug_loc::DataSource, ImmutPath};
use typst::eval::Eval;
use typst::foundations::{self, Func};
use typst::syntax::{LinkedNode, SyntaxNode};
use typst::syntax::{FileId, LinkedNode, SyntaxNode};
use typst::{
diag::{eco_format, FileError, FileResult, PackageError},
foundations::Bytes,

@@ -310,8 +310,8 @@ impl<'w> AnalysisContext<'w> {
self.get(id).unwrap().source(self, id)
}

/// Get the source of a file by file path.
pub fn source_by_path(&mut self, p: &Path) -> FileResult<Source> {
/// Get the fileId from its path
pub fn file_id_by_path(&self, p: &Path) -> FileResult<FileId> {
// todo: source in packages
let relative_path = p.strip_prefix(&self.root).map_err(|_| {
FileError::Other(Some(eco_format!(

@@ -320,7 +320,13 @@
)))
})?;

let id = TypstFileId::new(None, VirtualPath::new(relative_path));
Ok(TypstFileId::new(None, VirtualPath::new(relative_path)))
}

/// Get the source of a file by file path.
pub fn source_by_path(&mut self, p: &Path) -> FileResult<Source> {
// todo: source in packages
let id = self.file_id_by_path(p)?;
self.source_by_id(id)
}

@@ -74,9 +74,9 @@ pub fn find_definition(
}
DerefTarget::Label(r) | DerefTarget::Ref(r) => {
let ref_expr: ast::Expr = r.cast()?;
let ref_node = match ref_expr {
ast::Expr::Ref(r) => r.target(),
ast::Expr::Label(r) => r.get(),
let (ref_node, is_label) = match ref_expr {
ast::Expr::Ref(r) => (r.target(), false),
ast::Expr::Label(r) => (r.get(), true),
_ => return None,
};

@@ -100,19 +100,34 @@ pub fn find_definition(
.or_else(|| {
let sel = Selector::Label(label);
let elem = introspector.query_first(&sel)?;
let span = elem.span();
let fid = span.id()?;

let source = ctx.source_by_id(fid).ok()?;
// if it is a label, we put the selection range to itself
let (def_at, name_range) = if is_label {
let span = r.span();
let fid = span.id()?;
let source = ctx.source_by_id(fid).ok()?;
let rng = source.range(span)?;

let rng = source.range(span)?;
let name_range = rng.start + 1..rng.end - 1;
let name_range = (name_range.start <= name_range.end).then_some(name_range);
(Some((fid, rng)), name_range)
} else {
// otherwise, it is estimated to the span of the pointed content
// todo: get the label's span
let span = elem.span();
let fid = span.id()?;
let source = ctx.source_by_id(fid).ok()?;
let rng = source.range(span)?;

(Some((fid, rng)), None)
};

Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Label),
name: ref_node.to_owned(),
value: Some(Value::Content(elem)),
def_at: Some((fid, rng.clone())),
name_range: Some(rng.clone()),
def_at,
name_range,
})
});
}

@@ -320,12 +320,7 @@ mod tests {
let mut includes = HashSet::new();
let mut excludes = HashSet::new();

let must_compile = has_test_property(&properties, "compile");
let doc = if must_compile {
compile_doc_for_test(ctx)
} else {
None
};
let doc = compile_doc_for_test(ctx, &properties);

for kk in properties.get("contains").iter().flat_map(|v| v.split(',')) {
// split first char

@@ -0,0 +1,19 @@
// path: base2.typ
== Base 2 <b2>
Ref to b1 @b1
Ref to b2 @b2
Ref to b1 @b1 again
-----
// path: base1.typ
== Base 1 <b1>
Ref to b1 @b1
Ref to b2 @b2
-----
// compile:true

#set heading(numbering: "1.")
= Test Ref Label
#include "base1.typ"
#include "base2.typ"
Ref to b1 /* position after */ @b1
Ref to b2 @b2

crates/tinymist-query/src/fixtures/references/label.typ (new file, 6 lines)

@@ -0,0 +1,6 @@
// compile: true
#set heading(numbering: "1.")

= Labeled <title_label> /* ident */

@title_label

crates/tinymist-query/src/fixtures/references/ref_label.typ (new file, 10 lines)

@@ -0,0 +1,10 @@
// compile: true
#set heading(numbering: "1.")

= Labeled <title_label>

@title_label

/* position after */ @title_label

@title_label

@@ -0,0 +1,11 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/cross_file_ref_label.typ
---
[
"/base1.typ@1:10:1:13",
"/base2.typ@1:10:1:13",
"/base2.typ@3:10:3:13",
"/s2.typ@6:31:6:34"
]
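
For orientation when reading these snapshots, the locations appear to encode `path@startLine:startCol:endLine:endCol` with zero-based lines and columns; this is an inference from the fixture contents, not something stated in the snapshot files. Annotated sketch for base1.typ above:

```typst
== Base 1 <b1> // zero-based line 0
Ref to b1 @b1  // line 1; columns 10..13 cover "@b1", hence "/base1.typ@1:10:1:13"
```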

@@ -0,0 +1,8 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/label.typ
---
[
"/s0.typ@5:0:5:12"
]

@@ -5,7 +5,7 @@ input_file: crates/tinymist-query/src/fixtures/references/recursive_import.typ
---
[
"/base2.typ@0:20:0:21",
"/s0.typ@0:21:0:22",
"/base2.typ@1:1:1:2",
"/s0.typ@0:21:0:22",
"/s0.typ@1:1:1:2"
]

@@ -0,0 +1,10 @@
---
source: crates/tinymist-query/src/references.rs
expression: "JsonRepr::new_pure(result)"
input_file: crates/tinymist-query/src/fixtures/references/ref_label.typ
---
[
"/s0.typ@5:0:5:12",
"/s0.typ@7:21:7:33",
"/s0.typ@9:0:9:12"
]

@@ -57,8 +57,8 @@ impl StatefulRequest for GotoDefinitionRequest {

#[cfg(test)]
mod tests {
use crate::syntax::find_module_level_docs;
use super::*;
use crate::syntax::find_module_level_docs;
use crate::tests::*;

#[test]

@@ -68,12 +68,7 @@ mod tests {

let docs = find_module_level_docs(&source).unwrap_or_default();
let properties = get_test_properties(&docs);
let must_compile = has_test_property(&properties, "compile");
let doc = if must_compile {
compile_doc_for_test(ctx)
} else {
None
};
let doc = compile_doc_for_test(ctx, &properties);

let request = GotoDefinitionRequest {
path: path.clone(),

@@ -1,10 +1,11 @@
use std::ops::Range;

use log::debug;

use crate::{
analysis::SearchCtx,
analysis::{find_definition, SearchCtx},
prelude::*,
syntax::{DerefTarget, IdentRef},
SemanticRequest,
};

/// The [`textDocument/references`] request is sent from the client to the

@@ -20,15 +21,18 @@ pub struct ReferencesRequest {
pub position: LspPosition,
}

impl SemanticRequest for ReferencesRequest {
impl StatefulRequest for ReferencesRequest {
type Response = Vec<LspLocation>;

fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response> {
fn request(
self,
ctx: &mut AnalysisContext,
doc: Option<VersionedDocument>,
) -> Option<Self::Response> {
let source = ctx.source_by_path(&self.path).ok()?;
let deref_target = ctx.deref_syntax_at(&source, self.position, 1)?;

let def_use = ctx.def_use(source.clone())?;
let locations = find_references(ctx, def_use, deref_target)?;
let locations = find_references(ctx, source.clone(), doc.as_ref(), deref_target)?;

debug!("references: {locations:?}");
Some(locations)

@@ -37,72 +41,45 @@ impl SemanticRequest for ReferencesRequest {

pub(crate) fn find_references(
ctx: &mut AnalysisContext<'_>,
def_use: Arc<crate::analysis::DefUseInfo>,
source: Source,
document: Option<&VersionedDocument>,
deref_target: DerefTarget<'_>,
) -> Option<Vec<LspLocation>> {
let node = match deref_target {
DerefTarget::VarAccess(node) => node,
DerefTarget::Callee(node) => node,
DerefTarget::ImportPath(..) | DerefTarget::IncludePath(..) => {
return None;
}
// todo: label, reference
DerefTarget::Label(..) | DerefTarget::Ref(..) | DerefTarget::Normal(..) => {
let finding_label = match deref_target {
DerefTarget::VarAccess(..) | DerefTarget::Callee(..) => false,
DerefTarget::Label(..) | DerefTarget::Ref(..) => true,
DerefTarget::ImportPath(..) | DerefTarget::IncludePath(..) | DerefTarget::Normal(..) => {
return None;
}
};

let mut may_ident = node.cast::<ast::Expr>()?;
let name;
loop {
match may_ident {
ast::Expr::Parenthesized(e) => {
may_ident = e.expr();
}
ast::Expr::FieldAccess(e) => {
may_ident = e.target();
}
ast::Expr::MathIdent(e) => {
name = e.get().to_string();
break;
}
ast::Expr::Ident(e) => {
name = e.get().to_string();
break;
}
_ => return None,
}
}
let def = find_definition(ctx, source, document, deref_target)?;

let ident = node.find(may_ident.span())?;
// todo: reference of builtin items?
let (def_fid, def_range) = def.def_at?;

// todo: if it is exported, find all the references in the workspace
let ident_ref = IdentRef {
name: name.clone(),
range: ident.range(),
};
let cur_fid = ident.span().id()?;

let def_id = def_use.get_ref(&ident_ref);
let def_id = def_id.or_else(|| Some(def_use.get_def(cur_fid, &ident_ref)?.0));
let (def_fid, def) = def_id.and_then(|def_id| def_use.get_def_by_id(def_id))?;
let def_ident = IdentRef {
name: def.name.clone(),
range: def.range.clone(),
range: def_range,
};

let def_source = ctx.source_by_id(def_fid).ok()?;
let root_def_use = ctx.def_use(def_source)?;
let root_def_id = root_def_use.get_def(def_fid, &def_ident)?.0;
let root_def_id = root_def_use.get_def(def_fid, &def_ident).map(|e| e.0);

let worker = ReferencesWorker {
ctx: ctx.fork_for_search(),
references: vec![],
def_fid,
def_ident,
finding_label,
};

worker.root(root_def_use, root_def_id)
if finding_label {
worker.label_root()
} else {
worker.ident_root(root_def_use, root_def_id?)
}
}

struct ReferencesWorker<'a, 'w> {

@@ -110,38 +87,28 @@ struct ReferencesWorker<'a, 'w> {
references: Vec<LspLocation>,
def_fid: TypstFileId,
def_ident: IdentRef,
finding_label: bool,
}

impl<'a, 'w> ReferencesWorker<'a, 'w> {
fn file(&mut self, ref_fid: TypstFileId) -> Option<()> {
log::debug!("references: file: {ref_fid:?}");
let ref_source = self.ctx.ctx.source_by_id(ref_fid).ok()?;
let def_use = self.ctx.ctx.def_use(ref_source.clone())?;
fn label_root(mut self) -> Option<Vec<LspLocation>> {
let mut ids = vec![];

let uri = self.ctx.ctx.uri_for_id(ref_fid).ok()?;

let mut redefines = vec![];
if let Some((id, _def)) = def_use.get_def(self.def_fid, &self.def_ident) {
self.references.extend(def_use.get_refs(id).map(|r| {
log::debug!("references: at file: {ref_fid:?}, {r:?}");
let range = self.ctx.ctx.to_lsp_range(r.range.clone(), &ref_source);

LspLocation {
uri: uri.clone(),
range,
}
}));
redefines.push(id);

if def_use.is_exported(id) {
self.ctx.push_dependents(ref_fid);
// Collect ids first to avoid deadlocks
self.ctx.ctx.resources.iter_dependencies(&mut |path| {
if let Ok(ref_fid) = self.ctx.ctx.file_id_by_path(&path) {
ids.push(ref_fid);
}
};
});

Some(())
for ref_fid in ids {
self.file(ref_fid)?;
}

Some(self.references)
}

fn root(
fn ident_root(
mut self,
def_use: Arc<crate::analysis::DefUseInfo>,
def_id: DefId,

@@ -149,18 +116,7 @@ impl<'a, 'w> ReferencesWorker<'a, 'w> {
let def_source = self.ctx.ctx.source_by_id(self.def_fid).ok()?;
let uri = self.ctx.ctx.uri_for_id(self.def_fid).ok()?;

// todo: reuse uri, range to location
self.references = def_use
.get_refs(def_id)
.map(|r| {
let range = self.ctx.ctx.to_lsp_range(r.range.clone(), &def_source);

LspLocation {
uri: uri.clone(),
range,
}
})
.collect::<Vec<_>>();
self.push_idents(&def_source, &uri, def_use.get_refs(def_id));

if def_use.is_exported(def_id) {
// Find dependents

@@ -172,6 +128,49 @@ impl<'a, 'w> ReferencesWorker<'a, 'w> {

Some(self.references)
}

fn file(&mut self, ref_fid: TypstFileId) -> Option<()> {
log::debug!("references: file: {ref_fid:?}");
let ref_source = self.ctx.ctx.source_by_id(ref_fid).ok()?;
let def_use = self.ctx.ctx.def_use(ref_source.clone())?;
let uri = self.ctx.ctx.uri_for_id(ref_fid).ok()?;

let mut redefines = vec![];
if let Some((id, _def)) = def_use.get_def(self.def_fid, &self.def_ident) {
self.push_idents(&ref_source, &uri, def_use.get_refs(id));

redefines.push(id);

if def_use.is_exported(id) {
self.ctx.push_dependents(ref_fid);
}
};

// All references are not resolved since static analyzers doesn't know anything
// about labels (which is working at runtime).
if self.finding_label {
let label_refs = def_use.label_refs.get(&self.def_ident.name);
self.push_ranges(&ref_source, &uri, label_refs.into_iter().flatten());
}

Some(())
}

fn push_idents<'b>(&mut self, s: &Source, u: &Url, idents: impl Iterator<Item = &'b IdentRef>) {
self.push_ranges(s, u, idents.map(|e| &e.range));
}

fn push_ranges<'b>(&mut self, s: &Source, u: &Url, rs: impl Iterator<Item = &'b Range<usize>>) {
self.references.extend(rs.map(|rng| {
log::debug!("references: at file: {s:?}, {rng:?}");

let range = self.ctx.ctx.to_lsp_range(rng.clone(), s);
LspLocation {
uri: u.clone(),
range,
}
}));
}
}

#[cfg(test)]

@@ -179,30 +178,25 @@ mod tests {
use typst_ts_core::path::unix_slash;

use super::*;
use crate::syntax::find_module_level_docs;
use crate::{tests::*, url_to_path};

#[test]
fn test() {
// goto_definition
snapshot_testing("references", &|world, path| {
let source = world.source_by_path(&path).unwrap();
snapshot_testing("references", &|ctx, path| {
let source = ctx.source_by_path(&path).unwrap();

let docs = find_module_level_docs(&source).unwrap_or_default();
let properties = get_test_properties(&docs);
let doc = compile_doc_for_test(ctx, &properties);

let request = ReferencesRequest {
path: path.clone(),
position: find_test_position(&source),
};

let result = request.request(world);
// sort
let result = result.map(|mut e| {
e.sort_by(|a, b| match a.range.start.cmp(&b.range.start) {
std::cmp::Ordering::Equal => a.range.end.cmp(&b.range.end),
e => e,
});
e
});

let result = result.map(|v| {
let result = request.request(ctx, doc);
let mut result = result.map(|v| {
v.into_iter()
.map(|l| {
let fp = unix_slash(&url_to_path(l.uri));

@@ -217,6 +211,10 @@ mod tests {
})
.collect::<Vec<_>>()
});
// sort
if let Some(result) = result.as_mut() {
result.sort();
}

assert_snapshot!(JsonRepr::new_pure(result));
});
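
To make the `finding_label` split in find_references concrete, here is a hedged Typst sketch (the names are illustrative, not taken from this commit): a references request on `author` goes through the def-use (`ident_root`) path, while a request on `<intro>` or `@intro` goes through `label_root`, which additionally walks dependent files and their collected `label_refs`, since labels are only fully resolved at runtime.

```typst
#let author = "Jane"   // value: references resolved via static def-use analysis

= Introduction <intro> // label definition

Written by #author.    // a value reference
See @intro.            // a label reference, resolved with label/runtime info
```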

@@ -30,8 +30,7 @@ impl StatefulRequest for RenameRequest {

validate_renaming_definition(&lnk)?;

let def_use = ctx.def_use(source.clone())?;
let references = find_references(ctx, def_use, deref_target)?;
let references = find_references(ctx, source.clone(), doc.as_ref(), deref_target)?;

let mut editions = HashMap::new();

@@ -1,11 +1,12 @@
use core::fmt;
use std::sync::Arc;
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
ops::Range,
path::{Path, PathBuf},
};
use std::sync::Arc;

use once_cell::sync::Lazy;
pub use serde::Serialize;
use serde_json::{ser::PrettyFormatter, Serializer, Value};

@@ -26,7 +27,11 @@ use typst_ts_core::{config::CompileOpts, package::PackageSpec};
pub use insta::assert_snapshot;
pub use typst_ts_compiler::TypstSystemWorld;

use crate::{analysis::{Analysis, AnalysisResources}, prelude::AnalysisContext, typst_to_lsp, LspPosition, PositionEncoding, VersionedDocument};
use crate::{
analysis::{Analysis, AnalysisResources},
prelude::AnalysisContext,
typst_to_lsp, LspPosition, PositionEncoding, VersionedDocument,
};

struct WrapWorld<'a>(&'a mut TypstSystemWorld);

@@ -91,14 +96,19 @@ pub fn get_test_properties(s: &str) -> HashMap<&'_ str, &'_ str> {
props
}

pub fn has_test_property(properties: &HashMap<&'_ str, &'_ str>, prop: &str) -> bool {
properties
.get(prop)
pub fn compile_doc_for_test(
ctx: &mut AnalysisContext,
properties: &HashMap<&str, &str>,
) -> Option<VersionedDocument> {
let must_compile = properties
.get("compile")
.map(|v| v.trim() == "true")
.unwrap_or(false)
}
.unwrap_or(false);

if !must_compile {
return None;
}

pub fn compile_doc_for_test(ctx: &mut AnalysisContext) -> Option<VersionedDocument> {
let doc = typst::compile(ctx.world(), &mut Default::default()).unwrap();
Some(VersionedDocument {
version: 0,
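
Fixtures opt into this compilation through a module-level doc comment, which `get_test_properties` parses and `compile_doc_for_test` checks before compiling the document. A minimal sketch of such a fixture (the content is illustrative, mirroring the fixtures added in this commit):

```typst
// compile: true

= Labeled <title_label>

/* position after */ @title_label
```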

@@ -212,9 +222,10 @@ pub fn find_test_position_(s: &Source, offset: usize) -> LspPosition {
}
use AstMatcher::*;

let re = s.text()
.find("/* position */")
.zip(Some(MatchAny { prev: true }));
let re = s
.text()
.find("/* position */")
.zip(Some(MatchAny { prev: true }));
let re = re.or_else(|| {
s.text()
.find("/* position after */")