feat: find definition of label references (#235)

* feat: find definition of label references

* dev: move dependencies declaration

* dev: remove serde_spanned
This commit is contained in:
Myriad-Dreamin 2024-05-05 18:53:18 +08:00 committed by GitHub
parent c133d81d36
commit d774304574
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 563 additions and 37 deletions

View file

@ -1,5 +1,7 @@
//! Semantic static and dynamic analysis of the source code.
mod bib;
pub(crate) use bib::*;
pub mod call;
pub use call::*;
pub mod color_exprs;

View file

@ -0,0 +1,234 @@
use std::{ffi::OsStr, sync::Arc};
use ecow::EcoVec;
use typst::foundations::Bytes;
use super::prelude::*;
use yaml_rust2::{parser::Event, parser::MarkedEventReceiver, scanner::Marker};
/// A parsed YAML value together with the span it occupies in the source.
///
/// NOTE(review): the span is in the parser's `Marker::index()` units and is
/// remapped to byte offsets in `YamlBib::from_content` — confirm the marker
/// unit (char vs byte) against the yaml_rust2 documentation.
#[derive(Debug, Clone)]
struct BibSpanned<T> {
value: T,
span: Range<usize>,
}
/// Streaming state for collecting the top-level mapping entries (bibliography
/// keys) while the YAML parser pushes events into
/// [`MarkedEventReceiver::on_event`].
#[derive(Default)]
struct YamlBibLoader {
// Number of currently open mappings; 1 while inside the document's root
// mapping, where entry keys live.
depth: usize,
// Key of the entry whose body mapping is currently being read.
start: Option<BibSpanned<String>>,
// Last scalar seen at depth 1, i.e. the most recent candidate entry key.
key: Option<BibSpanned<String>>,
// Completed entries: (key, span of the whole entry) in document order.
content: Vec<(BibSpanned<String>, Range<usize>)>,
}
impl MarkedEventReceiver for YamlBibLoader {
// Tracks mapping depth to recognize top-level `key: {...}` pairs,
// recording each key and the span of its entry body.
fn on_event(&mut self, event: Event, mark: Marker) {
match event {
Event::MappingStart(..) => {
// Depth 1 means we are inside the root mapping, so this start
// opens an entry's body; remember which key it belongs to.
if self.depth == 1 {
log::debug!("mapping start: {:?} {:?}", self.key, mark.index());
self.start = self.key.take();
}
self.depth += 1;
}
Event::Scalar(s, ..) => {
log::debug!("scalar: {:?} {:?}", s, mark.index());
// Only scalars at depth 1 are entry keys; deeper scalars
// (field names/values) are ignored.
if self.depth == 1 {
self.key = Some(BibSpanned {
value: s.to_owned(),
// NOTE(review): assumes `mark.index()` counts chars, to match
// the `chars().count()` length; remapped to bytes later — confirm.
span: mark.index()..mark.index() + s.chars().count(),
});
}
}
Event::MappingEnd => {
self.depth -= 1;
// Back at depth 1: an entry body just closed; emit the entry.
if self.depth == 1 {
let end = mark.index();
let start = self.start.take();
let Some(start) = start else {
return;
};
// Entry spans from the start of its key to this end mark.
let span = start.span.start..end;
self.content.push((start, span));
log::debug!("mapping end: {:?} {:?}", self.key, mark.index());
}
}
// Sequences, aliases, document markers, etc. are irrelevant here.
_ => {}
}
}
}
/// Bibliography entries extracted from one YAML file, in document order.
struct YamlBib {
entries: Vec<(String, BibEntry)>,
}
impl YamlBib {
/// Parses `content` as a YAML bibliography and resolves each entry's
/// parser-reported span to byte offsets within `content`.
///
/// Parse errors are tolerated (`.ok()`): entries collected before the
/// error are still returned, and entries whose spans cannot be resolved
/// are silently dropped.
fn from_content(content: &str, file_id: TypstFileId) -> Self {
let mut parser = yaml_rust2::parser::Parser::new(content.chars());
let mut loader = YamlBibLoader::default();
parser.load(&mut loader, true).ok();
// Collect every span endpoint that needs translating, so all of them
// can be resolved in a single pass over the text below.
let mut span_mapper = Vec::from_iter(
loader
.content
.iter()
.flat_map(|(k, span)| [k.span.start, k.span.end, span.start, span.end])
.map(|e| (e, None)),
);
span_mapper.sort_by_key(|e| e.0);
span_mapper.dedup_by_key(|e| e.0);
// One pass over the chars, tracking the running byte offset and filling
// in the byte offset of each (sorted) endpoint as it is reached. The
// trailing '\0' sentinel lets endpoints at `content`'s end resolve.
let mut span_cursor = 0;
let mut byte_offset = 0;
for (off, ch) in content.chars().chain(Some('\0')).enumerate() {
if span_cursor < span_mapper.len() {
let (span, w) = &mut span_mapper[span_cursor];
if off == *span {
*w = Some(byte_offset);
span_cursor += 1;
}
}
byte_offset += ch.len_utf8();
}
// Endpoints never reached stay `None` and are filtered out here.
let span_map = HashMap::<usize, usize>::from_iter(
span_mapper
.into_iter()
.filter_map(|(span, offset)| offset.map(|offset| (span, offset))),
);
// Translate a parser-unit range into a byte range, if both ends resolved.
let map_span = |span: Range<usize>| {
let start = span_map.get(&span.start).copied()?;
let end = span_map.get(&span.end).copied()?;
Some(start..end)
};
// Build the final entry list; unresolvable entries are skipped rather
// than failing the whole file.
let entries = loader
.content
.into_iter()
.filter_map(|(k, span)| {
let k_span = map_span(k.span)?;
let span = map_span(span)?;
let entry = BibEntry {
file_id,
name_span: k_span.clone(),
span: span.clone(),
};
Some((k.value, entry))
})
.collect();
Self { entries }
}
}
/// A single bibliography entry, with spans pointing into its defining file.
///
/// NOTE(review): for YAML files the spans are byte offsets (see
/// `YamlBib::from_content`); for `.bib` files they come straight from
/// biblatex — confirm those are byte offsets too.
#[derive(Debug, Clone)]
pub struct BibEntry {
/// The file this entry was parsed from.
pub file_id: TypstFileId,
/// Span of the entry's key (citation name) in that file.
pub name_span: Range<usize>,
/// Span of the whole entry in that file.
pub span: Range<usize>,
}
/// The merged bibliography information for a document.
#[derive(Default)]
pub struct BibInfo {
/// The bibliography entries, keyed by citation name. `IndexMap`
/// preserves the order in which entries were discovered across files.
pub entries: indexmap::IndexMap<String, BibEntry>,
}
/// Parses every bibliography file in `paths` and returns the merged entry
/// table; entries from later files override earlier ones with the same key.
pub(crate) fn analyze_bib(paths: EcoVec<(TypstFileId, Bytes)>) -> Option<Arc<BibInfo>> {
    let mut worker = BibWorker {
        info: BibInfo::default(),
    };
    // A document may reference several bib/yaml files; scan them in order.
    for (path, content) in paths.iter() {
        worker.analyze_path(*path, content.clone());
    }
    log::debug!(
        "bib analysis: {paths:?} -> {entries:?}",
        entries = worker.info.entries
    );
    Some(Arc::new(worker.info))
}
/// Accumulates [`BibInfo`] while scanning bibliography files one by one.
struct BibWorker {
info: BibInfo,
}
impl BibWorker {
    /// Parses one bibliography file and merges its entries into `self.info`.
    ///
    /// The format is chosen by file extension (`.yml`/`.yaml` vs `.bib`);
    /// returns `None` for unknown extensions, non-UTF-8 content, or
    /// unparsable `.bib` input.
    fn analyze_path(&mut self, path: TypstFileId, content: Bytes) -> Option<()> {
        let text = std::str::from_utf8(&content).ok()?;
        // Extension comparison is case-insensitive.
        let ext = path
            .vpath()
            .as_rootless_path()
            .extension()
            .and_then(OsStr::to_str)
            .unwrap_or_default()
            .to_lowercase();
        match ext.as_str() {
            "yml" | "yaml" => {
                let yaml = YamlBib::from_content(text, path);
                self.info.entries.extend(yaml.entries);
                Some(())
            }
            "bib" => {
                let bibliography = biblatex::RawBibliography::parse(text).ok()?;
                for e in bibliography.entries {
                    let key = e.v.key;
                    self.info.entries.insert(
                        key.v.to_owned(),
                        BibEntry {
                            file_id: path,
                            name_span: key.span,
                            span: e.span,
                        },
                    );
                }
                Some(())
            }
            _ => None,
        }
    }
}
#[cfg(test)]
mod tests {
use std::path::Path;
use typst::syntax::VirtualPath;
use typst_ts_core::TypstFileId;
/// A well-formed YAML bibliography yields its entries in document order.
#[test]
fn yaml_bib_test() {
let content = r#"
Euclid:
type: article
title: '{Elements, {V}ols.\ 1--13}'
Euclid2:
type: article
title: '{Elements, {V}ols.\ 2--13}'
"#;
let yaml = super::YamlBib::from_content(
content,
TypstFileId::new_fake(VirtualPath::new(Path::new("test.yml"))),
);
assert_eq!(yaml.entries.len(), 2);
assert_eq!(yaml.entries[0].0, "Euclid");
assert_eq!(yaml.entries[1].0, "Euclid2");
}
/// Truncated input must be handled gracefully: parsing stops at the
/// broken trailing key without panicking. (No assertions on the result;
/// this is a no-panic regression test.)
#[test]
fn yaml_bib_incomplete() {
let content = r#"
Euclid:
type: article
title: '{Elements, {V}ols.\ 1--13}'
Euclid3
"#;
super::YamlBib::from_content(
content,
TypstFileId::new_fake(VirtualPath::new(Path::new("test.yml"))),
);
}
}

View file

@ -199,7 +199,9 @@ impl<'a, 'w> DefUseCollector<'a, 'w> {
fn scan(&mut self, e: &'a [LexicalHierarchy]) -> Option<()> {
for e in e {
match &e.info.kind {
LexicalKind::Heading(..) => unreachable!(),
LexicalKind::Var(LexicalVarKind::BibKey) | LexicalKind::Heading(..) => {
unreachable!()
}
LexicalKind::Var(LexicalVarKind::Label) => {
self.insert(Ns::Label, e);
}

View file

@ -6,7 +6,7 @@ use std::{
sync::Arc,
};
use ecow::EcoVec;
use ecow::{EcoString, EcoVec};
use once_cell::sync::OnceCell;
use parking_lot::RwLock;
use reflexo::hash::hash128;
@ -16,6 +16,7 @@ use typst::foundations;
use typst::syntax::{LinkedNode, SyntaxNode};
use typst::{
diag::{eco_format, FileError, FileResult, PackageError},
foundations::Bytes,
syntax::{package::PackageSpec, Source, Span, VirtualPath},
World,
};
@ -23,9 +24,10 @@ use typst::{foundations::Value, syntax::ast, text::Font};
use typst::{layout::Position, syntax::FileId as TypstFileId};
use super::{
post_type_check, DefUseInfo, FlowType, ImportInfo, PathPreference, Signature, SignatureTarget,
TypeCheckInfo,
analyze_bib, post_type_check, BibInfo, DefUseInfo, FlowType, ImportInfo, PathPreference,
Signature, SignatureTarget, TypeCheckInfo,
};
use crate::syntax::resolve_id_by_path;
use crate::{
lsp_to_typst,
syntax::{
@ -39,13 +41,20 @@ use crate::{
/// Per-module (per-file) analysis caches.
///
/// You should not hold it across requests, because source code may change.
#[derive(Default)]
pub struct ModuleAnalysisCache {
// Raw bytes of the file, fetched from the world on first access.
file: OnceCell<FileResult<Bytes>>,
// Parsed source of the file.
source: OnceCell<FileResult<Source>>,
import_info: OnceCell<Option<Arc<ImportInfo>>>,
def_use: OnceCell<Option<Arc<DefUseInfo>>>,
type_check: OnceCell<Option<Arc<TypeCheckInfo>>>,
// Bibliography info for this module, when one was computed.
bibliography: OnceCell<Option<Arc<BibInfo>>>,
}
impl ModuleAnalysisCache {
/// Get the bytes content of a file, reading it from the world on first
/// access and caching the result for this cache's lifetime.
pub fn file(&self, ctx: &AnalysisContext, file_id: TypstFileId) -> FileResult<Bytes> {
self.file.get_or_init(|| ctx.world().file(file_id)).clone()
}
/// Get the source of a file.
pub fn source(&self, ctx: &AnalysisContext, file_id: TypstFileId) -> FileResult<Source> {
self.source
@ -53,12 +62,12 @@ impl ModuleAnalysisCache {
.clone()
}
/// Try to get the def-use information of a file.
/// Try to get the import information of a file.
pub fn import_info(&self) -> Option<Arc<ImportInfo>> {
self.import_info.get().cloned().flatten()
}
/// Compute the def-use information of a file.
/// Compute the import information of a file.
pub(crate) fn compute_import(
&self,
f: impl FnOnce() -> Option<Arc<ImportInfo>>,
@ -91,6 +100,19 @@ impl ModuleAnalysisCache {
) -> Option<Arc<TypeCheckInfo>> {
self.type_check.get_or_init(f).clone()
}
/// Try to get the bibliography information of a file, if already computed
/// via [`Self::compute_bibliography`]; `None` otherwise.
pub fn bibliography(&self) -> Option<Arc<BibInfo>> {
self.bibliography.get().cloned().flatten()
}
/// Compute the bibliography information of a file.
/// `f` runs at most once; subsequent calls return the cached result.
pub(crate) fn compute_bibliography(
&self,
f: impl FnOnce() -> Option<Arc<BibInfo>>,
) -> Option<Arc<BibInfo>> {
self.bibliography.get_or_init(f).clone()
}
}
/// The analysis data holds globally.
@ -154,6 +176,11 @@ impl ComputeDebug for EcoVec<LexicalHierarchy> {
self.len()
}
}
impl ComputeDebug for EcoVec<(TypstFileId, Bytes)> {
// Log only the number of (file, content) pairs, not the file bytes.
fn compute_debug_repr(&self) -> impl std::fmt::Debug {
self.len()
}
}
impl ComputeDebug for Arc<ImportInfo> {
fn compute_debug_repr(&self) -> impl std::fmt::Debug {
@ -280,6 +307,7 @@ pub struct ModuleAnalysisGlobalCache {
type_check: Arc<ComputingNode<Source, Arc<TypeCheckInfo>>>,
def_use: Arc<ComputingNode<(EcoVec<LexicalHierarchy>, Arc<ImportInfo>), Arc<DefUseInfo>>>,
bibliography: Arc<ComputingNode<EcoVec<(TypstFileId, Bytes)>, Arc<BibInfo>>>,
import: Arc<ComputingNode<EcoVec<LexicalHierarchy>, Arc<ImportInfo>>>,
signature_source: Option<Source>,
signatures: HashMap<usize, Signature>,
@ -292,6 +320,7 @@ impl Default for ModuleAnalysisGlobalCache {
type_check: Arc::new(ComputingNode::new("type_check")),
import: Arc::new(ComputingNode::new("import")),
def_use: Arc::new(ComputingNode::new("def_use")),
bibliography: Arc::new(ComputingNode::new("bibliography")),
signature_source: None,
signatures: Default::default(),
@ -510,6 +539,12 @@ impl<'w> AnalysisContext<'w> {
id.vpath().resolve(&root).ok_or(FileError::AccessDenied)
}
/// Get the content of a file by file id, via the per-module cache.
pub fn file_by_id(&mut self, id: TypstFileId) -> FileResult<Bytes> {
// Ensure the module cache slot exists before reading through it.
// NOTE(review): assumes `get(id)` always succeeds right after
// `get_mut(id)` — confirm that invariant where `get`/`get_mut` are defined.
self.get_mut(id);
self.get(id).unwrap().file(self, id)
}
/// Get the source of a file by file id.
pub fn source_by_id(&mut self, id: TypstFileId) -> FileResult<Source> {
self.get_mut(id);
@ -574,6 +609,29 @@ impl<'w> AnalysisContext<'w> {
typst_to_lsp::range(position, src, self.analysis.position_encoding)
}
/// Convert a range in file `fid` to an LSP range.
///
/// Unlike `to_lsp_range`, this also handles non-source files
/// (yaml/yml/bib bibliographies), for which no `Source` exists: positions
/// are computed from the raw bytes via a per-line index instead.
pub fn to_lsp_range_(&mut self, position: TypstRange, fid: TypstFileId) -> Option<LspRange> {
let w = fid
.vpath()
.as_rootless_path()
.extension()
.and_then(|e| e.to_str());
// yaml/yml/bib files take the raw-bytes path.
if matches!(w, Some("yaml" | "yml" | "bib")) {
let bytes = self.file_by_id(fid).ok()?;
let bytes_len = bytes.len();
// Memoized line index of the file (offset, line text).
let loc = get_loc_info(bytes)?;
// binary search
let start = find_loc(bytes_len, &loc, position.start, self.position_encoding())?;
let end = find_loc(bytes_len, &loc, position.end, self.position_encoding())?;
return Some(LspRange { start, end });
}
// Regular source files go through the ordinary conversion.
let source = self.source_by_id(fid).ok()?;
Some(self.to_lsp_range(position, &source))
}
/// Get the type check information of a source file.
pub(crate) fn type_check(&mut self, source: Source) -> Option<Arc<TypeCheckInfo>> {
let fid = source.id();
@ -680,6 +738,41 @@ impl<'w> AnalysisContext<'w> {
res
}
/// Get (or compute and cache) the bibliography information referenced by
/// the bibliography element at `span`, resolving each path in `bib_paths`
/// relative to that element's file.
pub(crate) fn analyze_bib(
&mut self,
span: Span,
bib_paths: impl Iterator<Item = EcoString>,
) -> Option<Arc<BibInfo>> {
let id = span.id()?;
// Fast path: already computed for this module during this request.
if let Some(res) = self.caches.modules.entry(id).or_default().bibliography() {
return Some(res);
}
// The order is important; paths that fail to resolve or read are skipped.
let paths = bib_paths
.flat_map(|s| {
let id = resolve_id_by_path(self.world(), id, &s)?;
Some((id, self.file_by_id(id).ok()?))
})
.collect::<EcoVec<_>>();
// Recompute only when the (path, content) inputs changed.
let cache = self.at_module(id);
let res = cache
.bibliography
.clone()
.compute(paths, |_, after| analyze_bib(after))
.ok()
.flatten();
// Publish the result for the fast path above.
self.caches
.modules
.entry(id)
.or_default()
.compute_bibliography(|| res.clone());
res
}
fn at_module(&mut self, fid: TypstFileId) -> &mut ModuleAnalysisGlobalCache {
self.analysis.caches.modules.entry(fid).or_default()
}
@ -759,6 +852,51 @@ impl<'w> AnalysisContext<'w> {
}
}
/// Builds a per-line index of `bytes`: for every line, its starting byte
/// offset and its text. Returns `None` if any line is not valid UTF-8.
/// Memoized because the same file is typically queried for many positions.
#[comemo::memoize]
fn get_loc_info(bytes: Bytes) -> Option<EcoVec<(usize, String)>> {
let mut loc = EcoVec::new();
let mut offset = 0;
for line in bytes.split(|e| *e == b'\n') {
loc.push((offset, String::from_utf8(line.to_owned()).ok()?));
// +1 accounts for the '\n' separator consumed by `split`.
offset += line.len() + 1;
}
Some(loc)
}
/// Converts a byte `offset` into content of length `len` to an LSP position,
/// using the line index produced by [`get_loc_info`].
///
/// Offsets past the end of the content are clamped to `len`.
fn find_loc(
len: usize,
loc: &EcoVec<(usize, String)>,
mut offset: usize,
encoding: PositionEncoding,
) -> Option<LspPosition> {
if offset > len {
offset = len;
}
// Binary search for the line containing `offset`: an exact hit is a line
// start; otherwise take the preceding line. (Safe because line 0 starts
// at offset 0, so `Err(0)` cannot occur for a non-empty index.)
let r = match loc.binary_search_by_key(&offset, |line| line.0) {
Ok(i) => i,
Err(i) => i - 1,
};
let (start, s) = loc.get(r)?;
let byte_offset = offset.saturating_sub(*start);
let column_prefix = if byte_offset <= s.len() {
&s[..byte_offset]
} else {
// Offset points past this line's text (e.g. at a separator byte):
// report the start of the next line instead.
let line = (r + 1) as u32;
return Some(LspPosition { line, character: 0 });
};
let line = r as u32;
// Column in the client's negotiated encoding units.
// NOTE(review): the LSP spec defines 'utf-8' positions as *byte* counts,
// but this counts chars (scalar values) — confirm against this crate's
// `PositionEncoding::Utf8` contract; if it means bytes, this should be
// `column_prefix.len()`.
let character = match encoding {
PositionEncoding::Utf8 => column_prefix.chars().count(),
PositionEncoding::Utf16 => column_prefix.chars().map(|c| c.len_utf16()).sum(),
} as u32;
Some(LspPosition { line, character })
}
/// The context for searching in the workspace.
pub struct SearchCtx<'b, 'w> {
/// The inner analysis context.

View file

@ -4,11 +4,12 @@ use std::ops::Range;
use log::debug;
use once_cell::sync::Lazy;
use typst::foundations::Type;
use typst::foundations::{IntoValue, Label, Selector, Type};
use typst::model::BibliographyElem;
use typst::syntax::FileId as TypstFileId;
use typst::{foundations::Value, syntax::Span};
use super::prelude::*;
use super::{prelude::*, BibInfo};
use crate::{
prelude::*,
syntax::{
@ -36,6 +37,7 @@ pub struct DefinitionLink {
pub fn find_definition(
ctx: &mut AnalysisContext<'_>,
source: Source,
document: Option<&VersionedDocument>,
deref_target: DerefTarget<'_>,
) -> Option<DefinitionLink> {
let source_id = source.id();
@ -70,8 +72,45 @@ pub fn find_definition(
name_range: None,
});
}
// todo: label, reference
DerefTarget::Label(..) | DerefTarget::Ref(..) | DerefTarget::Normal(..) => {
DerefTarget::Ref(r) => {
let ref_node = r.cast::<ast::Ref>()?.target();
let doc = document?;
let introspector = &doc.document.introspector;
let label = Label::new(ref_node);
let bib_elem = BibliographyElem::find(introspector.track())
.ok()
.and_then(|bib_elem| {
ctx.analyze_bib(bib_elem.span(), {
let Value::Array(arr) = bib_elem.path().clone().into_value() else {
return None;
};
arr.into_iter().map(Value::cast).flat_map(|e| e.ok())
})
});
return bib_elem
.and_then(|e| find_bib_definition(e, ref_node))
.or_else(|| {
let sel = Selector::Label(label);
let elem = introspector.query_first(&sel)?;
let span = elem.span();
let fid = span.id()?;
let source = ctx.source_by_id(fid).ok()?;
let rng = source.range(span)?;
Some(DefinitionLink {
kind: LexicalKind::Var(LexicalVarKind::Label),
name: r.text().to_string(),
value: None,
def_at: Some((fid, rng.clone())),
name_range: Some(rng.clone()),
})
});
}
DerefTarget::Label(..) | DerefTarget::Normal(..) => {
return None;
}
};
@ -122,7 +161,9 @@ pub fn find_definition(
};
match def.kind {
LexicalKind::Heading(..) | LexicalKind::Block => unreachable!(),
LexicalKind::Var(LexicalVarKind::BibKey)
| LexicalKind::Heading(..)
| LexicalKind::Block => unreachable!(),
LexicalKind::Var(
LexicalVarKind::Variable
| LexicalVarKind::ValRef
@ -168,6 +209,20 @@ pub fn find_definition(
}
}
/// Looks up `key` in the analyzed bibliography and, when present, turns the
/// entry into a [`DefinitionLink`] pointing at its definition site.
fn find_bib_definition(bib_elem: Arc<BibInfo>, key: &str) -> Option<DefinitionLink> {
    let entry = bib_elem.entries.get(key);
    log::debug!("find_bib_definition: {key} => {entry:?}");
    entry.map(|entry| DefinitionLink {
        kind: LexicalKind::Var(LexicalVarKind::BibKey),
        name: key.to_string(),
        value: None,
        def_at: Some((entry.file_id, entry.span.clone())),
        // todo: rename with regard to string format: yaml-key/bib etc.
        name_range: Some(entry.span.clone()),
    })
}
/// The target of a dynamic call.
#[derive(Debug, Clone)]
pub struct DynCallTarget {
@ -282,7 +337,7 @@ fn resolve_callee_(
let node = source.find(callee.span())?;
let cursor = node.offset();
let deref_target = get_deref_target(node, cursor)?;
let def = find_definition(ctx, source.clone(), deref_target)?;
let def = find_definition(ctx, source.clone(), None, deref_target)?;
match def.kind {
LexicalKind::Var(LexicalVarKind::Function) => match def.value {
Some(Value::Func(f)) => Some(f),

View file

@ -5,6 +5,7 @@ pub use std::{
sync::Arc,
};
pub use comemo::Track;
pub use reflexo::vector::ir::DefId;
pub use serde::Serialize;
pub use typst::syntax::FileId as TypstFileId;

View file

@ -281,7 +281,7 @@ fn resolve_callee_v2(
let node = source.find(callee.span())?;
let cursor = node.offset();
let deref_target = get_deref_target(node, cursor)?;
let def = find_definition(ctx, source.clone(), deref_target)?;
let def = find_definition(ctx, source.clone(), None, deref_target)?;
if let LexicalKind::Var(LexicalVarKind::Function) = def.kind {
if let Some(Value::Func(f)) = def.value {
return Some(TryResolveCalleeResult::Runtime(f));

View file

@ -1,6 +1,6 @@
use log::debug;
use crate::{analysis::find_definition, prelude::*, syntax::get_deref_target, SemanticRequest};
use crate::{analysis::find_definition, prelude::*, syntax::get_deref_target};
/// The [`textDocument/definition`] request asks the server for the definition
/// location of a symbol at a given text document position.
@ -25,10 +25,14 @@ pub struct GotoDefinitionRequest {
pub position: LspPosition,
}
impl SemanticRequest for GotoDefinitionRequest {
impl StatefulRequest for GotoDefinitionRequest {
type Response = GotoDefinitionResponse;
fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response> {
fn request(
self,
ctx: &mut AnalysisContext,
doc: Option<VersionedDocument>,
) -> Option<Self::Response> {
let source = ctx.source_by_path(&self.path).ok()?;
let offset = ctx.to_typst_pos(self.position, &source)?;
let cursor = offset + 1;
@ -40,15 +44,14 @@ impl SemanticRequest for GotoDefinitionRequest {
let use_site = deref_target.node().clone();
let origin_selection_range = ctx.to_lsp_range(use_site.range(), &source);
let def = find_definition(ctx, source.clone(), deref_target)?;
let def = find_definition(ctx, source.clone(), doc.as_ref(), deref_target)?;
let (fid, def_range) = def.def_at?;
let span_path = ctx.path_for_id(fid).ok()?;
let uri = path_to_url(&span_path).ok()?;
let span_source = ctx.source_by_id(fid).ok()?;
let range = ctx.to_lsp_range(def_range, &span_source);
let range = ctx.to_lsp_range_(def_range, fid)?;
let res = Some(GotoDefinitionResponse::Link(vec![LocationLink {
origin_selection_range: Some(origin_selection_range),
@ -57,7 +60,7 @@ impl SemanticRequest for GotoDefinitionRequest {
target_selection_range: range,
}]));
debug!("goto_definition: {fid:?} {res:?}");
log::info!("goto_definition: {fid:?} {res:?}");
res
}
}
@ -77,7 +80,7 @@ mod tests {
position: find_test_position(&source),
};
let result = request.request(world);
let result = request.request(world, None);
assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
});
}

View file

@ -39,7 +39,7 @@ impl StatefulRequest for HoverRequest {
// the typst's cursor is 1-based, so we need to add 1 to the offset
let cursor = offset + 1;
let contents = def_tooltip(ctx, &source, cursor).or_else(|| {
let contents = def_tooltip(ctx, &source, doc.as_ref(), cursor).or_else(|| {
Some(typst_to_lsp::tooltip(&tooltip(
ctx.world(),
doc_ref,
@ -118,13 +118,14 @@ impl StatefulRequest for HoverRequest {
fn def_tooltip(
ctx: &mut AnalysisContext,
source: &Source,
document: Option<&VersionedDocument>,
cursor: usize,
) -> Option<LspHoverContents> {
let leaf = LinkedNode::new(source.root()).leaf_at(cursor)?;
let deref_target = get_deref_target(leaf.clone(), cursor)?;
let lnk = find_definition(ctx, source.clone(), deref_target.clone())?;
let lnk = find_definition(ctx, source.clone(), document, deref_target.clone())?;
let mut results = vec![];
@ -135,6 +136,11 @@ fn def_tooltip(
| LexicalKind::Var(LexicalVarKind::ValRef)
| LexicalKind::Block
| LexicalKind::Heading(..) => None,
LexicalKind::Var(LexicalVarKind::BibKey) => {
results.push(MarkedString::String(format!("Bibliography: @{}", lnk.name)));
Some(LspHoverContents::Array(results))
}
LexicalKind::Var(LexicalVarKind::Function) => {
let sig = if let Some(Value::Func(func)) = &lnk.value {
Some(analyze_dyn_signature(ctx, func.clone()))

View file

@ -34,4 +34,4 @@ pub use crate::lsp_typst_boundary::{
lsp_to_typst, path_to_url, typst_to_lsp, LspDiagnostic, LspRange, LspSeverity,
PositionEncoding, TypstDiagnostic, TypstSeverity, TypstSpan,
};
pub use crate::VersionedDocument;
pub use crate::{StatefulRequest, VersionedDocument};

View file

@ -2,7 +2,6 @@ use crate::{
analysis::{find_definition, DefinitionLink},
prelude::*,
syntax::get_deref_target,
SemanticRequest,
};
use log::debug;
@ -31,10 +30,14 @@ pub struct PrepareRenameRequest {
// todo: rename alias
// todo: rename import path?
impl SemanticRequest for PrepareRenameRequest {
impl StatefulRequest for PrepareRenameRequest {
type Response = PrepareRenameResponse;
fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response> {
fn request(
self,
ctx: &mut AnalysisContext,
doc: Option<VersionedDocument>,
) -> Option<Self::Response> {
let source = ctx.source_by_path(&self.path).ok()?;
let offset = ctx.to_typst_pos(self.position, &source)?;
@ -47,7 +50,7 @@ impl SemanticRequest for PrepareRenameRequest {
let use_site = deref_target.node().clone();
let origin_selection_range = ctx.to_lsp_range(use_site.range(), &source);
let lnk = find_definition(ctx, source.clone(), deref_target)?;
let lnk = find_definition(ctx, source.clone(), doc.as_ref(), deref_target)?;
validate_renaming_definition(&lnk)?;
debug!("prepare_rename: {}", lnk.name);
@ -114,7 +117,7 @@ mod tests {
position: find_test_position(&source),
};
let result = request.request(world);
let result = request.request(world, None);
assert_snapshot!(JsonRepr::new_redacted(result, &REDACT_LOC));
});
}

View file

@ -3,7 +3,7 @@ use lsp_types::TextEdit;
use crate::{
analysis::find_definition, find_references, prelude::*, syntax::get_deref_target,
validate_renaming_definition, SemanticRequest,
validate_renaming_definition,
};
/// The [`textDocument/rename`] request is sent from the client to the server to
@ -21,10 +21,14 @@ pub struct RenameRequest {
pub new_name: String,
}
impl SemanticRequest for RenameRequest {
impl StatefulRequest for RenameRequest {
type Response = WorkspaceEdit;
fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response> {
fn request(
self,
ctx: &mut AnalysisContext,
doc: Option<VersionedDocument>,
) -> Option<Self::Response> {
let source = ctx.source_by_path(&self.path).ok()?;
let offset = ctx.to_typst_pos(self.position, &source)?;
@ -35,7 +39,7 @@ impl SemanticRequest for RenameRequest {
let deref_target = get_deref_target(ast_node, cursor)?;
let lnk = find_definition(ctx, source.clone(), deref_target.clone())?;
let lnk = find_definition(ctx, source.clone(), doc.as_ref(), deref_target.clone())?;
validate_renaming_definition(&lnk)?;

View file

@ -100,6 +100,9 @@ pub enum LexicalVarKind {
/// `<foo>`
/// ^^^
Label,
/// `x:`
/// ^^
BibKey,
/// `let foo`
/// ^^^
Variable,