mirror of https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-07-19 10:45:02 +00:00

feat: nested document symbols

parent 50ca444915
commit 22a68003fc

10 changed files with 503 additions and 97 deletions
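This change replaces the flat DocumentSymbolResponse::Flat reply with a nested DocumentSymbolResponse::Nested one. Symbol collection moves out of get_document_symbols into a reusable get_lexical_hierarchy pass that builds a LexicalHierarchy tree: headings become LexicalKind::Namespace(level) nodes that adopt the symbols beneath them. The same pass now backs the document-symbol request, the workspace-symbol request, and a new (still stubbed) folding-range request.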
@@ -1,3 +1,7 @@
use std::ops::Range;

use typst_ts_core::typst::prelude::{eco_vec, EcoVec};

use crate::prelude::*;

#[derive(Debug, Clone)]
@@ -13,40 +17,146 @@ impl DocumentSymbolRequest {
    ) -> Option<DocumentSymbolResponse> {
        let source = get_suitable_source_in_workspace(world, &self.path).ok()?;

        let uri = Url::from_file_path(self.path).unwrap();
        let symbols = get_document_symbols(source, uri, position_encoding);
        let symbols = get_lexical_hierarchy(source.clone(), LexicalScopeGranularity::None);

        symbols.map(DocumentSymbolResponse::Flat)
        let symbols =
            symbols.map(|symbols| filter_document_symbols(&symbols, &source, position_encoding));
        symbols.map(DocumentSymbolResponse::Nested)
    }
}

#[comemo::memoize]
pub(crate) fn get_document_symbols(
    source: Source,
    uri: Url,
#[allow(deprecated)]
fn filter_document_symbols(
    symbols: &[LexicalHierarchy],
    source: &Source,
    position_encoding: PositionEncoding,
) -> Option<Vec<SymbolInformation>> {
    struct DocumentSymbolWorker {
        symbols: Vec<SymbolInformation>,
) -> Vec<DocumentSymbol> {
    symbols
        .iter()
        .map(|e| {
            let rng =
                typst_to_lsp::range(e.info.range.clone(), source, position_encoding).raw_range;

            DocumentSymbol {
                name: e.info.name.clone(),
                detail: None,
                kind: match e.info.kind {
                    LexicalKind::Namespace(..) => SymbolKind::NAMESPACE,
                    LexicalKind::Variable => SymbolKind::VARIABLE,
                    LexicalKind::Function => SymbolKind::FUNCTION,
                    LexicalKind::Constant => SymbolKind::CONSTANT,
                    LexicalKind::Block => unreachable!(),
                },
                tags: None,
                deprecated: None,
                range: rng,
                selection_range: rng,
                // .raw_range,
                children: e
                    .children
                    .as_ref()
                    .map(|ch| filter_document_symbols(ch, source, position_encoding)),
            }
        })
        .collect()
}

#[derive(Debug, Clone, Hash)]
pub(crate) enum LexicalKind {
    Namespace(i16),
    Variable,
    Function,
    Constant,
    Block,
}

#[derive(Debug, Clone, Copy, Hash, Default, PartialEq, Eq)]
pub(crate) enum LexicalScopeGranularity {
    #[default]
    None,
    Block,
}

#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalInfo {
    pub name: String,
    pub kind: LexicalKind,
    pub range: Range<usize>,
}

#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalHierarchy {
    pub info: LexicalInfo,
    pub children: Option<comemo::Prehashed<EcoVec<LexicalHierarchy>>>,
}

pub(crate) fn get_lexical_hierarchy(
    source: Source,
    g: LexicalScopeGranularity,
) -> Option<EcoVec<LexicalHierarchy>> {
    fn symbreak(sym: LexicalInfo, curr: EcoVec<LexicalHierarchy>) -> LexicalHierarchy {
        LexicalHierarchy {
            info: sym,
            children: if curr.is_empty() {
                None
            } else {
                Some(comemo::Prehashed::new(curr))
            },
        }
    }

impl DocumentSymbolWorker {
    /// Get all symbols for a node recursively.
    pub fn get_symbols<'a>(
        &mut self,
        node: LinkedNode<'a>,
        source: &'a Source,
        uri: &'a Url,
        position_encoding: PositionEncoding,
    ) -> anyhow::Result<()> {
        let own_symbol = get_ident(&node, source, uri, position_encoding)?;
    #[derive(Default)]
    struct LexicalHierarchyWorker {
        g: LexicalScopeGranularity,
        stack: Vec<(LexicalInfo, EcoVec<LexicalHierarchy>)>,
    }

        for child in node.children() {
            self.get_symbols(child, source, uri, position_encoding)?;
        }
    impl LexicalHierarchyWorker {
        fn symbreak(&mut self) {
            let (symbol, children) = self.stack.pop().unwrap();
            let current = &mut self.stack.last_mut().unwrap().1;
            current.push(symbreak(symbol, children));
        }

        /// Get all symbols for a node recursively.
        fn get_symbols(&mut self, node: LinkedNode) -> anyhow::Result<()> {
            let own_symbol = get_ident(&node, self.g)?;

            if let Some(symbol) = own_symbol {
                self.symbols.push(symbol);
                if let LexicalKind::Namespace(level) = symbol.kind {
                    'heading_break: while let Some((w, _)) = self.stack.last() {
                        match w.kind {
                            LexicalKind::Namespace(l) if l < level => break 'heading_break,
                            LexicalKind::Block => break 'heading_break,
                            _ if self.stack.len() <= 1 => break 'heading_break,
                            _ => {}
                        }

                        self.symbreak();
                    }
                }
                let is_heading = matches!(symbol.kind, LexicalKind::Namespace(..));

                self.stack.push((symbol, eco_vec![]));
                let stack_height = self.stack.len();

                for child in node.children() {
                    self.get_symbols(child)?;
                }

                if is_heading {
                    while stack_height < self.stack.len() {
                        self.symbreak();
                    }
                } else {
                    while stack_height <= self.stack.len() {
                        self.symbreak();
                    }
                }
            } else {
                for child in node.children() {
                    self.get_symbols(child)?;
                }
            }

            Ok(())
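The core of the new nesting pass is the stack discipline above: when a heading of level n arrives, the worker first "breaks" (pops and attaches) every open entry whose namespace level is >= n, stopping at a block boundary or at the sentinel root frame, and then pushes itself so subsequent symbols nest beneath it. Below is a minimal, self-contained sketch of the same idea, with hypothetical Node/nest names rather than tinymist's API, and leaf symbols modeled as level i16::MAX:

// Build a heading tree from a flat (level, name) stream.
#[derive(Debug)]
struct Node {
    level: i16, // heading level; i16::MAX marks a leaf symbol
    name: String,
    children: Vec<Node>,
}

fn nest(symbols: Vec<(i16, &str)>) -> Vec<Node> {
    // Sentinel root frame, mirroring the Namespace(-1) "deadbeef" entry above.
    let mut stack = vec![Node { level: -1, name: String::new(), children: vec![] }];
    for (level, name) in symbols {
        // A new heading closes every open heading at the same or deeper level.
        while stack.len() > 1 && stack.last().map_or(false, |top| top.level >= level) {
            let done = stack.pop().unwrap();
            stack.last_mut().unwrap().children.push(done);
        }
        let node = Node { level, name: name.to_string(), children: vec![] };
        if level == i16::MAX {
            // Leaf symbol: attach to the innermost open heading.
            stack.last_mut().unwrap().children.push(node);
        } else {
            // Heading: stays open to adopt what follows.
            stack.push(node);
        }
    }
    // Drain the stack, like the `while worker.stack.len() > 1` loop further down.
    while stack.len() > 1 {
        let done = stack.pop().unwrap();
        stack.last_mut().unwrap().children.push(done);
    }
    stack.pop().unwrap().children
}

fn main() {
    // Mirrors the snapshot test: Heading 2 nests under Heading 1; Heading 3 is a sibling.
    let doc = vec![(1, "Heading 1"), (i16::MAX, "a"), (2, "Heading 2"), (i16::MAX, "b"), (1, "Heading 3")];
    println!("{:#?}", nest(doc));
}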
@@ -58,29 +168,21 @@ pub(crate) fn get_document_symbols(
#[allow(deprecated)]
fn get_ident(
    node: &LinkedNode,
    source: &Source,
    uri: &Url,
    position_encoding: PositionEncoding,
) -> anyhow::Result<Option<SymbolInformation>> {
    match node.kind() {
    g: LexicalScopeGranularity,
) -> anyhow::Result<Option<LexicalInfo>> {
    let (name, kind) = match node.kind() {
        SyntaxKind::Label => {
            let ast_node = node
                .cast::<ast::Label>()
                .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
            let name = ast_node.get().to_string();
            let symbol = SymbolInformation {
                name,
                kind: SymbolKind::CONSTANT,
                tags: None,
                deprecated: None, // do not use, deprecated, use `tags` instead
                location: LspLocation {
                    uri: uri.clone(),
                    range: typst_to_lsp::range(node.range(), source, position_encoding)
                        .raw_range,
                },
                container_name: None,
            };
            Ok(Some(symbol))

            (name, LexicalKind::Constant)
        }
        SyntaxKind::CodeBlock | SyntaxKind::ContentBlock
            if LexicalScopeGranularity::None != g =>
        {
            (String::new(), LexicalKind::Block)
        }
        SyntaxKind::Ident => {
            let ast_node = node
@@ -92,7 +194,7 @@ pub(crate) fn get_document_symbols(
            };
            let kind = match parent.kind() {
                // for variable definitions, the Let binding holds an Ident
                SyntaxKind::LetBinding => SymbolKind::VARIABLE,
                SyntaxKind::LetBinding => LexicalKind::Variable,
                // for function definitions, the Let binding holds a Closure which holds the
                // Ident
                SyntaxKind::Closure => {
@@ -100,25 +202,14 @@ pub(crate) fn get_document_symbols(
                        return Ok(None);
                    };
                    match grand_parent.kind() {
                        SyntaxKind::LetBinding => SymbolKind::FUNCTION,
                        SyntaxKind::LetBinding => LexicalKind::Function,
                        _ => return Ok(None),
                    }
                }
                _ => return Ok(None),
            };
            let symbol = SymbolInformation {
                name,
                kind,
                tags: None,
                deprecated: None, // do not use, deprecated, use `tags` instead
                location: LspLocation {
                    uri: uri.clone(),
                    range: typst_to_lsp::range(node.range(), source, position_encoding)
                        .raw_range,
                },
                container_name: None,
            };
            Ok(Some(symbol))

            (name, kind)
        }
        SyntaxKind::Markup => {
            let name = node.get().to_owned().into_text().to_string();
@@ -129,34 +220,114 @@ pub(crate) fn get_document_symbols(
                return Ok(None);
            };
            let kind = match parent.kind() {
                SyntaxKind::Heading => SymbolKind::NAMESPACE,
                SyntaxKind::Heading => LexicalKind::Namespace(
                    parent.cast::<ast::Heading>().unwrap().level().get() as i16,
                ),
                _ => return Ok(None),
            };
            let symbol = SymbolInformation {
                name,
                kind,
                tags: None,
                deprecated: None, // do not use, deprecated, use `tags` instead
                location: LspLocation {
                    uri: uri.clone(),
                    range: typst_to_lsp::range(node.range(), source, position_encoding)
                        .raw_range,
                },
                container_name: None,
            };
            Ok(Some(symbol))

            (name, kind)
        }
        _ => Ok(None),
    }
        _ => return Ok(None),
    };

    Ok(Some(LexicalInfo {
        name,
        kind,
        range: node.range(),
    }))
}

    let root = LinkedNode::new(source.root());

    let mut worker = DocumentSymbolWorker { symbols: vec![] };
    let mut worker = LexicalHierarchyWorker {
        g,
        ..LexicalHierarchyWorker::default()
    };
    worker.stack.push((
        LexicalInfo {
            name: "deadbeef".to_string(),
            kind: LexicalKind::Namespace(-1),
            range: 0..0,
        },
        eco_vec![],
    ));
    let res = worker.get_symbols(root).ok();

    let res = worker
        .get_symbols(root, &source, &uri, position_encoding)
        .ok();

    res.map(|_| worker.symbols)
    while worker.stack.len() > 1 {
        worker.symbreak();
    }
    res.map(|_| worker.stack.pop().unwrap().1)
}

#[cfg(test)]
mod tests {
    use insta::assert_snapshot;

    use super::*;
    use crate::tests::*;

    #[test]
    fn test_get_document_symbols() {
        run_with_source(
            r#"
= Heading 1
#let a = 1;
== Heading 2
#let b = 1;
= Heading 3
#let c = 1;
#let d = {
  #let e = 1;
  0
}
"#,
            |world, path| {
                let request = DocumentSymbolRequest { path };
                let result = request.request(world, PositionEncoding::Utf16);
                assert_snapshot!(JsonRepr::new_redacted(result.unwrap(), &REDACT_LOC), @r###"
                [
                 {
                  "children": [
                   {
                    "kind": 13,
                    "name": "a"
                   },
                   {
                    "children": [
                     {
                      "kind": 13,
                      "name": "b"
                     }
                    ],
                    "kind": 3,
                    "name": "Heading 2"
                   }
                  ],
                  "kind": 3,
                  "name": "Heading 1"
                 },
                 {
                  "children": [
                   {
                    "kind": 13,
                    "name": "c"
                   },
                   {
                    "kind": 13,
                    "name": "d"
                   },
                   {
                    "kind": 13,
                    "name": "e"
                   }
                  ],
                  "kind": 3,
                  "name": "Heading 3"
                 }
                ]
                "###);
            },
        );
    }
}
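For reference, the numeric kinds in the snapshot are LSP SymbolKind values: 3 is Namespace (the headings) and 13 is Variable. Note that e, although defined inside the block #let d = { ... }, still appears directly under Heading 3, because document symbols are collected with LexicalScopeGranularity::None, where blocks do not open a new scope.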
38 crates/tinymist-query/src/folding_range.rs Normal file
@@ -0,0 +1,38 @@
use crate::{get_lexical_hierarchy, prelude::*, LexicalScopeGranularity};

#[derive(Debug, Clone)]
pub struct FoldingRangeRequest {
    pub path: PathBuf,
}

impl FoldingRangeRequest {
    pub fn request(
        self,
        world: &TypstSystemWorld,
        position_encoding: PositionEncoding,
    ) -> Option<Vec<FoldingRange>> {
        let source = get_suitable_source_in_workspace(world, &self.path).ok()?;

        let symbols = get_lexical_hierarchy(source, LexicalScopeGranularity::Block)?;

        let _ = symbols;
        let _ = position_encoding;

        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::*;

    #[test]
    fn test_folding_range_request() {
        run_with_source("let a = 1;", |world, path| {
            let request = FoldingRangeRequest { path };
            let result = request.request(world, PositionEncoding::Utf16);
            assert_eq!(result, None);
        });
    }
}
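Note that FoldingRangeRequest is still a stub at this point: it computes the block-granularity hierarchy, discards it, and returns None. A hypothetical follow-up, sketched here under assumptions (a stand-in Fold struct instead of lsp_types::FoldingRange, and line_of standing in for the source's byte-offset-to-line lookup), would turn each LexicalKind::Block range into a folding range:

use std::ops::Range;

struct Fold {
    start_line: u32,
    end_line: u32,
}

// Convert block byte ranges into line-based folds.
fn folds(line_of: impl Fn(usize) -> u32, blocks: &[Range<usize>]) -> Vec<Fold> {
    blocks
        .iter()
        .map(|r| Fold { start_line: line_of(r.start), end_line: line_of(r.end) })
        // A block that opens and closes on one line has nothing to fold.
        .filter(|f| f.end_line > f.start_line)
        .collect()
}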
@@ -18,6 +18,8 @@ pub(crate) mod hover;
pub use hover::*;
pub(crate) mod completion;
pub use completion::*;
pub(crate) mod folding_range;
pub use folding_range::*;
pub(crate) mod selection_range;
pub use selection_range::*;

@@ -25,3 +27,106 @@ pub mod lsp_typst_boundary;
pub use lsp_typst_boundary::*;

mod prelude;

#[cfg(test)]
mod tests {
    use core::fmt;
    use std::{
        collections::HashSet,
        path::{Path, PathBuf},
    };

    use once_cell::sync::Lazy;
    use serde::Serialize;
    use serde_json::{ser::PrettyFormatter, Serializer, Value};
    use typst_ts_compiler::ShadowApiExt;
    pub use typst_ts_compiler::TypstSystemWorld;
    use typst_ts_core::{config::CompileOpts, Bytes};

    pub fn run_with_source<T>(
        source: &str,
        f: impl FnOnce(&mut TypstSystemWorld, PathBuf) -> T,
    ) -> T {
        let root = if cfg!(windows) {
            PathBuf::from("C:\\")
        } else {
            PathBuf::from("/")
        };
        let mut world = TypstSystemWorld::new(CompileOpts {
            root_dir: root.clone(),
            ..Default::default()
        })
        .unwrap();
        let pw = &root.join(Path::new("/main.typ"));
        world
            .with_shadow_file(pw, Bytes::from(source.as_bytes()), move |e| {
                Ok(f(e, pw.to_owned()))
            })
            .unwrap()
    }

    // pub static REDACT_URI: Lazy<RedactFields> = Lazy::new(||
    // RedactFields::from_iter(["uri"]));
    pub static REDACT_LOC: Lazy<RedactFields> =
        Lazy::new(|| RedactFields::from_iter(["location", "range", "selectionRange"]));

    pub struct JsonRepr(Value);

    impl JsonRepr {
        // pub fn new(v: impl serde::Serialize) -> Self {
        //     let s = serde_json::to_value(v).unwrap();
        //     Self(REDACT_URI.redact(s))
        // }

        pub fn new_redacted(v: impl serde::Serialize, rm: &RedactFields) -> Self {
            let s = serde_json::to_value(v).unwrap();
            Self(rm.redact(s))
        }
    }

    impl fmt::Display for JsonRepr {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            let w = std::io::BufWriter::new(Vec::new());
            let mut ser = Serializer::with_formatter(w, PrettyFormatter::with_indent(b" "));
            self.0.serialize(&mut ser).unwrap();

            f.write_str(&String::from_utf8(ser.into_inner().into_inner().unwrap()).unwrap())
        }
    }

    pub trait Redact {
        fn redact(&self, v: Value) -> Value;
    }

    pub struct RedactFields(HashSet<&'static str>);

    impl FromIterator<&'static str> for RedactFields {
        fn from_iter<T: IntoIterator<Item = &'static str>>(iter: T) -> Self {
            Self(iter.into_iter().collect())
        }
    }

    impl Redact for RedactFields {
        fn redact(&self, v: Value) -> Value {
            match v {
                Value::Object(mut m) => {
                    for (_, v) in m.iter_mut() {
                        *v = self.redact(v.clone());
                    }
                    for k in self.0.iter() {
                        m.remove(*k);
                    }
                    Value::Object(m)
                }
                Value::Array(mut a) => {
                    for v in a.iter_mut() {
                        *v = self.redact(v.clone());
                    }
                    Value::Array(a)
                }
                Value::String(s) => Value::String(s),
                v => v,
            }
        }
    }
}
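A quick usage sketch for the redaction helper above (hypothetical values; assumes RedactFields and the Redact trait are in scope): named fields are stripped recursively at every nesting level, which is what keeps the document-symbol snapshot free of position data.

#[test]
fn redact_fields_recursively() {
    use serde_json::json;

    let redactor = RedactFields::from_iter(["range", "selectionRange"]);
    let sym = json!({
        "name": "Heading 1",
        "kind": 3,
        "range": { "start": 0, "end": 11 },
        "children": [{ "name": "a", "kind": 13, "selectionRange": {} }]
    });
    // Both the top-level "range" and the nested "selectionRange" are removed.
    assert_eq!(
        redactor.redact(sym),
        json!({
            "name": "Heading 1",
            "kind": 3,
            "children": [{ "name": "a", "kind": 13 }]
        })
    );
}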
@@ -9,10 +9,11 @@ pub use anyhow::anyhow;
pub use itertools::{Format, Itertools};
pub use log::{error, trace};
pub use tower_lsp::lsp_types::{
    CompletionResponse, DiagnosticRelatedInformation, DocumentSymbolResponse, Documentation, Hover,
    Location as LspLocation, MarkupContent, MarkupKind, Position as LspPosition, SelectionRange,
    SemanticTokens, SemanticTokensDelta, SemanticTokensFullDeltaResult, SemanticTokensResult,
    SignatureHelp, SignatureInformation, SymbolInformation, SymbolKind, Url,
    CompletionResponse, DiagnosticRelatedInformation, DocumentSymbol, DocumentSymbolResponse,
    Documentation, FoldingRange, Hover, Location as LspLocation, MarkupContent, MarkupKind,
    Position as LspPosition, SelectionRange, SemanticTokens, SemanticTokensDelta,
    SemanticTokensFullDeltaResult, SemanticTokensResult, SignatureHelp, SignatureInformation,
    SymbolInformation, SymbolKind, Url,
};
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::foundations::{Func, ParamInfo, Value};
@@ -1,7 +1,7 @@
use typst_ts_compiler::NotifyApi;

use crate::document_symbol::get_document_symbols;
use crate::prelude::*;
use crate::document_symbol::get_lexical_hierarchy;
use crate::{prelude::*, LexicalHierarchy, LexicalKind, LexicalScopeGranularity};

#[derive(Debug, Clone)]
pub struct SymbolRequest {
@@ -23,11 +23,12 @@ impl SymbolRequest {
                return;
            };
            let uri = Url::from_file_path(path).unwrap();
            let res = get_document_symbols(source, uri, position_encoding).and_then(|symbols| {
                self.pattern
                    .as_ref()
                    .map(|pattern| filter_document_symbols(symbols, pattern))
            });
            let res = get_lexical_hierarchy(source.clone(), LexicalScopeGranularity::None)
                .and_then(|symbols| {
                    self.pattern.as_ref().map(|pattern| {
                        filter_document_symbols(&symbols, pattern, &source, &uri, position_encoding)
                    })
                });

            if let Some(mut res) = res {
                symbols.append(&mut res)
@@ -38,12 +39,42 @@ impl SymbolRequest {
    }
}

#[allow(deprecated)]
fn filter_document_symbols(
    symbols: Vec<SymbolInformation>,
    symbols: &[LexicalHierarchy],
    query_string: &str,
    source: &Source,
    uri: &Url,
    position_encoding: PositionEncoding,
) -> Vec<SymbolInformation> {
    symbols
        .into_iter()
        .filter(|e| e.name.contains(query_string))
        .iter()
        .flat_map(|e| {
            [e].into_iter()
                .chain(e.children.as_deref().into_iter().flatten())
        })
        .filter(|e| e.info.name.contains(query_string))
        .map(|e| {
            let rng =
                typst_to_lsp::range(e.info.range.clone(), source, position_encoding).raw_range;

            SymbolInformation {
                name: e.info.name.clone(),
                kind: match e.info.kind {
                    LexicalKind::Namespace(..) => SymbolKind::NAMESPACE,
                    LexicalKind::Variable => SymbolKind::VARIABLE,
                    LexicalKind::Function => SymbolKind::FUNCTION,
                    LexicalKind::Constant => SymbolKind::CONSTANT,
                    LexicalKind::Block => unreachable!(),
                },
                tags: None,
                deprecated: None,
                location: LspLocation {
                    uri: uri.clone(),
                    range: rng,
                },
                container_name: None,
            }
        })
        .collect()
}
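One detail worth flagging in filter_document_symbols: the [e].into_iter().chain(...) step flattens exactly one level of the hierarchy, so a symbol nested two levels deep (say, a variable under a level-2 heading that itself sits under a level-1 heading) would not be visited. A fully recursive walk is a small change; this is a hedged sketch assuming the LexicalHierarchy definition above, not the commit's code:

// Collect a node and all of its descendants, depth first.
fn walk<'a>(e: &'a LexicalHierarchy, out: &mut Vec<&'a LexicalHierarchy>) {
    out.push(e);
    for ch in e.children.as_deref().into_iter().flatten() {
        walk(ch, out);
    }
}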