feat: support folding_range api

This commit is contained in:
Myriad-Dreamin 2024-03-07 21:34:47 +08:00
parent 22a68003fc
commit bc6e981e81
9 changed files with 405 additions and 243 deletions

View file

@ -0,0 +1,222 @@
use std::ops::Range;
use anyhow::anyhow;
use tower_lsp::lsp_types::SymbolKind;
use typst::syntax::{ast, LinkedNode, Source, SyntaxKind};
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
/// The kind of an item collected into the lexical hierarchy.
#[derive(Debug, Clone, Copy, Hash)]
pub(crate) enum LexicalKind {
    /// A markup heading; the payload is the heading level. A sentinel level of
    /// `-1` is used for the synthetic root scope (see `get_lexical_hierarchy`).
    Namespace(i16),
    /// An identifier bound directly by a `let` binding.
    Variable,
    /// An identifier bound by a `let` binding through a closure.
    Function,
    /// A label definition.
    Constant,
    /// A code or content block; only produced in `LexicalScopeKind::Block`
    /// mode and has no LSP symbol-kind counterpart.
    Block,
}
impl TryFrom<LexicalKind> for SymbolKind {
    type Error = ();

    /// Maps a lexical kind onto the corresponding LSP `SymbolKind`.
    ///
    /// `LexicalKind::Block` has no LSP counterpart and yields `Err(())`;
    /// callers that never produce blocks may safely unwrap the result.
    fn try_from(value: LexicalKind) -> Result<Self, Self::Error> {
        let kind = match value {
            LexicalKind::Namespace(..) => SymbolKind::NAMESPACE,
            LexicalKind::Variable => SymbolKind::VARIABLE,
            LexicalKind::Function => SymbolKind::FUNCTION,
            LexicalKind::Constant => SymbolKind::CONSTANT,
            LexicalKind::Block => return Err(()),
        };
        Ok(kind)
    }
}
/// Selects which items `get_lexical_hierarchy` collects.
#[derive(Debug, Clone, Copy, Hash, Default, PartialEq, Eq)]
pub(crate) enum LexicalScopeKind {
    /// Collect named symbols: headings, `let` bindings, and labels
    /// (used by the document/workspace symbol requests).
    #[default]
    Symbol,
    /// Collect block structure: code/content blocks plus headings
    /// (used by the folding-range request).
    Block,
}
/// A single collected lexical item.
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalInfo {
    /// Display name; empty for anonymous items such as blocks.
    pub name: String,
    /// What kind of item this is.
    pub kind: LexicalKind,
    /// Byte range of the item's node in the source text.
    pub range: Range<usize>,
}
/// A node in the lexical hierarchy tree.
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalHierarchy {
    /// The item at this node.
    pub info: LexicalInfo,
    /// Child items, or `None` when there are none. Wrapped in
    /// `comemo::Prehashed` so re-hashing the subtree is cheap.
    pub children: Option<comemo::Prehashed<EcoVec<LexicalHierarchy>>>,
}
/// Builds the lexical hierarchy of `source`.
///
/// Depending on `g`, the result contains named symbols
/// (`LexicalScopeKind::Symbol`) or block/folding structure
/// (`LexicalScopeKind::Block`). Returns `None` when the traversal fails,
/// i.e. when an AST cast inside `get_ident` errors out.
pub(crate) fn get_lexical_hierarchy(
    source: Source,
    g: LexicalScopeKind,
) -> Option<EcoVec<LexicalHierarchy>> {
    /// Wraps a finished symbol and its collected children into a tree node.
    fn symbreak(sym: LexicalInfo, curr: EcoVec<LexicalHierarchy>) -> LexicalHierarchy {
        LexicalHierarchy {
            info: sym,
            // An empty child list is stored as `None` rather than an empty vec.
            children: if curr.is_empty() {
                None
            } else {
                Some(comemo::Prehashed::new(curr))
            },
        }
    }

    #[derive(Default)]
    struct LexicalHierarchyWorker {
        g: LexicalScopeKind,
        // Stack of currently-open scopes: each entry is the scope's own symbol
        // plus the children collected for it so far. Index 0 is a synthetic
        // root scope pushed before traversal (see below).
        stack: Vec<(LexicalInfo, EcoVec<LexicalHierarchy>)>,
    }

    impl LexicalHierarchyWorker {
        /// Closes the innermost open scope and attaches it to its parent.
        /// Panics if called with fewer than two entries on the stack, so the
        /// synthetic root must always remain.
        fn symbreak(&mut self) {
            let (symbol, children) = self.stack.pop().unwrap();
            let current = &mut self.stack.last_mut().unwrap().1;
            current.push(symbreak(symbol, children));
        }

        /// Get all symbols for a node recursively.
        fn get_symbols(&mut self, node: LinkedNode) -> anyhow::Result<()> {
            let own_symbol = get_ident(&node, self.g)?;
            if let Some(symbol) = own_symbol {
                if let LexicalKind::Namespace(level) = symbol.kind {
                    // A new heading closes every open heading of the same or a
                    // deeper level: pop until a shallower heading, a block, or
                    // the synthetic root is on top of the stack.
                    'heading_break: while let Some((w, _)) = self.stack.last() {
                        match w.kind {
                            LexicalKind::Namespace(l) if l < level => break 'heading_break,
                            LexicalKind::Block => break 'heading_break,
                            _ if self.stack.len() <= 1 => break 'heading_break,
                            _ => {}
                        }
                        self.symbreak();
                    }
                }
                let is_heading = matches!(symbol.kind, LexicalKind::Namespace(..));
                self.stack.push((symbol, eco_vec![]));
                let stack_height = self.stack.len();

                for child in node.children() {
                    self.get_symbols(child)?;
                }

                if is_heading {
                    // A heading scope stays open past its own subtree so that
                    // following siblings become its children; close only the
                    // scopes its subtree left open.
                    while stack_height < self.stack.len() {
                        self.symbreak();
                    }
                } else {
                    // Non-heading scopes end with their node: close the scope
                    // itself as well.
                    while stack_height <= self.stack.len() {
                        self.symbreak();
                    }
                }
            } else {
                // The node itself yields no symbol; still recurse for nested ones.
                for child in node.children() {
                    self.get_symbols(child)?;
                }
            }

            Ok(())
        }
    }

    /// Get symbol for a leaf node of a valid type, or `None` if the node is an
    /// invalid type.
    #[allow(deprecated)]
    fn get_ident(node: &LinkedNode, g: LexicalScopeKind) -> anyhow::Result<Option<LexicalInfo>> {
        let (name, kind) = match node.kind() {
            // Labels become constants; collected only in symbol mode.
            SyntaxKind::Label if LexicalScopeKind::Block != g => {
                let ast_node = node
                    .cast::<ast::Label>()
                    .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
                let name = ast_node.get().to_string();
                (name, LexicalKind::Constant)
            }
            // Anonymous blocks; collected only in block (folding) mode.
            SyntaxKind::CodeBlock | SyntaxKind::ContentBlock if LexicalScopeKind::Symbol != g => {
                (String::new(), LexicalKind::Block)
            }
            // Identifiers count only when bound by a `let`; collected only in
            // symbol mode.
            SyntaxKind::Ident if LexicalScopeKind::Block != g => {
                let ast_node = node
                    .cast::<ast::Ident>()
                    .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
                let name = ast_node.get().to_string();
                let Some(parent) = node.parent() else {
                    return Ok(None);
                };
                let kind = match parent.kind() {
                    // for variable definitions, the Let binding holds an Ident
                    SyntaxKind::LetBinding => LexicalKind::Variable,
                    // for function definitions, the Let binding holds a Closure which holds the
                    // Ident
                    SyntaxKind::Closure => {
                        let Some(grand_parent) = parent.parent() else {
                            return Ok(None);
                        };
                        match grand_parent.kind() {
                            SyntaxKind::LetBinding => LexicalKind::Function,
                            _ => return Ok(None),
                        }
                    }
                    _ => return Ok(None),
                };
                (name, kind)
            }
            // Markup directly under a Heading supplies the heading's title;
            // the namespace level is the heading level.
            SyntaxKind::Markup => {
                let name = node.get().to_owned().into_text().to_string();
                if name.is_empty() {
                    return Ok(None);
                }
                let Some(parent) = node.parent() else {
                    return Ok(None);
                };
                let kind = match parent.kind() {
                    SyntaxKind::Heading => LexicalKind::Namespace(
                        parent.cast::<ast::Heading>().unwrap().level().get() as i16,
                    ),
                    _ => return Ok(None),
                };
                (name, kind)
            }
            _ => return Ok(None),
        };

        Ok(Some(LexicalInfo {
            name,
            kind,
            range: node.range(),
        }))
    }

    let root = LinkedNode::new(source.root());

    let mut worker = LexicalHierarchyWorker {
        g,
        ..LexicalHierarchyWorker::default()
    };
    // Synthetic root scope: level -1 is shallower than any real heading, so
    // the heading-closing loop never pops it.
    worker.stack.push((
        LexicalInfo {
            name: "deadbeef".to_string(),
            kind: LexicalKind::Namespace(-1),
            range: 0..0,
        },
        eco_vec![],
    ));

    let res = worker.get_symbols(root).ok();
    // Close every scope the traversal left open, down to the synthetic root.
    while worker.stack.len() > 1 {
        worker.symbreak();
    }
    // On success, return the root's children; the synthetic root is discarded.
    res.map(|_| worker.stack.pop().unwrap().1)
}

View file

@ -1 +1,5 @@
pub mod analyze;
pub mod track_values;
pub use track_values::*;
pub mod lexical_hierarchy;
pub(crate) use lexical_hierarchy::*;

View file

@ -1,8 +1,7 @@
use std::ops::Range;
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
use crate::prelude::*;
use crate::{
analysis::{get_lexical_hierarchy, LexicalHierarchy, LexicalScopeKind},
prelude::*,
};
#[derive(Debug, Clone)]
pub struct DocumentSymbolRequest {
@ -17,11 +16,10 @@ impl DocumentSymbolRequest {
) -> Option<DocumentSymbolResponse> {
let source = get_suitable_source_in_workspace(world, &self.path).ok()?;
let symbols = get_lexical_hierarchy(source.clone(), LexicalScopeGranularity::None);
let symbols = get_lexical_hierarchy(source.clone(), LexicalScopeKind::Symbol)?;
let symbols =
symbols.map(|symbols| filter_document_symbols(&symbols, &source, position_encoding));
symbols.map(DocumentSymbolResponse::Nested)
let symbols = filter_document_symbols(&symbols, &source, position_encoding);
Some(DocumentSymbolResponse::Nested(symbols))
}
}
@ -40,13 +38,7 @@ fn filter_document_symbols(
DocumentSymbol {
name: e.info.name.clone(),
detail: None,
kind: match e.info.kind {
LexicalKind::Namespace(..) => SymbolKind::NAMESPACE,
LexicalKind::Variable => SymbolKind::VARIABLE,
LexicalKind::Function => SymbolKind::FUNCTION,
LexicalKind::Constant => SymbolKind::CONSTANT,
LexicalKind::Block => unreachable!(),
},
kind: e.info.kind.try_into().unwrap(),
tags: None,
deprecated: None,
range: rng,
@ -61,209 +53,8 @@ fn filter_document_symbols(
.collect()
}
#[derive(Debug, Clone, Hash)]
pub(crate) enum LexicalKind {
Namespace(i16),
Variable,
Function,
Constant,
Block,
}
#[derive(Debug, Clone, Copy, Hash, Default, PartialEq, Eq)]
pub(crate) enum LexicalScopeGranularity {
#[default]
None,
Block,
}
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalInfo {
pub name: String,
pub kind: LexicalKind,
pub range: Range<usize>,
}
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalHierarchy {
pub info: LexicalInfo,
pub children: Option<comemo::Prehashed<EcoVec<LexicalHierarchy>>>,
}
pub(crate) fn get_lexical_hierarchy(
source: Source,
g: LexicalScopeGranularity,
) -> Option<EcoVec<LexicalHierarchy>> {
fn symbreak(sym: LexicalInfo, curr: EcoVec<LexicalHierarchy>) -> LexicalHierarchy {
LexicalHierarchy {
info: sym,
children: if curr.is_empty() {
None
} else {
Some(comemo::Prehashed::new(curr))
},
}
}
#[derive(Default)]
struct LexicalHierarchyWorker {
g: LexicalScopeGranularity,
stack: Vec<(LexicalInfo, EcoVec<LexicalHierarchy>)>,
}
impl LexicalHierarchyWorker {
fn symbreak(&mut self) {
let (symbol, children) = self.stack.pop().unwrap();
let current = &mut self.stack.last_mut().unwrap().1;
current.push(symbreak(symbol, children));
}
/// Get all symbols for a node recursively.
fn get_symbols(&mut self, node: LinkedNode) -> anyhow::Result<()> {
let own_symbol = get_ident(&node, self.g)?;
if let Some(symbol) = own_symbol {
if let LexicalKind::Namespace(level) = symbol.kind {
'heading_break: while let Some((w, _)) = self.stack.last() {
match w.kind {
LexicalKind::Namespace(l) if l < level => break 'heading_break,
LexicalKind::Block => break 'heading_break,
_ if self.stack.len() <= 1 => break 'heading_break,
_ => {}
}
self.symbreak();
}
}
let is_heading = matches!(symbol.kind, LexicalKind::Namespace(..));
self.stack.push((symbol, eco_vec![]));
let stack_height = self.stack.len();
for child in node.children() {
self.get_symbols(child)?;
}
if is_heading {
while stack_height < self.stack.len() {
self.symbreak();
}
} else {
while stack_height <= self.stack.len() {
self.symbreak();
}
}
} else {
for child in node.children() {
self.get_symbols(child)?;
}
}
Ok(())
}
}
/// Get symbol for a leaf node of a valid type, or `None` if the node is an
/// invalid type.
#[allow(deprecated)]
fn get_ident(
node: &LinkedNode,
g: LexicalScopeGranularity,
) -> anyhow::Result<Option<LexicalInfo>> {
let (name, kind) = match node.kind() {
SyntaxKind::Label => {
let ast_node = node
.cast::<ast::Label>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
(name, LexicalKind::Constant)
}
SyntaxKind::CodeBlock | SyntaxKind::ContentBlock
if LexicalScopeGranularity::None != g =>
{
(String::new(), LexicalKind::Block)
}
SyntaxKind::Ident => {
let ast_node = node
.cast::<ast::Ident>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
let Some(parent) = node.parent() else {
return Ok(None);
};
let kind = match parent.kind() {
// for variable definitions, the Let binding holds an Ident
SyntaxKind::LetBinding => LexicalKind::Variable,
// for function definitions, the Let binding holds a Closure which holds the
// Ident
SyntaxKind::Closure => {
let Some(grand_parent) = parent.parent() else {
return Ok(None);
};
match grand_parent.kind() {
SyntaxKind::LetBinding => LexicalKind::Function,
_ => return Ok(None),
}
}
_ => return Ok(None),
};
(name, kind)
}
SyntaxKind::Markup => {
let name = node.get().to_owned().into_text().to_string();
if name.is_empty() {
return Ok(None);
}
let Some(parent) = node.parent() else {
return Ok(None);
};
let kind = match parent.kind() {
SyntaxKind::Heading => LexicalKind::Namespace(
parent.cast::<ast::Heading>().unwrap().level().get() as i16,
),
_ => return Ok(None),
};
(name, kind)
}
_ => return Ok(None),
};
Ok(Some(LexicalInfo {
name,
kind,
range: node.range(),
}))
}
let root = LinkedNode::new(source.root());
let mut worker = LexicalHierarchyWorker {
g,
..LexicalHierarchyWorker::default()
};
worker.stack.push((
LexicalInfo {
name: "deadbeef".to_string(),
kind: LexicalKind::Namespace(-1),
range: 0..0,
},
eco_vec![],
));
let res = worker.get_symbols(root).ok();
while worker.stack.len() > 1 {
worker.symbreak();
}
res.map(|_| worker.stack.pop().unwrap().1)
}
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use super::*;
use crate::tests::*;

View file

@ -1,8 +1,12 @@
use crate::{get_lexical_hierarchy, prelude::*, LexicalScopeGranularity};
use crate::{
analysis::{get_lexical_hierarchy, LexicalHierarchy, LexicalKind, LexicalScopeKind},
prelude::*,
};
#[derive(Debug, Clone)]
pub struct FoldingRangeRequest {
pub path: PathBuf,
pub line_folding_only: bool,
}
impl FoldingRangeRequest {
@ -11,14 +15,100 @@ impl FoldingRangeRequest {
world: &TypstSystemWorld,
position_encoding: PositionEncoding,
) -> Option<Vec<FoldingRange>> {
let line_folding_only = self.line_folding_only;
let source = get_suitable_source_in_workspace(world, &self.path).ok()?;
let symbols = get_lexical_hierarchy(source, LexicalScopeGranularity::Block)?;
let symbols = get_lexical_hierarchy(source.clone(), LexicalScopeKind::Block)?;
let _ = symbols;
let _ = position_encoding;
let mut results = vec![];
let LspPosition { line, character } =
typst_to_lsp::offset_to_position(source.text().len(), position_encoding, &source);
let loc = (line, Some(character));
None
calc_folding_range(
&symbols,
&source,
position_encoding,
line_folding_only,
loc,
loc,
true,
&mut results,
);
trace!("FoldingRangeRequest(line_folding_only={line_folding_only}) symbols: {symbols:#?} results: {results:#?}");
Some(results)
}
}
/// A position as (line, optional character), matching `FoldingRange` fields.
type LoC = (u32, Option<u32>);

#[allow(clippy::too_many_arguments)]
#[allow(deprecated)]
/// Recursively converts a lexical hierarchy into LSP folding ranges,
/// appending to `ranges` (children are pushed before their parent).
///
/// `parent_last_loc` is where the enclosing scope ends; `last_loc` is the end
/// of the whole document; `is_last_range` marks whether this slice is the
/// final one on its branch, so heading ranges can be extended to the proper
/// end position.
fn calc_folding_range(
    symbols: &[LexicalHierarchy],
    source: &Source,
    position_encoding: PositionEncoding,
    line_folding_only: bool,
    parent_last_loc: LoC,
    last_loc: LoC,
    is_last_range: bool,
    ranges: &mut Vec<FoldingRange>,
) {
    for (i, e) in symbols.iter().enumerate() {
        let rng = typst_to_lsp::range(e.info.range.clone(), source, position_encoding).raw_range;
        let is_not_last_range = i + 1 < symbols.len();
        let is_not_final_last_range = !is_last_range || is_not_last_range;

        let mut range = FoldingRange {
            start_line: rng.start.line,
            start_character: Some(rng.start.character),
            end_line: rng.end.line,
            // NOTE(review): per the LSP `lineFoldingOnly` capability, character
            // fields should be OMITTED when the client only supports line
            // folding, but this sets `end_character` exactly in that case —
            // the condition looks inverted (`(!line_folding_only).then_some`?).
            // The snapshot test in this commit shows no `endCharacter` despite
            // `line_folding_only: true` — confirm against the spec and client.
            end_character: line_folding_only.then_some(rng.end.character),
            kind: None,
            collapsed_text: Some(e.info.name.clone()),
        };

        // Where the next sibling (or the enclosing/document end) starts;
        // used to stretch heading ranges over their trailing content.
        let next_start = if is_not_last_range {
            let next = &symbols[i + 1];
            let next_rng =
                typst_to_lsp::range(next.info.range.clone(), source, position_encoding).raw_range;
            (next_rng.start.line, Some(next_rng.start.character))
        } else if is_not_final_last_range {
            parent_last_loc
        } else {
            last_loc
        };

        if matches!(e.info.kind, LexicalKind::Namespace(..)) {
            // A heading folds up to the line before the next sibling, or all
            // the way to the inherited end position when it is the last one.
            range.end_line = range.end_line.max(if is_not_last_range {
                next_start.0.saturating_sub(1)
            } else {
                next_start.0
            });
        }

        if let Some(ch) = &e.children {
            // Children of a non-last node are bounded by this node's own end;
            // otherwise they inherit the parent's bound.
            let parent_last_loc = if is_not_last_range {
                (rng.end.line, Some(rng.end.character))
            } else {
                parent_last_loc
            };
            calc_folding_range(
                ch,
                source,
                position_encoding,
                line_folding_only,
                parent_last_loc,
                last_loc,
                !is_not_final_last_range,
                ranges,
            );
        }

        ranges.push(range);
    }
}
@ -29,10 +119,41 @@ mod tests {
#[test]
fn test_folding_range_request() {
run_with_source("let a = 1;", |world, path| {
let request = FoldingRangeRequest { path };
run_with_source("#let a = 1;", |world, path| {
let request = FoldingRangeRequest {
path,
line_folding_only: true,
};
let result = request.request(world, PositionEncoding::Utf16);
assert_eq!(result, None);
assert_snapshot!(JsonRepr::new_pure(result.unwrap()), @"[]");
});
let t = r#"#let a = {
let b = {
}
}"#;
run_with_source(t, |world, path| {
let request = FoldingRangeRequest {
path,
line_folding_only: true,
};
let result = request.request(world, PositionEncoding::Utf16);
assert_snapshot!(JsonRepr::new_pure(result.unwrap()), @r###"
[
{
"collapsedText": "",
"endLine": 0,
"startCharacter": 9,
"startLine": 0
},
{
"collapsedText": "",
"endLine": 3,
"startCharacter": 10,
"startLine": 1
}
]
"###);
});
}
}

View file

@ -40,9 +40,11 @@ mod tests {
use serde::Serialize;
use serde_json::{ser::PrettyFormatter, Serializer, Value};
use typst_ts_compiler::ShadowApiExt;
pub use typst_ts_compiler::TypstSystemWorld;
use typst_ts_core::{config::CompileOpts, Bytes};
pub use insta::assert_snapshot;
pub use typst_ts_compiler::TypstSystemWorld;
pub fn run_with_source<T>(
source: &str,
f: impl FnOnce(&mut TypstSystemWorld, PathBuf) -> T,
@ -73,6 +75,11 @@ mod tests {
pub struct JsonRepr(Value);
impl JsonRepr {
pub fn new_pure(v: impl serde::Serialize) -> Self {
let s = serde_json::to_value(v).unwrap();
Self(s)
}
// pub fn new(v: impl serde::Serialize) -> Self {
// let s = serde_json::to_value(v).unwrap();
// Self(REDACT_URI.redact(s))

View file

@ -5,7 +5,6 @@ pub use std::{
sync::Arc,
};
pub use anyhow::anyhow;
pub use itertools::{Format, Itertools};
pub use log::{error, trace};
pub use tower_lsp::lsp_types::{
@ -13,7 +12,7 @@ pub use tower_lsp::lsp_types::{
Documentation, FoldingRange, Hover, Location as LspLocation, MarkupContent, MarkupKind,
Position as LspPosition, SelectionRange, SemanticTokens, SemanticTokensDelta,
SemanticTokensFullDeltaResult, SemanticTokensResult, SignatureHelp, SignatureInformation,
SymbolInformation, SymbolKind, Url,
SymbolInformation, Url,
};
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::foundations::{Func, ParamInfo, Value};
@ -26,7 +25,7 @@ use typst_ts_compiler::service::WorkspaceProvider;
pub use typst_ts_compiler::TypstSystemWorld;
pub use typst_ts_core::{TypstDocument, TypstFileId};
pub use crate::analysis::analyze::analyze_expr;
pub use crate::analysis::analyze_expr;
pub use crate::lsp_typst_boundary::{
lsp_to_typst, typst_to_lsp, LspDiagnostic, LspRange, LspRawRange, LspSeverity,
PositionEncoding, TypstDiagnostic, TypstSeverity, TypstSpan,

View file

@ -1,7 +1,9 @@
use typst_ts_compiler::NotifyApi;
use crate::document_symbol::get_lexical_hierarchy;
use crate::{prelude::*, LexicalHierarchy, LexicalKind, LexicalScopeGranularity};
use crate::{
analysis::{get_lexical_hierarchy, LexicalHierarchy, LexicalScopeKind},
prelude::*,
};
#[derive(Debug, Clone)]
pub struct SymbolRequest {
@ -23,12 +25,13 @@ impl SymbolRequest {
return;
};
let uri = Url::from_file_path(path).unwrap();
let res = get_lexical_hierarchy(source.clone(), LexicalScopeGranularity::None)
.and_then(|symbols| {
let res = get_lexical_hierarchy(source.clone(), LexicalScopeKind::Symbol).and_then(
|symbols| {
self.pattern.as_ref().map(|pattern| {
filter_document_symbols(&symbols, pattern, &source, &uri, position_encoding)
})
});
},
);
if let Some(mut res) = res {
symbols.append(&mut res)
@ -60,13 +63,7 @@ fn filter_document_symbols(
SymbolInformation {
name: e.info.name.clone(),
kind: match e.info.kind {
LexicalKind::Namespace(..) => SymbolKind::NAMESPACE,
LexicalKind::Variable => SymbolKind::VARIABLE,
LexicalKind::Function => SymbolKind::FUNCTION,
LexicalKind::Constant => SymbolKind::CONSTANT,
LexicalKind::Block => unreachable!(),
},
kind: e.info.kind.try_into().unwrap(),
tags: None,
deprecated: None,
location: LspLocation {