feat: fold continue line comments (#1043)

* feat: fold continue line comments

* update e2e hash

* fix: duplicate comment add to fold

* add test

* test: update snapshot

* feat: remove a clone and assign comment kind folding range

* test: update snapshot

* move to hierarchy

* refactor hierarchy

* add todo

* update collapsed text

* fix: format

* add todo

* refactor: to help review

* refactor: no need to return error

* feat: split `CommentGroupMatcher`

* dev: reimplement nodes checker

* update e2e hash

* test: update snapshot

---------

Co-authored-by: Myriad-Dreamin <camiyoru@gmail.com>
This commit is contained in:
Yifan Song 2024-12-28 12:17:34 +08:00 committed by GitHub
parent 978e4d6f4e
commit c5981b81db
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 239 additions and 95 deletions

View file

@ -1,3 +1,5 @@
use lsp_types::SymbolKind;
use crate::{
prelude::*,
syntax::{get_lexical_hierarchy, LexicalHierarchy, LexicalScopeKind},
@ -44,6 +46,7 @@ fn symbols_in_hierarchy(
) -> Vec<DocumentSymbol> {
hierarchy
.iter()
.filter(|hierarchy| TryInto::<SymbolKind>::try_into(hierarchy.info.kind.clone()).is_ok())
.map(|hierarchy| {
let range = to_lsp_range(hierarchy.info.range.clone(), source, position_encoding);

View file

@ -0,0 +1,10 @@
// Document comment
// Test for folding comment
/// No body write comment like this
/// This function computes the cardinal sine, $sinc(x)=sin(x)/x$.
/// This function computes the cardinal sine, $sinc(x)=sin(x)/x$.
/// cdsc
/// - x (int, float): The argument for the cardinal sine function.
/// -> float
content

View file

@ -0,0 +1,40 @@
---
source: crates/tinymist-query/src/folding_range.rs
expression: "JsonRepr::new_pure(json!({ \"false\": result_false, \"true\": result_true, }))"
input_file: crates/tinymist-query/src/fixtures/folding_range/comment.typ
snapshot_kind: text
---
{
"false": [
{
"collapsedText": "",
"endCharacter": 14,
"endLine": 8,
"kind": "comment",
"startCharacter": 2,
"startLine": 3
},
{
"collapsedText": "",
"endCharacter": 27,
"endLine": 1,
"kind": "comment",
"startCharacter": 0,
"startLine": 0
}
],
"true": [
{
"collapsedText": "",
"endLine": 8,
"kind": "comment",
"startLine": 3
},
{
"collapsedText": "",
"endLine": 1,
"kind": "comment",
"startLine": 0
}
]
}

View file

@ -13,6 +13,14 @@ snapshot_kind: text
"startCharacter": 8,
"startLine": 2
},
{
"collapsedText": "",
"endCharacter": 27,
"endLine": 4,
"kind": "comment",
"startCharacter": 4,
"startLine": 4
},
{
"collapsedText": "",
"endCharacter": 13,

View file

@ -132,6 +132,10 @@ fn calc_folding_range(
});
}
if matches!(child.info.kind, LexicalKind::CommentGroup) {
folding_range.kind = Some(lsp_types::FoldingRangeKind::Comment);
}
if let Some(ch) = &child.children {
let parent_last_loc = if is_not_last_range {
(range.end.line, Some(range.end.character))
@ -149,7 +153,6 @@ fn calc_folding_range(
folding_ranges,
);
}
folding_ranges.push(folding_range);
}
}

View file

@ -46,6 +46,60 @@ fn extract_mod_docs_between(
matcher.collect()
}
/// A classification signal emitted by [`CommentGroupMatcher::process`]
/// for each syntax node scanned while grouping adjacent comments.
pub enum CommentGroupSignal {
    /// A `#` hash token (code-mode marker); resets the newline counter.
    Hash,
    /// Whitespace that does not break the current comment group.
    Space,
    /// A `//` line comment that extends (or starts) a group.
    LineComment,
    /// A `/* */` block comment that extends (or starts) a group.
    BlockComment,
    /// Any token (or a blank line / parbreak) that terminates the group.
    BreakGroup,
}
#[derive(Default)]
pub struct CommentGroupMatcher {
newline_count: u32,
}
impl CommentGroupMatcher {
pub fn process(&mut self, n: &SyntaxNode) -> CommentGroupSignal {
match n.kind() {
SyntaxKind::Hash => {
self.newline_count = 0;
CommentGroupSignal::Hash
}
SyntaxKind::Space => {
if n.text().contains('\n') {
self.newline_count += 1;
}
if self.newline_count > 1 {
return CommentGroupSignal::BreakGroup;
}
CommentGroupSignal::Space
}
SyntaxKind::Parbreak => {
self.newline_count = 2;
CommentGroupSignal::BreakGroup
}
SyntaxKind::LineComment => {
self.newline_count = 0;
CommentGroupSignal::LineComment
}
SyntaxKind::BlockComment => {
self.newline_count = 0;
CommentGroupSignal::BlockComment
}
_ => {
self.newline_count = 0;
CommentGroupSignal::BreakGroup
}
}
}
pub fn reset(&mut self) {
self.newline_count = 0;
}
}
enum RawComment {
Line(EcoString),
Block(EcoString),
@ -54,45 +108,29 @@ enum RawComment {
#[derive(Default)]
pub struct DocCommentMatcher {
comments: Vec<RawComment>,
newline_count: usize,
group_matcher: CommentGroupMatcher,
strict: bool,
}
impl DocCommentMatcher {
pub fn process(&mut self, n: &SyntaxNode) -> bool {
match n.kind() {
SyntaxKind::Hash => {
self.newline_count = 0;
}
SyntaxKind::Space => {
if n.text().contains('\n') {
self.newline_count += 1;
}
if self.newline_count > 1 {
return true;
}
}
SyntaxKind::Parbreak => {
self.newline_count = 2;
return true;
}
SyntaxKind::LineComment => {
self.newline_count = 0;
match self.group_matcher.process(n) {
CommentGroupSignal::LineComment => {
let text = n.text();
if !self.strict || text.starts_with("///") {
self.comments.push(RawComment::Line(text.clone()));
}
}
SyntaxKind::BlockComment => {
self.newline_count = 0;
CommentGroupSignal::BlockComment => {
let text = n.text();
if !self.strict {
self.comments.push(RawComment::Block(text.clone()));
}
}
_ => {
self.newline_count = 0;
CommentGroupSignal::BreakGroup => {
return true;
}
CommentGroupSignal::Hash | CommentGroupSignal::Space => {}
}
false
@ -148,8 +186,8 @@ impl DocCommentMatcher {
res
}
pub(crate) fn reset(&mut self) {
pub fn reset(&mut self) {
self.comments.clear();
self.newline_count = 0;
self.group_matcher.reset();
}
}

View file

@ -1,6 +1,5 @@
use std::ops::{Deref, Range};
use anyhow::anyhow;
use ecow::{eco_vec, EcoString, EcoVec};
use lsp_types::SymbolKind;
use serde::{Deserialize, Serialize};
@ -10,6 +9,8 @@ use typst::syntax::{
};
use typst_shim::utils::LazyHash;
use super::{is_mark, CommentGroupMatcher};
pub(crate) fn get_lexical_hierarchy(
source: &Source,
scope_kind: LexicalScopeKind,
@ -29,16 +30,16 @@ pub(crate) fn get_lexical_hierarchy(
},
eco_vec![],
));
let res = match worker.get_symbols(root) {
Ok(()) => Some(()),
Err(err) => {
log::error!("lexical hierarchy analysis failed: {err:?}");
let res = match worker.check_node(root) {
Some(()) => Some(()),
None => {
log::error!("lexical hierarchy analysis failed");
None
}
};
while worker.stack.len() > 1 {
worker.symbreak();
worker.finish_hierarchy();
}
crate::log_debug_ct!("lexical hierarchy analysis took {:?}", start.elapsed());
@ -72,6 +73,7 @@ pub enum LexicalKind {
Heading(i16),
Var(LexicalVarKind),
Block,
CommentGroup,
}
impl LexicalKind {
@ -97,7 +99,7 @@ impl TryFrom<LexicalKind> for SymbolKind {
LexicalKind::Var(LexicalVarKind::Variable) => Ok(SymbolKind::VARIABLE),
LexicalKind::Var(LexicalVarKind::Function) => Ok(SymbolKind::FUNCTION),
LexicalKind::Var(LexicalVarKind::Label) => Ok(SymbolKind::CONSTANT),
LexicalKind::Var(..) | LexicalKind::Block => Err(()),
LexicalKind::Var(..) | LexicalKind::Block | LexicalKind::CommentGroup => Err(()),
}
}
}
@ -218,14 +220,20 @@ struct LexicalHierarchyWorker {
}
impl LexicalHierarchyWorker {
fn symbreak(&mut self) {
/// Whether `kind` is a "plain" token — trivia, a keyword, a mark, or an
/// error node — i.e. one that is skipped rather than recursed into when
/// walking children for the lexical hierarchy.
fn is_plain_token(kind: SyntaxKind) -> bool {
    kind.is_trivia() || kind.is_keyword() || is_mark(kind) || kind.is_error()
}
/// Finish the current top of the stack.
fn finish_hierarchy(&mut self) {
let (symbol, children) = self.stack.pop().unwrap();
let current = &mut self.stack.last_mut().unwrap().1;
current.push(symbreak(symbol, children));
current.push(finish_hierarchy(symbol, children));
}
fn enter_symbol_context(&mut self, node: &LinkedNode) -> anyhow::Result<IdentContext> {
/// Enter a node and setup the context.
fn enter_node(&mut self, node: &LinkedNode) -> Option<IdentContext> {
let checkpoint = self.ident_context;
match node.kind() {
SyntaxKind::RefMarker => self.ident_context = IdentContext::Ref,
@ -235,19 +243,57 @@ impl LexicalHierarchyWorker {
_ => {}
}
Ok(checkpoint)
Some(checkpoint)
}
fn exit_symbol_context(&mut self, checkpoint: IdentContext) -> anyhow::Result<()> {
/// Exit a node and restore the context.
fn exit_node(&mut self, checkpoint: IdentContext) -> Option<()> {
self.ident_context = checkpoint;
Ok(())
Some(())
}
/// Get all symbols for a node recursively.
fn get_symbols(&mut self, node: LinkedNode) -> anyhow::Result<()> {
/// Check nodes in a list recursively.
///
/// Runs of adjacent line/block comments (separated by at most one
/// newline, per `CommentGroupMatcher`) are merged into a single
/// `LexicalKind::CommentGroup` entry on the stack; all other
/// non-plain nodes are recursed into via `check_node`.
fn check_nodes(&mut self, node: LinkedNode) -> Option<()> {
    let mut group_matcher = CommentGroupMatcher::default();
    // Byte range of the comment group currently being accumulated, if any.
    let mut comment_range: Option<Range<usize>> = None;
    for child in node.children() {
        match group_matcher.process(&child) {
            super::CommentGroupSignal::Space => {}
            super::CommentGroupSignal::LineComment
            | super::CommentGroupSignal::BlockComment => {
                let child_range = child.range();
                // Extend the open group to cover this comment, or start
                // a fresh group at it.
                match comment_range {
                    Some(ref mut comment_range) => comment_range.end = child_range.end,
                    None => comment_range = Some(child_range),
                }
            }
            super::CommentGroupSignal::Hash | super::CommentGroupSignal::BreakGroup => {
                // The group (if any) just ended: record it as a nameless
                // CommentGroup hierarchy entry.
                if let Some(comment_range) = comment_range.take() {
                    self.stack.push((
                        LexicalInfo {
                            name: "".into(),
                            kind: LexicalKind::CommentGroup,
                            range: comment_range,
                        },
                        eco_vec![],
                    ));
                }
                // Plain tokens (trivia/keywords/marks/errors) carry no
                // symbols; only recurse into the rest.
                if !Self::is_plain_token(child.kind()) {
                    self.check_node(child)?;
                }
            }
        }
    }
    // NOTE(review): a comment group still open when the child list ends is
    // dropped here (only Hash/BreakGroup flush it) — confirm this is the
    // intended behavior for files that end in comments.
    Some(())
}
/// Check lexical hierarchy a node recursively.
fn check_node(&mut self, node: LinkedNode) -> Option<()> {
let own_symbol = self.get_ident(&node)?;
let checkpoint = self.enter_symbol_context(&node)?;
let checkpoint = self.enter_node(&node)?;
if let Some(symbol) = own_symbol {
if let LexicalKind::Heading(level) = symbol.kind {
@ -259,7 +305,7 @@ impl LexicalHierarchyWorker {
_ => {}
}
self.symbreak();
self.finish_hierarchy();
}
}
let is_heading = matches!(symbol.kind, LexicalKind::Heading(..));
@ -268,18 +314,16 @@ impl LexicalHierarchyWorker {
let stack_height = self.stack.len();
if node.kind() != SyntaxKind::ModuleImport {
for child in node.children() {
self.get_symbols(child)?;
}
self.check_nodes(node)?;
}
if is_heading {
while stack_height < self.stack.len() {
self.symbreak();
self.finish_hierarchy();
}
} else {
while stack_height <= self.stack.len() {
self.symbreak();
self.finish_hierarchy();
}
}
} else {
@ -295,15 +339,15 @@ impl LexicalHierarchyWorker {
// up.
if matches!(pat, ast::Pattern::Normal(ast::Expr::Closure(..))) {
let closure = name.clone();
self.get_symbols_with(closure, IdentContext::Ref)?;
self.check_node_with(closure, IdentContext::Ref)?;
break 'let_binding;
}
}
// reverse order for correct symbol affection
let name_offset = pattern.as_ref().map(|node| node.offset());
self.get_symbols_in_opt_with(pattern, IdentContext::Var)?;
self.get_symbols_in_first_expr(node.children().rev(), name_offset)?;
self.check_opt_node_with(pattern, IdentContext::Var)?;
self.check_first_sub_expr(node.children().rev(), name_offset)?;
}
SyntaxKind::ForLoop => {
let pattern = node.children().find(|child| child.is::<ast::Pattern>());
@ -313,16 +357,16 @@ impl LexicalHierarchyWorker {
.find(|child| child.is::<ast::Expr>());
let iterable_offset = iterable.as_ref().map(|node| node.offset());
self.get_symbols_in_opt_with(iterable, IdentContext::Ref)?;
self.get_symbols_in_opt_with(pattern, IdentContext::Var)?;
self.get_symbols_in_first_expr(node.children().rev(), iterable_offset)?;
self.check_opt_node_with(iterable, IdentContext::Ref)?;
self.check_opt_node_with(pattern, IdentContext::Var)?;
self.check_first_sub_expr(node.children().rev(), iterable_offset)?;
}
SyntaxKind::Closure => {
let first_child = node.children().next();
let current = self.stack.last_mut().unwrap().1.len();
if let Some(first_child) = first_child {
if first_child.kind() == SyntaxKind::Ident {
self.get_symbols_with(first_child, IdentContext::Func)?;
self.check_node_with(first_child, IdentContext::Func)?;
}
}
let body = node
@ -347,71 +391,73 @@ impl LexicalHierarchyWorker {
self.stack.push((symbol, eco_vec![]));
let stack_height = self.stack.len();
self.get_symbols_with(body, IdentContext::Ref)?;
self.check_node_with(body, IdentContext::Ref)?;
while stack_height <= self.stack.len() {
self.symbreak();
self.finish_hierarchy();
}
}
}
SyntaxKind::FieldAccess => {
self.get_symbols_in_first_expr(node.children(), None)?;
self.check_first_sub_expr(node.children(), None)?;
}
SyntaxKind::Named => {
self.get_symbols_in_first_expr(node.children().rev(), None)?;
self.check_first_sub_expr(node.children().rev(), None)?;
if self.ident_context == IdentContext::Params {
let ident = node.children().find(|n| n.kind() == SyntaxKind::Ident);
self.get_symbols_in_opt_with(ident, IdentContext::Var)?;
self.check_opt_node_with(ident, IdentContext::Var)?;
}
}
kind if kind.is_trivia() || kind.is_keyword() || kind.is_error() => {}
kind if Self::is_plain_token(kind) => {}
_ => {
for child in node.children() {
self.get_symbols(child)?;
}
self.check_nodes(node)?;
}
}
}
self.exit_symbol_context(checkpoint)?;
self.exit_node(checkpoint)?;
Ok(())
Some(())
}
/// Check a possible node with a specific context.
#[inline(always)]
fn get_symbols_in_opt_with(
fn check_opt_node_with(
&mut self,
node: Option<LinkedNode>,
context: IdentContext,
) -> anyhow::Result<()> {
) -> Option<()> {
if let Some(node) = node {
self.get_symbols_with(node, context)?;
self.check_node_with(node, context)?;
}
Ok(())
Some(())
}
fn get_symbols_in_first_expr<'a>(
/// Check the first sub-expression of a node. If an offset is provided, it
/// only checks the sub-expression if it starts after the offset.
fn check_first_sub_expr<'a>(
&mut self,
mut nodes: impl Iterator<Item = LinkedNode<'a>>,
iterable_offset: Option<usize>,
) -> anyhow::Result<()> {
after_offset: Option<usize>,
) -> Option<()> {
let body = nodes.find(|n| n.is::<ast::Expr>());
if let Some(body) = body {
if iterable_offset.is_some_and(|offset| offset >= body.offset()) {
return Ok(());
if after_offset.is_some_and(|offset| offset >= body.offset()) {
return Some(());
}
self.get_symbols_with(body, IdentContext::Ref)?;
self.check_node_with(body, IdentContext::Ref)?;
}
Ok(())
Some(())
}
fn get_symbols_with(&mut self, node: LinkedNode, context: IdentContext) -> anyhow::Result<()> {
/// Check a node with a specific context.
fn check_node_with(&mut self, node: LinkedNode, context: IdentContext) -> Option<()> {
let parent_context = self.ident_context;
self.ident_context = context;
let res = self.get_symbols(node);
let res = self.check_node(node);
self.ident_context = parent_context;
res
@ -420,7 +466,7 @@ impl LexicalHierarchyWorker {
/// Get symbol for a leaf node of a valid type, or `None` if the node is an
/// invalid type.
#[allow(deprecated)]
fn get_ident(&self, node: &LinkedNode) -> anyhow::Result<Option<LexicalInfo>> {
fn get_ident(&self, node: &LinkedNode) -> Option<Option<LexicalInfo>> {
let (name, kind) = match node.kind() {
SyntaxKind::Label if self.sk.affect_symbol() => {
// filter out label in code context.
@ -435,24 +481,20 @@ impl LexicalHierarchyWorker {
| SyntaxKind::Colon
) || prev_kind.is_keyword()
}) {
return Ok(None);
return Some(None);
}
let ast_node = node
.cast::<ast::Label>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let ast_node = node.cast::<ast::Label>()?;
let name = ast_node.get().into();
(name, LexicalKind::label())
}
SyntaxKind::Ident if self.sk.affect_symbol() => {
let ast_node = node
.cast::<ast::Ident>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let ast_node = node.cast::<ast::Ident>()?;
let name = ast_node.get().clone();
let kind = match self.ident_context {
IdentContext::Func => LexicalKind::function(),
IdentContext::Var | IdentContext::Params => LexicalKind::variable(),
_ => return Ok(None),
_ => return Some(None),
};
(name, kind)
@ -477,24 +519,24 @@ impl LexicalHierarchyWorker {
SyntaxKind::Markup => {
let name = node.get().to_owned().into_text();
if name.is_empty() {
return Ok(None);
return Some(None);
}
let Some(parent) = node.parent() else {
return Ok(None);
return Some(None);
};
let kind = match parent.kind() {
SyntaxKind::Heading if self.sk.affect_heading() => LexicalKind::Heading(
parent.cast::<ast::Heading>().unwrap().depth().get() as i16,
),
_ => return Ok(None),
_ => return Some(None),
};
(name, kind)
}
_ => return Ok(None),
_ => return Some(None),
};
Ok(Some(LexicalInfo {
Some(Some(LexicalInfo {
name,
kind,
range: node.range(),
@ -502,7 +544,7 @@ impl LexicalHierarchyWorker {
}
}
fn symbreak(sym: LexicalInfo, curr: EcoVec<LexicalHierarchy>) -> LexicalHierarchy {
fn finish_hierarchy(sym: LexicalInfo, curr: EcoVec<LexicalHierarchy>) -> LexicalHierarchy {
LexicalHierarchy {
info: sym,
children: if curr.is_empty() {

View file

@ -225,7 +225,7 @@ pub fn previous_decls<T>(
}
/// Whether the node can be recognized as a mark.
fn is_mark(sk: SyntaxKind) -> bool {
pub fn is_mark(sk: SyntaxKind) -> bool {
use SyntaxKind::*;
#[allow(clippy::match_like_matches_macro)]
match sk {