diff --git a/crates/sync-lsp/src/server.rs b/crates/sync-lsp/src/server.rs index 3dfdc096..2d4258eb 100644 --- a/crates/sync-lsp/src/server.rs +++ b/crates/sync-lsp/src/server.rs @@ -690,7 +690,6 @@ impl ServiceState<'_, A, S> { } #[derive(Debug, Clone, PartialEq, Eq)] -#[allow(dead_code)] enum State { Uninitialized(Option>), Initializing(S), diff --git a/crates/tinymist-analysis/src/adt/snapshot_map.rs b/crates/tinymist-analysis/src/adt/snapshot_map.rs index ff85eac9..a9141a33 100644 --- a/crates/tinymist-analysis/src/adt/snapshot_map.rs +++ b/crates/tinymist-analysis/src/adt/snapshot_map.rs @@ -4,7 +4,6 @@ //! //! [rustc_data_structures::snapshot_map]: https://github.com/rust-lang/rust/blob/master/compiler/rustc_data_structures/src/snapshot_map/mod.rs -#![allow(missing_docs)] #![allow(unused)] use ena::undo_log::{Rollback, Snapshots, UndoLogs, VecLog}; diff --git a/crates/tinymist-analysis/src/docs.rs b/crates/tinymist-analysis/src/docs.rs index c814504e..3fec85d4 100644 --- a/crates/tinymist-analysis/src/docs.rs +++ b/crates/tinymist-analysis/src/docs.rs @@ -1,4 +1,4 @@ -#![allow(missing_docs)] +//! The documentation analyzers. mod def; pub use def::*; diff --git a/crates/tinymist-analysis/src/docs/def.rs b/crates/tinymist-analysis/src/docs/def.rs index c73086b8..ce4bd1f9 100644 --- a/crates/tinymist-analysis/src/docs/def.rs +++ b/crates/tinymist-analysis/src/docs/def.rs @@ -329,6 +329,7 @@ pub struct ParamDocsT { } impl ParamDocs { + /// Creates a new parameter documentation. pub fn new(param: &ParamTy, ty: Option<&Ty>) -> Self { Self { name: param.name.as_ref().into(), @@ -340,6 +341,7 @@ impl ParamDocs { } } +/// Formats the type. 
pub fn format_ty(ty: Option<&Ty>) -> TypeRepr { let ty = ty?; let short = ty.repr().unwrap_or_else(|| "any".into()); diff --git a/crates/tinymist-analysis/src/docs/tidy.rs b/crates/tinymist-analysis/src/docs/tidy.rs index 957ef4bb..1f4e5863 100644 --- a/crates/tinymist-analysis/src/docs/tidy.rs +++ b/crates/tinymist-analysis/src/docs/tidy.rs @@ -1,28 +1,42 @@ +//! The documentation models for tidy. + use ecow::EcoString; use itertools::Itertools; use serde::{Deserialize, Serialize}; use typst::diag::StrResult; +/// A parameter documentation. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TidyParamDocs { + /// The name of the parameter. pub name: EcoString, + /// The documentation of the parameter. pub docs: EcoString, + /// The types of the parameter. pub types: EcoString, + /// The default value of the parameter. pub default: Option, } +/// A pattern documentation. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TidyPatDocs { + /// The documentation of the pattern. pub docs: EcoString, + /// The return type of the pattern. pub return_ty: Option, + /// The parameters of the pattern. pub params: Vec, } +/// A module documentation. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct TidyModuleDocs { + /// The documentation of the module. pub docs: EcoString, } +/// Removes the list annotations from the string. pub fn remove_list_annotations(s: &str) -> String { static REG: std::sync::LazyLock = std::sync::LazyLock::new(|| { regex::Regex::new(r"").unwrap() @@ -30,6 +44,7 @@ pub fn remove_list_annotations(s: &str) -> String { REG.replace_all(s, "").to_string() } +/// Identifies the pattern documentation. For example, `#let (a, b) = x`. pub fn identify_pat_docs(converted: &str) -> StrResult { let lines = converted.lines().collect::>(); @@ -150,6 +165,7 @@ pub fn identify_pat_docs(converted: &str) -> StrResult { }) } +/// Identifies the module documentation. 
pub fn identify_tidy_module_docs(docs: EcoString) -> StrResult { Ok(TidyModuleDocs { docs: remove_list_annotations(&docs).into(), diff --git a/crates/tinymist-analysis/src/syntax.rs b/crates/tinymist-analysis/src/syntax.rs index 81c9c0a5..66feca67 100644 --- a/crates/tinymist-analysis/src/syntax.rs +++ b/crates/tinymist-analysis/src/syntax.rs @@ -2,9 +2,6 @@ //! //! This module must hide all **AST details** from the rest of the codebase. -// todo: remove this -#![allow(missing_docs)] - pub mod import; pub use import::*; pub mod comment; diff --git a/crates/tinymist-analysis/src/syntax/comment.rs b/crates/tinymist-analysis/src/syntax/comment.rs index 65873af9..c2db8cba 100644 --- a/crates/tinymist-analysis/src/syntax/comment.rs +++ b/crates/tinymist-analysis/src/syntax/comment.rs @@ -4,7 +4,7 @@ use itertools::Itertools; use crate::prelude::*; -/// Extract the module-level documentation from a source. +/// Extracts the module-level documentation from a source. pub fn find_module_level_docs(src: &Source) -> Option { crate::log_debug_ct!("finding docs at: {id:?}", id = src.id()); @@ -20,6 +20,7 @@ pub fn find_module_level_docs(src: &Source) -> Option { extract_mod_docs_between(&root, 0..src.text().len(), true) } +/// Extracts the module-level documentation from a source. fn extract_mod_docs_between( node: &LinkedNode, rng: Range, @@ -72,13 +73,13 @@ pub struct CommentGroupMatcher { } impl CommentGroupMatcher { - /// Reset the matcher. This usually happens after a group is collected or + /// Resets the matcher. This usually happens after a group is collected or /// when some other child item is breaking the comment group manually. pub fn reset(&mut self) { self.newline_count = 0; } - /// Process a child relative to some [`SyntaxNode`]. + /// Processes a child relative to some [`SyntaxNode`]. /// /// ## Example /// @@ -119,28 +120,35 @@ impl CommentGroupMatcher { } } } + +/// A raw comment. enum RawComment { + /// A line comment. Line(EcoString), + /// A block comment. 
Block(EcoString), } /// A matcher that collects documentation comments. #[derive(Default)] pub struct DocCommentMatcher { + /// The collected comments. comments: Vec, + /// The matcher for grouping comments. group_matcher: CommentGroupMatcher, + /// Whether to strictly match the comment format. strict: bool, } impl DocCommentMatcher { - /// Reset the matcher. This usually happens after a group is collected or + /// Resets the matcher. This usually happens after a group is collected or /// when some other child item is breaking the comment group manually. pub fn reset(&mut self) { self.comments.clear(); self.group_matcher.reset(); } - /// Process a child relative to some [`SyntaxNode`]. + /// Processes a child relative to some [`SyntaxNode`]. pub fn process(&mut self, n: &SyntaxNode) -> bool { match self.group_matcher.process(n) { CommentGroupSignal::LineComment => { @@ -164,7 +172,7 @@ impl DocCommentMatcher { false } - /// Collect the comments and return the result. + /// Collects the comments and returns the result. pub fn collect(&mut self) -> Option { let comments = &self.comments; if comments.is_empty() { diff --git a/crates/tinymist-analysis/src/syntax/def.rs b/crates/tinymist-analysis/src/syntax/def.rs index c22199b9..d99f9c6c 100644 --- a/crates/tinymist-analysis/src/syntax/def.rs +++ b/crates/tinymist-analysis/src/syntax/def.rs @@ -1,3 +1,5 @@ +//! Definitions of syntax structures. + use core::fmt; use std::{ collections::BTreeMap, @@ -57,25 +59,25 @@ impl Deref for ExprInfo { /// documentation strings, imports, and exports. #[derive(Debug)] pub struct ExprInfoRepr { - /// The file ID this expression information belongs to + /// The file ID this expression information belongs to. pub fid: TypstFileId, - /// Revision number for tracking changes to the file + /// Revision number for tracking changes to the file. pub revision: usize, - /// The source code content + /// The source code content. 
pub source: Source, - /// The root expression of the file + /// The root expression of the file. pub root: Expr, - /// Documentation string for the module + /// Documentation string for the module. pub module_docstring: Arc, - /// The lexical scope of exported symbols from this file + /// The lexical scope of exported symbols from this file. pub exports: Arc>, - /// Map from file IDs to imported lexical scopes + /// Map from file IDs to imported lexical scopes. pub imports: FxHashMap>>, - /// Map from spans to expressions for scope analysis + /// Map from spans to expressions for scope analysis. pub exprs: FxHashMap, - /// Map from spans to resolved reference expressions + /// Map from spans to resolved reference expressions. pub resolves: FxHashMap>, - /// Map from declarations to their documentation strings + /// Map from declarations to their documentation strings. pub docstrings: FxHashMap>, } @@ -133,6 +135,7 @@ impl ExprInfoRepr { }) } + /// Shows the expression information. #[allow(dead_code)] fn show(&self) { use std::io::Write; @@ -167,55 +170,55 @@ impl ExprInfoRepr { /// source code, from basic literals to complex control flow constructs. 
#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Expr { -    /// A sequence of expressions + /// A sequence of expressions: `{ x; y; z }` Block(Interned>), -    /// An array literal + /// An array literal: `(1, 2, 3)` Array(Interned), -    /// A dict literal + /// A dict literal: `(a: 1, b: 2)` Dict(Interned), -    /// An args literal + /// An args literal: `(1, 2, 3)` Args(Interned), -    /// A pattern + /// A pattern: `(x, y, ..z)` Pattern(Interned), -    /// An element literal + /// An element literal: `[*Hi* there!]` Element(Interned), -    /// An unary operation + /// A unary operation: `-x` Unary(Interned), -    /// A binary operation + /// A binary operation: `x + y` Binary(Interned), -    /// A function call + /// A function call: `f(x, y)` Apply(Interned), -    /// A function + /// A function: `(x, y) => x + y` Func(Interned), -    /// A let + /// A let: `let x = 1` Let(Interned), -    /// A show + /// A show: `show heading: it => emph(it.body)` Show(Interned), -    /// A set + /// A set: `set text(...)` Set(Interned), -    /// A reference + /// A reference: `#x` Ref(Interned), -    /// A content reference + /// A content reference: `@x` ContentRef(Interned), -    /// A select + /// A select: `x.y` Select(Interned), -    /// An import + /// An import expression: `import "path.typ": x` Import(Interned), -    /// An include + /// An include expression: `include "path.typ"` Include(Interned), -    /// A contextual + /// A contextual expression: `context text.lang` Contextual(Interned), -    /// A conditional + /// A conditional expression: `if x { y } else { z }` Conditional(Interned), -    /// A while loop + /// A while loop: `while x { y }` WhileLoop(Interned), -    /// A for loop + /// A for loop: `for x in y { z }` ForLoop(Interned), -    /// A type + /// A type: `str` Type(Ty), -    /// A declaration + /// A declaration: `x` Decl(DeclExpr), -    /// A star import + /// A star import: `*` Star, } @@ -263,9 +266,13 @@ pub type LexicalScope = rpds::RedBlackTreeMapSync, Expr>; /// including lexical scopes, modules, functions, 
and types. #[derive(Debug, Clone)] pub enum ExprScope { + /// A lexical scope extracted from a source file. Lexical(LexicalScope), + /// A module instance which is either built-in or evaluated during analysis. Module(Module), + /// A scope bound to a function. Func(Func), + /// A scope bound to a type. Type(Type), } @@ -358,18 +365,18 @@ fn select_of(source: Interned, name: Interned) -> Expr { #[derive(Debug, Default, Clone, Copy, Hash, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum DefKind { - /// A definition for some constant. + /// A definition for some constant: `let x = 1` #[default] Constant, - /// A definition for some function. + /// A definition for some function: `(x, y) => x + y` Function, - /// A definition for some variable. + /// A definition for some variable: `let x = (x, y) => x + y` Variable, /// A definition for some module. Module, - /// A definition for some struct. + /// A definition for some struct (type). Struct, - /// A definition for some reference. + /// A definition for some reference: `