docs: document all public items in rust codebase (#2058)

Myriad-Dreamin 2025-08-21 12:50:03 +08:00 committed by GitHub
parent 4ce0a59862
commit 532f25abe9
75 changed files with 1429 additions and 533 deletions
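
The change is mechanical but broad: per-file `#![allow(missing_docs)]` opt-outs are dropped and public items gain `///` comments. Below is a minimal sketch of the pattern being enforced; the lint configuration and the `Documented` type are assumptions for illustration, since the diff only shows the opt-outs being removed.

    #![warn(missing_docs)] // assumed lint setup; the diff below only removes `allow` opt-outs

    /// With `missing_docs` active, omitting this comment on a public item
    /// produces a warning, which is what the additions in this commit address.
    pub struct Documented {
        /// Fields of public types need documentation too.
        pub field: usize,
    }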


@@ -690,7 +690,6 @@ impl<A, S> ServiceState<'_, A, S> {
}
#[derive(Debug, Clone, PartialEq, Eq)]
-#[allow(dead_code)]
enum State<Args, S> {
Uninitialized(Option<Box<Args>>),
Initializing(S),


@@ -4,7 +4,6 @@
//!
//! [rustc_data_structures::snapshot_map]: https://github.com/rust-lang/rust/blob/master/compiler/rustc_data_structures/src/snapshot_map/mod.rs
-#![allow(missing_docs)]
#![allow(unused)]
use ena::undo_log::{Rollback, Snapshots, UndoLogs, VecLog};


@@ -1,4 +1,4 @@
-#![allow(missing_docs)]
+//! The documentation analyzers.
mod def;
pub use def::*;


@@ -329,6 +329,7 @@ pub struct ParamDocsT<T> {
}
impl ParamDocs {
+/// Create a new parameter documentation.
pub fn new(param: &ParamTy, ty: Option<&Ty>) -> Self {
Self {
name: param.name.as_ref().into(),
@@ -340,6 +341,7 @@ impl ParamDocs {
}
}
+/// Formats the type.
pub fn format_ty(ty: Option<&Ty>) -> TypeRepr {
let ty = ty?;
let short = ty.repr().unwrap_or_else(|| "any".into());


@@ -1,28 +1,42 @@
+//! The documentation models for tidy.
use ecow::EcoString;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use typst::diag::StrResult;
+/// A parameter documentation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyParamDocs {
+/// The name of the parameter.
pub name: EcoString,
+/// The documentation of the parameter.
pub docs: EcoString,
+/// The types of the parameter.
pub types: EcoString,
+/// The default value of the parameter.
pub default: Option<EcoString>,
}
+/// A pattern documentation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyPatDocs {
+/// The documentation of the pattern.
pub docs: EcoString,
+/// The return type of the pattern.
pub return_ty: Option<EcoString>,
+/// The parameters of the pattern.
pub params: Vec<TidyParamDocs>,
}
+/// A module documentation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TidyModuleDocs {
+/// The documentation of the module.
pub docs: EcoString,
}
+/// Removes the list annotations from the string.
pub fn remove_list_annotations(s: &str) -> String {
static REG: std::sync::LazyLock<regex::Regex> = std::sync::LazyLock::new(|| {
regex::Regex::new(r"<!-- typlite:(?:begin|end):[\w\-]+ \d+ -->").unwrap()
@@ -30,6 +44,7 @@ pub fn remove_list_annotations(s: &str) -> String {
REG.replace_all(s, "").to_string()
}
+/// Identifies the pattern documentation. For example, `#let (a, b) = x`.
pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
let lines = converted.lines().collect::<Vec<_>>();
@@ -150,6 +165,7 @@ pub fn identify_pat_docs(converted: &str) -> StrResult<TidyPatDocs> {
})
}
+/// Identifies the module documentation.
pub fn identify_tidy_module_docs(docs: EcoString) -> StrResult<TidyModuleDocs> {
Ok(TidyModuleDocs {
docs: remove_list_annotations(&docs).into(),
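
For orientation, a hedged sketch of how these tidy models might be consumed. The function and field names come from the hunk above; the `use` path and the `summarize` helper are assumptions.

    use tinymist_query::docs::{identify_pat_docs, remove_list_annotations};

    fn summarize(converted: &str) {
        // Strip the `<!-- typlite:begin/end -->` markers matched by the regex above.
        println!("{}", remove_list_annotations(converted));

        // Split a pattern's docs into body, parameters, and return type.
        if let Ok(pat) = identify_pat_docs(converted) {
            for param in &pat.params {
                println!("{}: {} ({})", param.name, param.docs, param.types);
            }
            if let Some(ret) = &pat.return_ty {
                println!("-> {ret}");
            }
        }
    }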


@@ -2,9 +2,6 @@
//!
//! This module must hide all **AST details** from the rest of the codebase.
-// todo: remove this
-#![allow(missing_docs)]
pub mod import;
pub use import::*;
pub mod comment;


@@ -4,7 +4,7 @@ use itertools::Itertools;
use crate::prelude::*;
-/// Extract the module-level documentation from a source.
+/// Extracts the module-level documentation from a source.
pub fn find_module_level_docs(src: &Source) -> Option<String> {
crate::log_debug_ct!("finding docs at: {id:?}", id = src.id());
@@ -20,6 +20,7 @@ pub fn find_module_level_docs(src: &Source) -> Option<String> {
extract_mod_docs_between(&root, 0..src.text().len(), true)
}
+/// Extracts the module-level documentation from a source.
fn extract_mod_docs_between(
node: &LinkedNode,
rng: Range<usize>,
@@ -72,13 +73,13 @@ pub struct CommentGroupMatcher {
}
impl CommentGroupMatcher {
-/// Reset the matcher. This usually happens after a group is collected or
+/// Resets the matcher. This usually happens after a group is collected or
/// when some other child item is breaking the comment group manually.
pub fn reset(&mut self) {
self.newline_count = 0;
}
-/// Process a child relative to some [`SyntaxNode`].
+/// Processes a child relative to some [`SyntaxNode`].
///
/// ## Example
///
@@ -119,28 +120,35 @@ impl CommentGroupMatcher {
}
}
}
+/// A raw comment.
enum RawComment {
+/// A line comment.
Line(EcoString),
+/// A block comment.
Block(EcoString),
}
/// A matcher that collects documentation comments.
#[derive(Default)]
pub struct DocCommentMatcher {
+/// The collected comments.
comments: Vec<RawComment>,
+/// The matcher for grouping comments.
group_matcher: CommentGroupMatcher,
+/// Whether to strictly match the comment format.
strict: bool,
}
impl DocCommentMatcher {
-/// Reset the matcher. This usually happens after a group is collected or
+/// Resets the matcher. This usually happens after a group is collected or
/// when some other child item is breaking the comment group manually.
pub fn reset(&mut self) {
self.comments.clear();
self.group_matcher.reset();
}
-/// Process a child relative to some [`SyntaxNode`].
+/// Processes a child relative to some [`SyntaxNode`].
pub fn process(&mut self, n: &SyntaxNode) -> bool {
match self.group_matcher.process(n) {
CommentGroupSignal::LineComment => {
@@ -164,7 +172,7 @@ impl DocCommentMatcher {
false
}
-/// Collect the comments and return the result.
+/// Collects the comments and returns the result.
pub fn collect(&mut self) -> Option<String> {
let comments = &self.comments;
if comments.is_empty() {
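
A sketch of driving the matcher over a parsed file. `process`, `collect`, and the `Default` derive come from the code above; the import paths and the loop's interpretation of the returned bool are assumptions.

    use tinymist_query::syntax::DocCommentMatcher;
    use typst::syntax::{LinkedNode, Source};

    fn collect_docs(text: &str) -> Option<String> {
        let source = Source::detached(text);
        let root = LinkedNode::new(source.root());

        let mut matcher = DocCommentMatcher::default();
        for child in root.children() {
            // The bool returned by `process` is treated here as "a group is ready";
            // its exact semantics are not visible in this hunk.
            if matcher.process(&child) {
                break;
            }
        }
        matcher.collect()
    }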


@@ -1,3 +1,5 @@
+//! Definitions of syntax structures.
use core::fmt;
use std::{
collections::BTreeMap,
@@ -57,25 +59,25 @@ impl Deref for ExprInfo {
/// documentation strings, imports, and exports.
#[derive(Debug)]
pub struct ExprInfoRepr {
-/// The file ID this expression information belongs to
+/// The file ID this expression information belongs to.
pub fid: TypstFileId,
-/// Revision number for tracking changes to the file
+/// Revision number for tracking changes to the file.
pub revision: usize,
-/// The source code content
+/// The source code content.
pub source: Source,
-/// The root expression of the file
+/// The root expression of the file.
pub root: Expr,
-/// Documentation string for the module
+/// Documentation string for the module.
pub module_docstring: Arc<DocString>,
-/// The lexical scope of exported symbols from this file
+/// The lexical scope of exported symbols from this file.
pub exports: Arc<LazyHash<LexicalScope>>,
-/// Map from file IDs to imported lexical scopes
+/// Map from file IDs to imported lexical scopes.
pub imports: FxHashMap<TypstFileId, Arc<LazyHash<LexicalScope>>>,
-/// Map from spans to expressions for scope analysis
+/// Map from spans to expressions for scope analysis.
pub exprs: FxHashMap<Span, Expr>,
-/// Map from spans to resolved reference expressions
+/// Map from spans to resolved reference expressions.
pub resolves: FxHashMap<Span, Interned<RefExpr>>,
-/// Map from declarations to their documentation strings
+/// Map from declarations to their documentation strings.
pub docstrings: FxHashMap<DeclExpr, Arc<DocString>>,
}
@@ -133,6 +135,7 @@ impl ExprInfoRepr {
})
}
+/// Shows the expression information.
#[allow(dead_code)]
fn show(&self) {
use std::io::Write;
@@ -167,55 +170,55 @@ impl ExprInfoRepr {
/// source code, from basic literals to complex control flow constructs.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Expr {
-/// A sequence of expressions
+/// A sequence of expressions: `{ x; y; z }`
Block(Interned<Vec<Expr>>),
-/// An array literal
+/// An array literal: `(1, 2, 3)`
Array(Interned<ArgsExpr>),
-/// A dict literal
+/// A dict literal: `(a: 1, b: 2)`
Dict(Interned<ArgsExpr>),
-/// An args literal
+/// An args literal: `(1, 2, 3)`
Args(Interned<ArgsExpr>),
-/// A pattern
+/// A pattern: `(x, y, ..z)`
Pattern(Interned<Pattern>),
-/// An element literal
+/// An element literal: `[*Hi* there!]`
Element(Interned<ElementExpr>),
-/// An unary operation
+/// An unary operation: `-x`
Unary(Interned<UnExpr>),
-/// A binary operation
+/// A binary operation: `x + y`
Binary(Interned<BinExpr>),
-/// A function call
+/// A function call: `f(x, y)`
Apply(Interned<ApplyExpr>),
-/// A function
+/// A function: `(x, y) => x + y`
Func(Interned<FuncExpr>),
-/// A let
+/// A let: `let x = 1`
Let(Interned<LetExpr>),
-/// A show
+/// A show: `show heading: it => emph(it.body)`
Show(Interned<ShowExpr>),
-/// A set
+/// A set: `set text(...)`
Set(Interned<SetExpr>),
-/// A reference
+/// A reference: `#x`
Ref(Interned<RefExpr>),
-/// A content reference
+/// A content reference: `@x`
ContentRef(Interned<ContentRefExpr>),
-/// A select
+/// A select: `x.y`
Select(Interned<SelectExpr>),
-/// An import
+/// An import expression: `import "path.typ": x`
Import(Interned<ImportExpr>),
-/// An include
+/// An include expression: `include "path.typ"`
Include(Interned<IncludeExpr>),
-/// A contextual
+/// A contextual expression: `context text.lang`
Contextual(Interned<Expr>),
-/// A conditional
+/// A conditional expression: `if x { y } else { z }`
Conditional(Interned<IfExpr>),
-/// A while loop
+/// A while loop: `while x { y }`
WhileLoop(Interned<WhileExpr>),
-/// A for loop
+/// A for loop: `for x in y { z }`
ForLoop(Interned<ForExpr>),
-/// A type
+/// A type: `str`
Type(Ty),
-/// A declaration
+/// A declaration: `x`
Decl(DeclExpr),
-/// A star import
+/// A star import: `*`
Star,
}
@@ -263,9 +266,13 @@ pub type LexicalScope = rpds::RedBlackTreeMapSync<Interned<str>, Expr>;
/// including lexical scopes, modules, functions, and types.
#[derive(Debug, Clone)]
pub enum ExprScope {
+/// A lexical scope extracted from a source file.
Lexical(LexicalScope),
+/// A module instance which is either built-in or evaluated during analysis.
Module(Module),
+/// A scope bound to a function.
Func(Func),
+/// A scope bound to a type.
Type(Type),
}
@@ -358,18 +365,18 @@ fn select_of(source: Interned<Ty>, name: Interned<str>) -> Expr {
#[derive(Debug, Default, Clone, Copy, Hash, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum DefKind {
-/// A definition for some constant.
+/// A definition for some constant: `let x = 1`
#[default]
Constant,
-/// A definition for some function.
+/// A definition for some function: `(x, y) => x + y`
Function,
-/// A definition for some variable.
+/// A definition for some variable: `let x = (x, y) => x + y`
Variable,
/// A definition for some module.
Module,
-/// A definition for some struct.
+/// A definition for some struct (type).
Struct,
-/// A definition for some reference.
+/// A definition for some reference: `<label>`
Reference,
}
@@ -392,27 +399,49 @@ pub type DeclExpr = Interned<Decl>;
/// Represents different kinds of declarations in the language.
#[derive(Clone, PartialEq, Eq, Hash, DeclEnum)]
pub enum Decl {
+/// A function declaration: `(x, y) => x + y`
Func(SpannedDecl),
+/// An import alias declaration: `import "path.typ": x`
ImportAlias(SpannedDecl),
+/// A variable declaration: `let x = 1`
Var(SpannedDecl),
+/// An identifier reference declaration: `x`
IdentRef(SpannedDecl),
+/// A module declaration: `import calc`
Module(ModuleDecl),
+/// A module alias declaration: `import "path.typ" as x`
ModuleAlias(SpannedDecl),
+/// A path stem declaration: `path.typ`
PathStem(SpannedDecl),
+/// An import path declaration: `import "path.typ"`
ImportPath(SpannedDecl),
+/// An include path declaration: `include "path.typ"`
IncludePath(SpannedDecl),
+/// An import declaration: `import "path.typ"`
Import(SpannedDecl),
+/// A content reference declaration: `@x`
ContentRef(SpannedDecl),
+/// A label declaration: `label`
Label(SpannedDecl),
+/// A string name declaration: `"x"`
StrName(SpannedDecl),
+/// A module import declaration: `import "path.typ": *`
ModuleImport(SpanDecl),
+/// A closure declaration: `(x, y) => x + y`
Closure(SpanDecl),
+/// A pattern declaration: `let (x, y, ..z) = 1`
Pattern(SpanDecl),
+/// A spread declaration: `..z`
Spread(SpanDecl),
+/// A content declaration: `#[text]`
Content(SpanDecl),
+/// A constant declaration: `let x = 1`
Constant(SpanDecl),
+/// A bib entry declaration: `@entry`
BibEntry(NameRangeDecl),
+/// A docs declaration created by the compiler.
Docs(DocsDecl),
+/// A generated declaration created by the compiler.
Generated(GeneratedDecl),
}
@@ -755,15 +784,19 @@ impl From<DeclExpr> for Expr {
/// A declaration with an associated name and span location.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SpannedDecl {
+/// The name of the declaration.
name: Interned<str>,
+/// The span location of the declaration.
at: Span,
}
impl SpannedDecl {
+/// Gets the name of the declaration.
fn name(&self) -> &Interned<str> {
&self.name
}
+/// Gets the span location of the declaration.
fn span(&self) -> Span {
self.at
}
@@ -778,17 +811,19 @@ impl fmt::Debug for SpannedDecl {
/// A declaration with a name and range information.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct NameRangeDecl {
-/// The name of the declaration
+/// The name of the declaration.
pub name: Interned<str>,
-/// Boxed tuple containing (file_id, name_range, full_range)
+/// Boxed tuple containing (file_id, name_range, full_range).
pub at: Box<(TypstFileId, Range<usize>, Option<Range<usize>>)>,
}
impl NameRangeDecl {
+/// Gets the name of the declaration.
fn name(&self) -> &Interned<str> {
&self.name
}
+/// Gets the span location of the declaration.
fn span(&self) -> Span {
Span::detached()
}
@@ -803,17 +838,19 @@ impl fmt::Debug for NameRangeDecl {
/// A module declaration with name and file ID.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct ModuleDecl {
-/// The name of the module
+/// The name of the module.
pub name: Interned<str>,
-/// The file ID where the module is defined
+/// The file ID where the module is defined.
pub fid: TypstFileId,
}
impl ModuleDecl {
+/// Gets the name of the declaration.
fn name(&self) -> &Interned<str> {
&self.name
}
+/// Gets the span location of the declaration.
fn span(&self) -> Span {
Span::detached()
}
@@ -833,10 +870,12 @@ pub struct DocsDecl {
}
impl DocsDecl {
+/// Gets the name of the declaration.
fn name(&self) -> &Interned<str> {
Interned::empty()
}
+/// Gets the span location of the declaration.
fn span(&self) -> Span {
Span::detached()
}
@@ -853,10 +892,12 @@ impl fmt::Debug for DocsDecl {
pub struct SpanDecl(Span);
impl SpanDecl {
+/// Gets the name of the declaration.
fn name(&self) -> &Interned<str> {
Interned::empty()
}
+/// Gets the span location of the declaration.
fn span(&self) -> Span {
self.0
}
@@ -873,10 +914,12 @@ impl fmt::Debug for SpanDecl {
pub struct GeneratedDecl(DefId);
impl GeneratedDecl {
+/// Gets the name of the declaration.
fn name(&self) -> &Interned<str> {
Interned::empty()
}
+/// Gets the span location of the declaration.
fn span(&self) -> Span {
Span::detached()
}
@@ -903,17 +946,25 @@ pub type ExportMap = BTreeMap<Interned<str>, Expr>;
/// Covers positional arguments, named arguments, and spread arguments.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ArgExpr {
+/// A positional argument: `x`
Pos(Expr),
+/// A named argument: `a: x`
Named(Box<(DeclExpr, Expr)>),
+/// A named argument with a default value: `((a): x)`
NamedRt(Box<(Expr, Expr)>),
+/// A spread argument: `..x`
Spread(Expr),
}
/// Represents different kinds of patterns for destructuring.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Pattern {
+/// A general pattern expression can occur in right-hand side of a
+/// function signature.
Expr(Expr),
+/// A simple pattern: `x`
Simple(Interned<Decl>),
+/// A pattern signature: `(x, y: val, ..z)`
Sig(Box<PatternSig>),
}
@@ -938,13 +989,13 @@ impl Pattern {
/// named, and spread parameters.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PatternSig {
-/// Positional parameters in order
+/// Positional parameters in order.
pub pos: EcoVec<Interned<Pattern>>,
-/// Named parameters with their default patterns
+/// Named parameters with their default patterns.
pub named: EcoVec<(DeclExpr, Interned<Pattern>)>,
-/// Left spread parameter (collects extra positional arguments)
+/// Left spread parameter (collects extra positional arguments).
pub spread_left: Option<(DeclExpr, Interned<Pattern>)>,
-/// Right spread parameter (collects remaining arguments)
+/// Right spread parameter (collects remaining arguments).
pub spread_right: Option<(DeclExpr, Interned<Pattern>)>,
}
@@ -968,35 +1019,35 @@ pub struct ContentSeqExpr {
/// The term is the final resolved type of the reference.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RefExpr {
-/// The declaration being referenced
+/// The declaration being referenced.
pub decl: DeclExpr,
-/// The intermediate step in resolution (if any)
+/// The intermediate step in resolution (if any).
pub step: Option<Expr>,
-/// The root expression of the reference chain
+/// The root expression of the reference chain.
pub root: Option<Expr>,
-/// The final resolved type of the reference
+/// The final resolved type of the reference.
pub term: Option<Ty>,
}
/// Represents a content reference expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ContentRefExpr {
-/// The identifier being referenced
+/// The identifier being referenced.
pub ident: DeclExpr,
-/// The declaration this reference points to (if resolved)
+/// The declaration this reference points to (if resolved).
pub of: Option<DeclExpr>,
-/// The body content associated with the reference
+/// The body content associated with the reference.
pub body: Option<Expr>,
}
/// Represents a field selection expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SelectExpr {
-/// The left-hand side expression being selected from
+/// The left-hand side expression being selected from.
pub lhs: Expr,
-/// The key or field name being selected
+/// The key or field name being selected.
pub key: DeclExpr,
-/// The span location of this selection
+/// The span location of this selection.
pub span: Span,
}
@@ -1014,9 +1065,9 @@ impl SelectExpr {
/// Represents an arguments expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ArgsExpr {
-/// The list of arguments
+/// The list of arguments.
pub args: Vec<ArgExpr>,
-/// The span location of the argument list
+/// The span location of the argument list.
pub span: Span,
}
@@ -1030,138 +1081,138 @@ impl ArgsExpr {
/// Represents an element expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ElementExpr {
-/// The Typst element type
+/// The Typst element type.
pub elem: Element,
-/// The content expressions within this element
+/// The content expressions within this element.
pub content: EcoVec<Expr>,
}
/// Represents a function application expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ApplyExpr {
-/// The function expression being called
+/// The function expression being called.
pub callee: Expr,
-/// The arguments passed to the function
+/// The arguments passed to the function.
pub args: Expr,
-/// The span location of the function call
+/// The span location of the function call.
pub span: Span,
}
/// Represents a function expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FuncExpr {
-/// The declaration for this function
+/// The declaration for this function.
pub decl: DeclExpr,
-/// The parameter signature defining function inputs
+/// The parameter signature defining function inputs.
pub params: PatternSig,
-/// The function body expression
+/// The function body expression.
pub body: Expr,
}
/// Represents a let binding expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct LetExpr {
-/// Span of the pattern
+/// Span of the pattern.
pub span: Span,
-/// The pattern being bound (left side of assignment)
+/// The pattern being bound (left side of assignment).
pub pattern: Interned<Pattern>,
-/// The optional body expression (right side of assignment)
+/// The optional body expression (right side of assignment).
pub body: Option<Expr>,
}
/// Represents a show rule expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ShowExpr {
-/// Optional selector expression to determine what to show
+/// Optional selector expression to determine what to show.
pub selector: Option<Expr>,
-/// The edit function to apply to selected elements
+/// The edit function to apply to selected elements.
pub edit: Expr,
}
/// Represents a set rule expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SetExpr {
-/// The target element or function to set
+/// The target element or function to set.
pub target: Expr,
-/// The arguments to apply to the target
+/// The arguments to apply to the target.
pub args: Expr,
-/// Optional condition for when to apply the set rule
+/// Optional condition for when to apply the set rule.
pub cond: Option<Expr>,
}
/// Represents an import expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ImportExpr {
-/// The reference expression for what is being imported
+/// The reference expression for what is being imported.
pub decl: Interned<RefExpr>,
}
/// Represents an include expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct IncludeExpr {
-/// The source expression indicating what file or content to include
+/// The source expression indicating what file or content to include.
pub source: Expr,
}
/// Represents a conditional (if) expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct IfExpr {
-/// The condition expression to evaluate
+/// The condition expression to evaluate.
pub cond: Expr,
-/// The expression to evaluate if condition is true
+/// The expression to evaluate if condition is true.
pub then: Expr,
-/// The expression to evaluate if condition is false
+/// The expression to evaluate if condition is false.
pub else_: Expr,
}
/// Represents a while loop expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct WhileExpr {
-/// The condition expression evaluated each iteration
+/// The condition expression evaluated each iteration.
pub cond: Expr,
-/// The body expression executed while condition is true
+/// The body expression executed while condition is true.
pub body: Expr,
}
/// Represents a for loop expression.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ForExpr {
-/// The pattern to match each iteration value against
+/// The pattern to match each iteration value against.
pub pattern: Interned<Pattern>,
-/// The expression that produces values to iterate over
+/// The expression that produces values to iterate over.
pub iter: Expr,
-/// The body expression executed for each iteration
+/// The body expression executed for each iteration.
pub body: Expr,
}
/// The kind of unary operation.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum UnaryOp {
-/// The (arithmetic) positive operation
+/// The (arithmetic) positive operation.
/// `+t`
Pos,
-/// The (arithmetic) negate operation
+/// The (arithmetic) negate operation.
/// `-t`
Neg,
-/// The (logical) not operation
+/// The (logical) not operation.
/// `not t`
Not,
-/// The return operation
+/// The return operation.
/// `return t`
Return,
-/// The typst context operation
+/// The typst context operation.
/// `context t`
Context,
-/// The spreading operation
+/// The spreading operation.
/// `..t`
Spread,
-/// The not element of operation
+/// The not element of operation.
/// `not in t`
NotElementOf,
-/// The element of operation
+/// The element of operation.
/// `in t`
ElementOf,
-/// The type of operation
+/// The type of operation.
/// `type(t)`
TypeOf,
}
@@ -1169,9 +1220,9 @@ pub enum UnaryOp {
/// A unary operation type.
#[derive(Debug, Hash, Clone, PartialEq, Eq)]
pub struct UnInst<T> {
-/// The operand of the unary operation
+/// The operand of the unary operation.
pub lhs: T,
-/// The kind of the unary operation
+/// The kind of the unary operation.
pub op: UnaryOp,
}
@@ -1211,9 +1262,9 @@ pub type BinaryOp = ast::BinOp;
/// A binary operation type.
#[derive(Debug, Hash, Clone, PartialEq, Eq)]
pub struct BinInst<T> {
-/// The operands of the binary operation (left, right)
+/// The operands of the binary operation (left, right).
pub operands: (T, T),
-/// The kind of the binary operation
+/// The kind of the binary operation.
pub op: BinaryOp,
}
@@ -1250,6 +1301,7 @@ impl<T> BinInst<T> {
}
}
+/// Checks if a scope is empty.
fn is_empty_scope(scope: &typst::foundations::Scope) -> bool {
scope.iter().next().is_none()
}
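
One concrete consequence of the annotations above worth noting: `DefKind` derives Serialize/Deserialize with `#[serde(rename_all = "camelCase")]`, so the documented variants appear as lower-camel-case strings on the wire. A small sketch; the `serde_json` dependency and the import path are assumptions.

    use tinymist_query::syntax::DefKind;

    fn main() -> serde_json::Result<()> {
        assert_eq!(serde_json::to_string(&DefKind::Function)?, "\"function\"");
        // `Constant` is the `#[default]` variant.
        assert_eq!(serde_json::to_string(&DefKind::default())?, "\"constant\"");
        Ok(())
    }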


@@ -2,7 +2,7 @@
use crate::prelude::*;
-/// Resolve a file id by its import path.
+/// Resolves a file id by its import path.
pub fn resolve_id_by_path(
world: &dyn World,
current: TypstFileId,
@@ -10,14 +10,14 @@ pub fn resolve_id_by_path(
) -> Option<TypstFileId> {
if import_path.starts_with('@') {
let spec = import_path.parse::<PackageSpec>().ok()?;
-// Evaluate the manifest.
+// Evaluates the manifest.
let manifest_id = TypstFileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
let bytes = world.file(manifest_id).ok()?;
let string = std::str::from_utf8(&bytes).map_err(FileError::from).ok()?;
let manifest: PackageManifest = toml::from_str(string).ok()?;
manifest.validate(&spec).ok()?;
-// Evaluate the entry point.
+// Evaluates the entry point.
return Some(manifest_id.join(&manifest.package.entrypoint));
}
@@ -31,7 +31,7 @@ pub fn resolve_id_by_path(
Some(TypstFileId::new(current.package().cloned(), vpath))
}
-/// Find a source instance by its import node.
+/// Finds a source instance by its import node.
pub fn find_source_by_expr(
world: &dyn World,
current: TypstFileId,
@@ -46,7 +46,7 @@ pub fn find_source_by_expr(
}
}
-/// Casts node to a single include expression.
+/// Casts a node to a single include expression.
pub fn cast_include_expr<'a>(name: &str, node: ast::Expr<'a>) -> Option<ast::Expr<'a>> {
match node {
ast::Expr::Include(inc) => Some(inc.source()),
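
A sketch of the two resolution paths documented above: package imports go through the package's typst.toml manifest, while plain paths are joined onto the importing file. The package spec and import paths in the sketch are hypothetical, and the module path of the helper is an assumption.

    use tinymist_query::syntax::resolve_id_by_path;
    use typst::syntax::FileId as TypstFileId;
    use typst::World;

    fn resolve_both(world: &dyn World, current: TypstFileId) {
        // Package import: resolved via the manifest's `package.entrypoint`.
        let pkg = resolve_id_by_path(world, current, "@preview/example:0.1.0");
        // Relative import: resolved against the current file's directory.
        let rel = resolve_id_by_path(world, current, "utils.typ");
        println!("package: {pkg:?}, relative: {rel:?}");
    }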


@@ -1,7 +1,7 @@
//! Convenient utilities to match syntax structures of code.
//! - Iterators/Finders to traverse nodes.
//! - Predicates to check nodes' properties.
-//! - Classifiers to check nodes' syntax.
+//! - Classifiers to check nodes' syntaxes.
//!
//! ## Classifiers of syntax structures
//!
@@ -223,7 +223,7 @@ pub fn previous_decls<T>(
_ => {}
}
-// import it self
+// import itself
if let Some(new_name) = import.new_name() {
if let Some(t) = recv(PreviousDecl::Ident(new_name)) {
return Some(t);
@@ -284,7 +284,7 @@ pub fn previous_decls<T>(
})
}
-/// Whether the node can be recognized as a mark.
+/// Checks if the node can be recognized as a mark.
pub fn is_mark(sk: SyntaxKind) -> bool {
use SyntaxKind::*;
#[allow(clippy::match_like_matches_macro)]
@@ -297,7 +297,7 @@ pub fn is_mark(sk: SyntaxKind) -> bool {
}
}
-/// Whether the node can be recognized as an identifier.
+/// Checks if the node can be recognized as an identifier.
pub fn is_ident_like(node: &SyntaxNode) -> bool {
fn can_be_ident(node: &SyntaxNode) -> bool {
typst::syntax::is_ident(node.text())
@@ -328,7 +328,8 @@ pub enum InterpretMode {
Math,
}
-/// Determine the interpretation mode at the given position (context-sensitive).
+/// Determines the interpretation mode at the given position
+/// (context-sensitive).
pub fn interpret_mode_at(mut leaf: Option<&LinkedNode>) -> InterpretMode {
loop {
crate::log_debug_ct!("leaf for mode: {leaf:?}");
@@ -351,7 +352,7 @@ pub fn interpret_mode_at(mut leaf: Option<&LinkedNode>) -> InterpretMode {
}
}
-/// Determine the interpretation mode at the given kind (context-free).
+/// Determines the interpretation mode at the given kind (context-free).
pub(crate) fn interpret_mode_at_kind(kind: SyntaxKind) -> Option<InterpretMode> {
use SyntaxKind::*;
Some(match kind {
@@ -702,6 +703,7 @@ impl<'a> SyntaxClass<'a> {
}
}
+/// Checks if the syntax class contains an error node.
pub fn contains_error(&self) -> bool {
use SyntaxClass::*;
match self {
@@ -860,15 +862,15 @@ pub fn classify_syntax(node: LinkedNode<'_>, cursor: usize) -> Option<SyntaxClas
}
}
-// Move to the first ancestor that is an expression.
+// Moves to the first ancestor that is an expression.
let ancestor = first_ancestor_expr(node)?;
crate::log_debug_ct!("first_ancestor_expr: {ancestor:?}");
-// Unwrap all parentheses to get the actual expression.
+// Unwraps all parentheses to get the actual expression.
let adjusted = adjust_expr(ancestor)?;
crate::log_debug_ct!("adjust_expr: {adjusted:?}");
-// Identify convenient expression kinds.
+// Identifies convenient expression kinds.
let expr = adjusted.cast::<ast::Expr>()?;
Some(match expr {
ast::Expr::Label(..) => SyntaxClass::label(adjusted),
@@ -901,7 +903,7 @@ pub fn classify_syntax(node: LinkedNode<'_>, cursor: usize) -> Option<SyntaxClas
})
}
-/// Whether the node might be in code trivia. This is a bit internal so please
+/// Checks if the node might be in code trivia. This is a bit internal so please
/// check the caller to understand it.
fn possible_in_code_trivia(kind: SyntaxKind) -> bool {
!matches!(
@@ -937,7 +939,7 @@ impl ArgClass<'_> {
}
}
-// todo: whether we can merge `SurroundingSyntax` and `SyntaxContext`?
+// todo: check if we can merge `SurroundingSyntax` and `SyntaxContext`?
/// Classes of syntax context (outer syntax) that can be operated on by IDE
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash, strum::EnumIter)]
pub enum SurroundingSyntax {
@@ -957,7 +959,7 @@ pub enum SurroundingSyntax {
ParamList,
}
-/// Determines the surrounding syntax of the node at the position.
+/// Determines the surrounding syntax of the node at the given position.
pub fn surrounding_syntax(node: &LinkedNode) -> SurroundingSyntax {
check_previous_syntax(node)
.or_else(|| check_surrounding_syntax(node))
@@ -1060,6 +1062,7 @@ fn check_surrounding_syntax(mut leaf: &LinkedNode) -> Option<SurroundingSyntax>
None
}
+/// Checks the previous syntax of the node.
fn check_previous_syntax(leaf: &LinkedNode) -> Option<SurroundingSyntax> {
let mut leaf = leaf.clone();
if leaf.kind().is_trivia() {
@@ -1085,6 +1088,7 @@ fn check_previous_syntax(leaf: &LinkedNode) -> Option<SurroundingSyntax> {
None
}
+/// Checks if the node is enclosed by the given span.
fn enclosed_by(parent: &LinkedNode, s: Option<Span>, leaf: &LinkedNode) -> bool {
s.and_then(|s| parent.find(s)?.find(leaf.span())).is_some()
}
@@ -1195,8 +1199,8 @@ enum ArgSourceKind {
Dict,
}
-/// Classifies node's context (outer syntax) by outer node that can be operated
-/// on by IDE functionality.
+/// Classifies the context (outer syntax) of the node by the outer node that
+/// can be operated on by IDE functionality.
pub fn classify_context_outer<'a>(
outer: LinkedNode<'a>,
node: LinkedNode<'a>,
@@ -1231,8 +1235,8 @@ pub fn classify_context_outer<'a>(
}
}
-/// Classifies node's context (outer syntax) that can be operated on by IDE
-/// functionality.
+/// Classifies the context (outer syntax) of the node that can be operated on
+/// by IDE functionality.
pub fn classify_context(node: LinkedNode<'_>, cursor: Option<usize>) -> Option<SyntaxContext<'_>> {
let mut node = node;
if node.kind().is_trivia() && node.parent_kind().is_some_and(possible_in_code_trivia) {
@@ -1335,6 +1339,7 @@ pub fn classify_context(node: LinkedNode<'_>, cursor: Option<usize>) -> Option<S
}
}
+/// Classifies the context of the callee node.
fn callee_context<'a>(callee: LinkedNode<'a>, node: LinkedNode<'a>) -> Option<SyntaxContext<'a>> {
let parent = callee.parent()?;
let args = match parent.cast::<ast::Expr>() {
@@ -1370,6 +1375,7 @@ fn callee_context<'a>(callee: LinkedNode<'a>, node: LinkedNode<'a>) -> Option<Sy
})
}
+/// Classifies the context of the argument node.
fn arg_context<'a>(
args_node: LinkedNode<'a>,
mut node: LinkedNode<'a>,
@@ -1455,7 +1461,7 @@ fn arg_context<'a>(
}
}
-/// The cursor is on an invalid position.
+/// The cursor is on an invalid position for completion.
pub enum BadCompletionCursor {
/// The cursor is outside of the argument list.
ArgListPos,
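
A sketch tying the classification helpers together for a cursor position. The function names and signatures appear in the hunks above; the import paths, the `leaf_at` lookup, and the `Debug` derive on `InterpretMode` are assumptions that may need adjusting to the typst version in use.

    use tinymist_query::syntax::{classify_syntax, interpret_mode_at, is_ident_like};
    use typst::syntax::{LinkedNode, Side, Source};

    fn inspect(text: &str, cursor: usize) {
        let source = Source::detached(text);
        let root = LinkedNode::new(source.root());
        let leaf = root.leaf_at(cursor, Side::Before);

        // Context-sensitive mode (markup, code, math, ...) at the cursor.
        println!("mode: {:?}", interpret_mode_at(leaf.as_ref()));

        if let Some(leaf) = leaf {
            println!("ident-like: {}", is_ident_like(&leaf));
            println!("classified: {}", classify_syntax(leaf, cursor).is_some());
        }
    }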


@@ -3,20 +3,24 @@ use core::fmt;
use super::def::*;
use crate::ty::{Interned, Ty};
+/// Prints an expression to a writer.
pub(in crate::syntax) struct ExprPrinter<'a, T: fmt::Write> {
f: &'a mut T,
indent: usize,
}
impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
+/// Creates a new expression printer.
pub fn new(f: &'a mut T) -> Self {
Self { f, indent: 0 }
}
+/// Writes a declaration.
pub fn write_decl(&mut self, decl: &Decl) -> fmt::Result {
write!(self.f, "{decl:?}")
}
+/// Writes an expression.
pub fn write_expr(&mut self, expr: &Expr) -> fmt::Result {
match expr {
Expr::Block(exprs) => self.write_seq(exprs),
@@ -47,10 +51,12 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
}
}
+/// Writes an indentation.
fn write_indent(&mut self) -> fmt::Result {
write!(self.f, "{:indent$}", "", indent = self.indent)
}
+/// Writes a sequence of expressions.
fn write_seq(&mut self, exprs: &Interned<Vec<Expr>>) -> fmt::Result {
writeln!(self.f, "[")?;
self.indent += 1;
@@ -64,6 +70,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, "]")
}
+/// Writes an array.
fn write_array(&mut self, elems: &[ArgExpr]) -> fmt::Result {
writeln!(self.f, "(")?;
self.indent += 1;
@@ -77,6 +84,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes a dictionary.
fn write_dict(&mut self, elems: &[ArgExpr]) -> fmt::Result {
writeln!(self.f, "(:")?;
self.indent += 1;
@@ -90,6 +98,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes a list of arguments.
fn write_args(&mut self, args: &[ArgExpr]) -> fmt::Result {
writeln!(self.f, "(")?;
for arg in args.iter() {
@@ -101,6 +110,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes an argument.
fn write_arg(&mut self, arg: &ArgExpr) -> fmt::Result {
match arg {
ArgExpr::Pos(pos) => self.write_expr(pos),
@@ -122,6 +132,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
}
}
+/// Writes a pattern.
pub fn write_pattern(&mut self, pat: &Pattern) -> fmt::Result {
match pat {
Pattern::Expr(expr) => self.write_expr(expr),
@@ -130,6 +141,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
}
}
+/// Writes a pattern signature.
fn write_pattern_sig(&mut self, sig: &PatternSig) -> fmt::Result {
self.f.write_str("pat(\n")?;
self.indent += 1;
@@ -161,6 +173,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes an element.
fn write_element(&mut self, elem: &Interned<ElementExpr>) -> fmt::Result {
self.f.write_str("elem(\n")?;
self.indent += 1;
@@ -174,12 +187,14 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes a unary expression.
fn write_unary(&mut self, unary: &Interned<UnExpr>) -> fmt::Result {
write!(self.f, "un({:?})(", unary.op)?;
self.write_expr(&unary.lhs)?;
self.f.write_str(")")
}
+/// Writes a binary expression.
fn write_binary(&mut self, binary: &Interned<BinExpr>) -> fmt::Result {
let [lhs, rhs] = binary.operands();
write!(self.f, "bin({:?})(", binary.op)?;
@@ -189,6 +204,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes a function application expression.
fn write_apply(&mut self, apply: &Interned<ApplyExpr>) -> fmt::Result {
write!(self.f, "apply(")?;
self.write_expr(&apply.callee)?;
@@ -197,6 +213,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes a function.
fn write_func(&mut self, func: &Interned<FuncExpr>) -> fmt::Result {
write!(self.f, "func[{:?}](", func.decl)?;
self.write_pattern_sig(&func.params)?;
@@ -205,6 +222,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes a let expression.
fn write_let(&mut self, let_expr: &Interned<LetExpr>) -> fmt::Result {
write!(self.f, "let(")?;
self.write_pattern(&let_expr.pattern)?;
@@ -215,6 +233,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes a show expression.
fn write_show(&mut self, show: &Interned<ShowExpr>) -> fmt::Result {
write!(self.f, "show(")?;
if let Some(selector) = &show.selector {
@@ -225,6 +244,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes a set expression.
fn write_set(&mut self, set: &Interned<SetExpr>) -> fmt::Result {
write!(self.f, "set(")?;
self.write_expr(&set.target)?;
@@ -237,6 +257,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
write!(self.f, ")")
}
+/// Writes a reference expression.
fn write_ref(&mut self, reference: &Interned<RefExpr>) -> fmt::Result {
write!(self.f, "ref({:?}", reference.decl)?;
if let Some(step) = &reference.step {
@@ -253,6 +274,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes a content reference expression.
fn write_content_ref(&mut self, content_ref: &Interned<ContentRefExpr>) -> fmt::Result {
write!(self.f, "content_ref({:?}", content_ref.ident)?;
if let Some(of) = &content_ref.of {
@@ -265,6 +287,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes a select expression.
fn write_select(&mut self, sel: &Interned<SelectExpr>) -> fmt::Result {
write!(self.f, "(")?;
self.write_expr(&sel.lhs)?;
@@ -272,24 +295,28 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.write_decl(&sel.key)
}
+/// Writes an import expression.
fn write_import(&mut self, import: &Interned<ImportExpr>) -> fmt::Result {
self.f.write_str("import(")?;
self.write_decl(&import.decl.decl)?;
self.f.write_str(")")
}
+/// Writes an include expression.
fn write_include(&mut self, include: &Interned<IncludeExpr>) -> fmt::Result {
self.f.write_str("include(")?;
self.write_expr(&include.source)?;
self.f.write_str(")")
}
+/// Writes a contextual expression.
fn write_contextual(&mut self, contextual: &Interned<Expr>) -> fmt::Result {
self.f.write_str("contextual(")?;
self.write_expr(contextual)?;
self.f.write_str(")")
}
+/// Writes a conditional expression.
fn write_conditional(&mut self, if_expr: &Interned<IfExpr>) -> fmt::Result {
self.f.write_str("if(")?;
self.write_expr(&if_expr.cond)?;
@@ -300,6 +327,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes a while loop expression.
fn write_while_loop(&mut self, while_expr: &Interned<WhileExpr>) -> fmt::Result {
self.f.write_str("while(")?;
self.write_expr(&while_expr.cond)?;
@@ -308,6 +336,7 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes a for loop expression.
fn write_for_loop(&mut self, for_expr: &Interned<ForExpr>) -> fmt::Result {
self.f.write_str("for(")?;
self.write_pattern(&for_expr.pattern)?;
@@ -318,27 +347,32 @@ impl<'a, T: fmt::Write> ExprPrinter<'a, T> {
self.f.write_str(")")
}
+/// Writes a type.
fn write_type(&mut self, ty: &Ty) -> fmt::Result {
let formatted = ty.describe();
let formatted = formatted.as_deref().unwrap_or("any");
self.f.write_str(formatted)
}
+/// Writes a star expression.
fn write_star(&mut self) -> fmt::Result {
self.f.write_str("*")
}
}
+/// Describes an expression to a writer.
pub(in crate::syntax) struct ExprDescriber<'a, T: fmt::Write> {
f: &'a mut T,
indent: usize,
}
impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
+/// Creates a new expression describer.
pub fn new(f: &'a mut T) -> Self {
Self { f, indent: 0 }
}
+/// Writes a declaration.
pub fn write_decl(&mut self, decl: &Decl) -> fmt::Result {
use DefKind::*;
let shorter = matches!(decl.kind(), Function | Variable | Module);
@@ -349,6 +383,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
write!(self.f, "{decl:?}")
}
+/// Writes an expression.
pub fn write_expr(&mut self, expr: &Expr) -> fmt::Result {
match expr {
Expr::Block(..) => self.f.write_str("Expr(..)"),
@@ -377,10 +412,12 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
}
}
+/// Writes an indentation.
fn write_indent(&mut self) -> fmt::Result {
write!(self.f, "{:indent$}", "", indent = self.indent)
}
+/// Writes an array.
fn write_array(&mut self, elems: &[ArgExpr]) -> fmt::Result {
if elems.len() <= 1 {
self.f.write_char('(')?;
@@ -403,6 +440,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
write!(self.f, ")")
}
+/// Writes a dictionary.
fn write_dict(&mut self, elems: &[ArgExpr]) -> fmt::Result {
if elems.len() <= 1 {
self.f.write_char('(')?;
@@ -426,6 +464,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
write!(self.f, ")")
}
+/// Writes a list of arguments.
fn write_args(&mut self, args: &[ArgExpr]) -> fmt::Result {
writeln!(self.f, "(")?;
for arg in args.iter() {
@@ -437,6 +476,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
write!(self.f, ")")
}
+/// Writes an argument.
fn write_arg(&mut self, arg: &ArgExpr) -> fmt::Result {
match arg {
ArgExpr::Pos(pos) => self.write_expr(pos),
@@ -459,6 +499,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
}
}
+/// Writes a pattern.
pub fn write_pattern(&mut self, pat: &Pattern) -> fmt::Result {
match pat {
Pattern::Expr(expr) => self.write_expr(expr),
@@ -467,6 +508,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
} }
} }
/// Writes a pattern signature.
fn write_pattern_sig(&mut self, sig: &PatternSig) -> fmt::Result { fn write_pattern_sig(&mut self, sig: &PatternSig) -> fmt::Result {
self.f.write_str("pat(\n")?; self.f.write_str("pat(\n")?;
self.indent += 1; self.indent += 1;
@ -498,10 +540,12 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
self.f.write_str(")") self.f.write_str(")")
} }
/// Writes an element.
fn write_element(&mut self, elem: &Interned<ElementExpr>) -> fmt::Result { fn write_element(&mut self, elem: &Interned<ElementExpr>) -> fmt::Result {
write!(self.f, "{:?}", elem.elem.name()) write!(self.f, "{:?}", elem.elem.name())
} }
/// Writes a unary expression.
fn write_unary(&mut self, unary: &Interned<UnExpr>) -> fmt::Result { fn write_unary(&mut self, unary: &Interned<UnExpr>) -> fmt::Result {
use UnaryOp::*; use UnaryOp::*;
match unary.op { match unary.op {
@ -547,6 +591,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
} }
} }
/// Writes a binary expression.
fn write_binary(&mut self, binary: &Interned<BinExpr>) -> fmt::Result { fn write_binary(&mut self, binary: &Interned<BinExpr>) -> fmt::Result {
let [lhs, rhs] = binary.operands(); let [lhs, rhs] = binary.operands();
self.write_expr(lhs)?; self.write_expr(lhs)?;
@ -554,6 +599,7 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
self.write_expr(rhs) self.write_expr(rhs)
} }
/// Writes a function application expression.
fn write_apply(&mut self, apply: &Interned<ApplyExpr>) -> fmt::Result { fn write_apply(&mut self, apply: &Interned<ApplyExpr>) -> fmt::Result {
self.write_expr(&apply.callee)?; self.write_expr(&apply.callee)?;
write!(self.f, "(")?; write!(self.f, "(")?;
@ -561,10 +607,12 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
write!(self.f, ")") write!(self.f, ")")
} }
/// Writes a function expression.
fn write_func(&mut self, func: &Interned<FuncExpr>) -> fmt::Result { fn write_func(&mut self, func: &Interned<FuncExpr>) -> fmt::Result {
self.write_decl(&func.decl) self.write_decl(&func.decl)
} }
/// Writes a reference expression.
fn write_ref(&mut self, resolved: &Interned<RefExpr>) -> fmt::Result { fn write_ref(&mut self, resolved: &Interned<RefExpr>) -> fmt::Result {
if let Some(root) = &resolved.root { if let Some(root) = &resolved.root {
return self.write_expr(root); return self.write_expr(root);
@ -576,10 +624,12 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
write!(self.f, "undefined({:?})", resolved.decl) write!(self.f, "undefined({:?})", resolved.decl)
} }
/// Writes a content reference expression.
fn write_content_ref(&mut self, content_ref: &Interned<ContentRefExpr>) -> fmt::Result { fn write_content_ref(&mut self, content_ref: &Interned<ContentRefExpr>) -> fmt::Result {
write!(self.f, "@{:?}", content_ref.ident) write!(self.f, "@{:?}", content_ref.ident)
} }
/// Writes a select expression.
fn write_select(&mut self, sel: &Interned<SelectExpr>) -> fmt::Result { fn write_select(&mut self, sel: &Interned<SelectExpr>) -> fmt::Result {
write!(self.f, "")?; write!(self.f, "")?;
self.write_expr(&sel.lhs)?; self.write_expr(&sel.lhs)?;
@ -587,18 +637,21 @@ impl<'a, T: fmt::Write> ExprDescriber<'a, T> {
self.write_decl(&sel.key) self.write_decl(&sel.key)
} }
/// Writes an import expression.
fn write_import(&mut self, import: &Interned<ImportExpr>) -> fmt::Result { fn write_import(&mut self, import: &Interned<ImportExpr>) -> fmt::Result {
self.f.write_str("import(")?; self.f.write_str("import(")?;
self.write_decl(&import.decl.decl)?; self.write_decl(&import.decl.decl)?;
self.f.write_str(")") self.f.write_str(")")
} }
/// Writes an include expression.
fn write_include(&mut self, include: &Interned<IncludeExpr>) -> fmt::Result { fn write_include(&mut self, include: &Interned<IncludeExpr>) -> fmt::Result {
self.f.write_str("include(")?; self.f.write_str("include(")?;
self.write_expr(&include.source)?; self.write_expr(&include.source)?;
self.f.write_str(")") self.f.write_str(")")
} }
/// Writes a type.
fn write_type(&mut self, ty: &Ty) -> fmt::Result { fn write_type(&mut self, ty: &Ty) -> fmt::Result {
let formatted = ty.describe(); let formatted = ty.describe();
let formatted = formatted.as_deref().unwrap_or("any"); let formatted = formatted.as_deref().unwrap_or("any");

View file

@ -3,19 +3,22 @@ use std::sync::LazyLock;
use super::{Sig, SigChecker, SigSurfaceKind, TyCtx}; use super::{Sig, SigChecker, SigSurfaceKind, TyCtx};
use crate::ty::prelude::*; use crate::ty::prelude::*;
/// A trait for checking the application of a signature.
pub trait ApplyChecker: TyCtx { pub trait ApplyChecker: TyCtx {
/// Applies a signature to the given arguments.
fn apply(&mut self, sig: Sig, arguments: &Interned<ArgsTy>, pol: bool); fn apply(&mut self, sig: Sig, arguments: &Interned<ArgsTy>, pol: bool);
} }
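For orientation, here is a minimal standalone sketch of the callback pattern this trait encodes; the toy types below are illustrative stand-ins, not the crate's `Sig`, `ArgsTy`, or `TyCtx`. The driver resolves each callable signature reachable from a type and reports it to the checker together with the argument list.

```rust
/// Toy stand-ins for the crate's `Sig` and `ArgsTy`; shapes only, not the real types.
#[derive(Debug, Clone)]
struct ToySig(String);
#[derive(Debug, Clone)]
struct ToyArgs(Vec<String>);

/// Mirrors the shape of `ApplyChecker::apply`: invoked once per resolved signature.
trait ToyApplyChecker {
    fn apply(&mut self, sig: &ToySig, args: &ToyArgs, pol: bool);
}

/// A checker that records every application it sees, e.g. to infer a return type later.
#[derive(Default)]
struct CollectCalls(Vec<String>);

impl ToyApplyChecker for CollectCalls {
    fn apply(&mut self, sig: &ToySig, args: &ToyArgs, pol: bool) {
        self.0.push(format!("{}({:?}) pol={pol}", sig.0, args.0));
    }
}

/// Mirrors the driving role of `Ty::call`: walk candidate signatures and call back.
fn call_all(sigs: &[ToySig], args: &ToyArgs, pol: bool, checker: &mut impl ToyApplyChecker) {
    for sig in sigs {
        checker.apply(sig, args, pol);
    }
}

fn main() {
    let mut checker = CollectCalls::default();
    let args = ToyArgs(vec!["body".into()]);
    call_all(&[ToySig("text".into()), ToySig("raw".into())], &args, true, &mut checker);
    println!("{:?}", checker.0);
}
```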
/// The empty arguments type.
static EMPTY_ARGS: LazyLock<Interned<ArgsTy>> = LazyLock::new(|| ArgsTy::default().into()); static EMPTY_ARGS: LazyLock<Interned<ArgsTy>> = LazyLock::new(|| ArgsTy::default().into());
impl Ty { impl Ty {
/// Call the given type with the given arguments. /// Calls the given type with the given arguments.
pub fn call(&self, args: &Interned<ArgsTy>, pol: bool, c: &mut impl ApplyChecker) { pub fn call(&self, args: &Interned<ArgsTy>, pol: bool, c: &mut impl ApplyChecker) {
ApplySigChecker(c, args).ty(self, SigSurfaceKind::Call, pol); ApplySigChecker(c, args).ty(self, SigSurfaceKind::Call, pol);
} }
/// Get the tuple element type of the given type. /// Gets the tuple element type of the given type.
pub fn tuple_element_of(&self, pol: bool, c: &mut impl ApplyChecker) { pub fn tuple_element_of(&self, pol: bool, c: &mut impl ApplyChecker) {
ApplySigChecker(c, &EMPTY_ARGS).ty(self, SigSurfaceKind::Array, pol); ApplySigChecker(c, &EMPTY_ARGS).ty(self, SigSurfaceKind::Array, pol);
} }
@ -26,23 +29,26 @@ impl Ty {
} }
} }
/// A checker for applying a signature to a type.
#[derive(BindTyCtx)] #[derive(BindTyCtx)]
#[bind(0)] #[bind(0)]
pub struct ApplySigChecker<'a, T: ApplyChecker>(&'a mut T, &'a Interned<ArgsTy>); pub struct ApplySigChecker<'a, T: ApplyChecker>(&'a mut T, &'a Interned<ArgsTy>);
impl<T: ApplyChecker> ApplySigChecker<'_, T> { impl<T: ApplyChecker> ApplySigChecker<'_, T> {
/// Applies a signature to a type.
fn ty(&mut self, ty: &Ty, surface: SigSurfaceKind, pol: bool) { fn ty(&mut self, ty: &Ty, surface: SigSurfaceKind, pol: bool) {
ty.sig_surface(pol, surface, self) ty.sig_surface(pol, surface, self)
} }
} }
impl<T: ApplyChecker> SigChecker for ApplySigChecker<'_, T> { impl<T: ApplyChecker> SigChecker for ApplySigChecker<'_, T> {
/// Checks a signature against a context.
fn check(&mut self, cano_sig: Sig, ctx: &mut super::SigCheckContext, pol: bool) -> Option<()> { fn check(&mut self, cano_sig: Sig, ctx: &mut super::SigCheckContext, pol: bool) -> Option<()> {
let (cano_sig, is_partialize) = match cano_sig { let (cano_sig, is_partialize) = match cano_sig {
Sig::Partialize(sig) => (*sig, true), Sig::Partialize(sig) => (*sig, true),
sig => (sig, false), sig => (sig, false),
}; };
// Bind the arguments to the canonical signature. // Binds the arguments to the canonical signature.
let partial_sig = if ctx.args.is_empty() { let partial_sig = if ctx.args.is_empty() {
cano_sig cano_sig
} else { } else {

View file

@ -4,13 +4,17 @@ use typst::foundations::{self, Func};
use crate::ty::prelude::*; use crate::ty::prelude::*;
/// A trait for checking the bounds of a type.
pub trait BoundChecker: Sized + TyCtx { pub trait BoundChecker: Sized + TyCtx {
/// Collects the bounds of a type.
fn collect(&mut self, ty: &Ty, pol: bool); fn collect(&mut self, ty: &Ty, pol: bool);
/// Checks the bounds of a variable.
fn check_var(&mut self, u: &Interned<TypeVar>, pol: bool) { fn check_var(&mut self, u: &Interned<TypeVar>, pol: bool) {
self.check_var_rec(u, pol); self.check_var_rec(u, pol);
} }
/// Checks the bounds of a variable recursively.
fn check_var_rec(&mut self, u: &Interned<TypeVar>, pol: bool) { fn check_var_rec(&mut self, u: &Interned<TypeVar>, pol: bool) {
let Some(w) = self.global_bounds(u, pol) else { let Some(w) = self.global_bounds(u, pol) else {
return; return;
@ -21,11 +25,13 @@ pub trait BoundChecker: Sized + TyCtx {
} }
} }
/// A predicate for checking the bounds of a type.
#[derive(BindTyCtx)] #[derive(BindTyCtx)]
#[bind(0)] #[bind(0)]
pub struct BoundPred<'a, T: TyCtx, F>(pub &'a T, pub F); pub struct BoundPred<'a, T: TyCtx, F>(pub &'a T, pub F);
impl<'a, T: TyCtx, F> BoundPred<'a, T, F> { impl<'a, T: TyCtx, F> BoundPred<'a, T, F> {
/// Creates a new bound predicate.
pub fn new(t: &'a T, f: F) -> Self { pub fn new(t: &'a T, f: F) -> Self {
Self(t, f) Self(t, f)
} }
@ -40,15 +46,19 @@ where
} }
} }
/// A source of documentation.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum DocSource { pub enum DocSource {
/// A variable source.
Var(Interned<TypeVar>), Var(Interned<TypeVar>),
/// An instance (value) source.
Ins(Interned<InsTy>), Ins(Interned<InsTy>),
/// A builtin type source.
Builtin(BuiltinTy), Builtin(BuiltinTy),
} }
impl DocSource { impl DocSource {
/// Regard doc source as function. /// Casts doc source to a function.
pub fn as_func(&self) -> Option<Func> { pub fn as_func(&self) -> Option<Func> {
match self { match self {
Self::Var(..) => None, Self::Var(..) => None,
@ -65,12 +75,12 @@ impl DocSource {
} }
impl Ty { impl Ty {
/// Check if the given type has bounds (is combinated). /// Checks if the given type has bounds (is combined).
pub fn has_bounds(&self) -> bool { pub fn has_bounds(&self) -> bool {
matches!(self, Ty::Union(_) | Ty::Let(_) | Ty::Var(_)) matches!(self, Ty::Union(_) | Ty::Let(_) | Ty::Var(_))
} }
/// Convert type to doc source /// Converts a type to doc source.
pub fn as_source(&self) -> Option<DocSource> { pub fn as_source(&self) -> Option<DocSource> {
match self { match self {
Ty::Builtin(ty @ (BuiltinTy::Type(..) | BuiltinTy::Element(..))) => { Ty::Builtin(ty @ (BuiltinTy::Type(..) | BuiltinTy::Element(..))) => {
@ -86,7 +96,7 @@ impl Ty {
} }
} }
/// Get the sources of the given type. /// Gets the sources of the given type.
pub fn sources(&self) -> Vec<DocSource> { pub fn sources(&self) -> Vec<DocSource> {
let mut results = vec![]; let mut results = vec![];
fn collect(ty: &Ty, results: &mut Vec<DocSource>) { fn collect(ty: &Ty, results: &mut Vec<DocSource>) {
@ -135,21 +145,24 @@ impl Ty {
results results
} }
/// Profile the bounds of the given type. /// Profiles the bounds of the given type.
pub fn bounds(&self, pol: bool, checker: &mut impl BoundChecker) { pub fn bounds(&self, pol: bool, checker: &mut impl BoundChecker) {
BoundCheckContext.ty(self, pol, checker); BoundCheckContext.ty(self, pol, checker);
} }
} }
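The `sources` and `bounds` walkers share one recursion shape: descend through composite types and push every leaf that can carry documentation. A standalone sketch of that shape, using a toy type enum rather than the crate's `Ty`:

```rust
/// A toy type tree; only the variants needed to show the recursion.
enum ToyTy {
    Builtin(&'static str),
    Value(&'static str),
    Union(Vec<ToyTy>),
    Array(Box<ToyTy>),
}

/// Mirrors the nested `collect` helper in `Ty::sources`: walk composites and
/// record every leaf that could act as a documentation source.
fn collect(ty: &ToyTy, out: &mut Vec<String>) {
    match ty {
        ToyTy::Builtin(name) | ToyTy::Value(name) => out.push((*name).to_string()),
        ToyTy::Union(tys) => {
            for ty in tys {
                collect(ty, out);
            }
        }
        ToyTy::Array(elem) => collect(elem, out),
    }
}

fn main() {
    let ty = ToyTy::Union(vec![
        ToyTy::Builtin("length"),
        ToyTy::Array(Box::new(ToyTy::Value("red"))),
    ]);
    let mut out = Vec::new();
    collect(&ty, &mut out);
    assert_eq!(out, ["length", "red"]);
}
```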
/// A context for checking the bounds of a type.
pub struct BoundCheckContext; pub struct BoundCheckContext;
impl BoundCheckContext { impl BoundCheckContext {
/// Checks the bounds of multiple types.
fn tys<'a>(&mut self, tys: impl Iterator<Item = &'a Ty>, pol: bool, c: &mut impl BoundChecker) { fn tys<'a>(&mut self, tys: impl Iterator<Item = &'a Ty>, pol: bool, c: &mut impl BoundChecker) {
for ty in tys { for ty in tys {
self.ty(ty, pol, c); self.ty(ty, pol, c);
} }
} }
/// Checks the bounds of a type.
fn ty(&mut self, ty: &Ty, pol: bool, checker: &mut impl BoundChecker) { fn ty(&mut self, ty: &Ty, pol: bool, checker: &mut impl BoundChecker) {
match ty { match ty {
Ty::Union(u) => { Ty::Union(u) => {

View file

@ -16,25 +16,44 @@ use typst::{
use crate::syntax::Decl; use crate::syntax::Decl;
use crate::ty::*; use crate::ty::*;
/// A kind of path recognized by the analyzer.
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, EnumIter)] #[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, EnumIter)]
pub enum PathPreference { pub enum PathKind {
Source { allow_package: bool }, /// A source path: `import "foo.typ"`.
Source {
/// Whether to allow package imports.
allow_package: bool,
},
/// A WASM path: `plugin("foo.wasm")`.
Wasm, Wasm,
/// A CSV path: `csv("foo.csv")`.
Csv, Csv,
/// An image path: `image("foo.png")`.
Image, Image,
/// A JSON path: `json("foo.json")`.
Json, Json,
/// A YAML path: `yaml("foo.yml")`.
Yaml, Yaml,
/// An XML path: `xml("foo.xml")`.
Xml, Xml,
/// A TOML path: `toml("foo.toml")`.
Toml, Toml,
/// A CSL path: `bibliography(csl: "foo.csl")`.
Csl, Csl,
/// A bibliography path: `bibliography("foo.bib")`.
Bibliography, Bibliography,
/// A raw theme path: `raw(theme: "foo.tmTheme")`.
RawTheme, RawTheme,
/// A raw syntaxes path: `raw(syntaxes: "foo.tmLanguage")`.
RawSyntax, RawSyntax,
/// All of the above kinds.
Special, Special,
/// Merely known as a path.
None, None,
} }
impl PathPreference { impl PathKind {
/// Matches the extension of the path by kind.
pub fn ext_matcher(&self) -> &'static RegexSet { pub fn ext_matcher(&self) -> &'static RegexSet {
type RegSet = LazyLock<RegexSet>; type RegSet = LazyLock<RegexSet>;
@ -81,34 +100,37 @@ impl PathPreference {
}); });
match self { match self {
PathPreference::Source { .. } => &SOURCE_REGSET, PathKind::Source { .. } => &SOURCE_REGSET,
PathPreference::Wasm => &WASM_REGSET, PathKind::Wasm => &WASM_REGSET,
PathPreference::Csv => &CSV_REGSET, PathKind::Csv => &CSV_REGSET,
PathPreference::Image => &IMAGE_REGSET, PathKind::Image => &IMAGE_REGSET,
PathPreference::Json => &JSON_REGSET, PathKind::Json => &JSON_REGSET,
PathPreference::Yaml => &YAML_REGSET, PathKind::Yaml => &YAML_REGSET,
PathPreference::Xml => &XML_REGSET, PathKind::Xml => &XML_REGSET,
PathPreference::Toml => &TOML_REGSET, PathKind::Toml => &TOML_REGSET,
PathPreference::Csl => &CSL_REGSET, PathKind::Csl => &CSL_REGSET,
PathPreference::Bibliography => &BIB_REGSET, PathKind::Bibliography => &BIB_REGSET,
PathPreference::RawTheme => &RAW_THEME_REGSET, PathKind::RawTheme => &RAW_THEME_REGSET,
PathPreference::RawSyntax => &RAW_SYNTAX_REGSET, PathKind::RawSyntax => &RAW_SYNTAX_REGSET,
PathPreference::Special => &ALL_SPECIAL_REGSET, PathKind::Special => &ALL_SPECIAL_REGSET,
PathPreference::None => &ALL_REGSET, PathKind::None => &ALL_REGSET,
} }
} }
/// Checks if the path matches the kind.
pub fn is_match(&self, path: &Path) -> bool { pub fn is_match(&self, path: &Path) -> bool {
let ext = path.extension().and_then(|ext| ext.to_str()); let ext = path.extension().and_then(|ext| ext.to_str());
ext.is_some_and(|ext| self.ext_matcher().is_match(ext)) ext.is_some_and(|ext| self.ext_matcher().is_match(ext))
} }
/// Gets the kind of the path by extension.
pub fn from_ext(path: &str) -> Option<Self> { pub fn from_ext(path: &str) -> Option<Self> {
PathPreference::iter().find(|preference| preference.is_match(std::path::Path::new(path))) PathKind::iter().find(|preference| preference.is_match(std::path::Path::new(path)))
} }
} }
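All of the matchers returned by `ext_matcher` follow the same recipe: a lazily built, case-insensitive `RegexSet` over known extensions, queried through `is_match`. Below is a standalone sketch of that recipe using the `regex` crate; the patterns are illustrative, not the crate's actual sets.

```rust
use std::path::Path;
use std::sync::LazyLock;

use regex::RegexSet;

/// Illustrative image-extension set; the real sets live in `PathKind::ext_matcher`.
static IMAGE_EXTS: LazyLock<RegexSet> = LazyLock::new(|| {
    // `(?i)` makes the match case-insensitive, so `TEST.PNG` also matches.
    RegexSet::new(["(?i)^png$", "(?i)^jpe?g$", "(?i)^gif$", "(?i)^svg$"]).unwrap()
});

/// Mirrors `PathKind::is_match`: extract the extension and run it through the set.
fn is_image_path(path: &Path) -> bool {
    path.extension()
        .and_then(|ext| ext.to_str())
        .is_some_and(|ext| IMAGE_EXTS.is_match(ext))
}

fn main() {
    assert!(is_image_path(Path::new("figure.PNG")));
    assert!(!is_image_path(Path::new("notes.typ")));
}
```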
impl Ty { impl Ty {
/// Converts a cast info to a type.
pub fn from_cast_info(ty: &CastInfo) -> Ty { pub fn from_cast_info(ty: &CastInfo) -> Ty {
match &ty { match &ty {
CastInfo::Any => Ty::Any, CastInfo::Any => Ty::Any,
@ -120,6 +142,7 @@ impl Ty {
} }
} }
/// Converts a parameter site to a type.
pub fn from_param_site(func: &Func, param: &ParamInfo) -> Ty { pub fn from_param_site(func: &Func, param: &ParamInfo) -> Ty {
use typst::foundations::func::Repr; use typst::foundations::func::Repr;
match func.inner() { match func.inner() {
@ -135,6 +158,7 @@ impl Ty {
Self::from_cast_info(&param.input) Self::from_cast_info(&param.input)
} }
/// Converts a return site to a type.
pub(crate) fn from_return_site(func: &Func, ty: &'_ CastInfo) -> Self { pub(crate) fn from_return_site(func: &Func, ty: &'_ CastInfo) -> Self {
use typst::foundations::func::Repr; use typst::foundations::func::Repr;
match func.inner() { match func.inner() {
@ -148,6 +172,7 @@ impl Ty {
} }
} }
/// An iterator over a union of cast infos.
struct UnionIter<'a>(Vec<std::slice::Iter<'a, CastInfo>>); struct UnionIter<'a>(Vec<std::slice::Iter<'a, CastInfo>>);
impl<'a> Iterator for UnionIter<'a> { impl<'a> Iterator for UnionIter<'a> {
@ -171,18 +196,21 @@ impl<'a> Iterator for UnionIter<'a> {
} }
// todo: we can write some proto files for builtin sigs // todo: we can write some proto files for builtin sigs
/// A builtin signature.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum BuiltinSig<'a> { pub enum BuiltinSig<'a> {
/// Map a function over a tuple. /// Maps a function over a tuple: `(a, b, c).map`
TupleMap(&'a Ty), TupleMap(&'a Ty),
/// Get element of a tuple. /// Gets an element of a tuple: `(a, b, c).at`
TupleAt(&'a Ty), TupleAt(&'a Ty),
} }
/// A package identifier. /// A package identifier.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct PackageId { pub struct PackageId {
/// The namespace of the package.
pub namespace: StrRef, pub namespace: StrRef,
/// The name of the package.
pub name: StrRef, pub name: StrRef,
} }
@ -206,52 +234,84 @@ impl TryFrom<FileId> for PackageId {
} }
} }
/// A builtin type.
#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] #[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub enum BuiltinTy { pub enum BuiltinTy {
/// A clause type.
Clause, Clause,
/// An undefined type.
Undef, Undef,
/// A space type: `[ ]`
Space, Space,
/// A none type: `none`
None, None,
/// A break type: `break`
Break, Break,
/// A continue type: `continue`
Continue, Continue,
/// An infer type: `any`
Infer, Infer,
/// A flow none type: `none`
FlowNone, FlowNone,
/// An auto type: `auto`
Auto, Auto,
/// Arguments: `arguments(a, b: c, ..d)`
Args, Args,
/// A color type: `rgb(r, g, b)`
Color, Color,
/// A text size type: `text.size`
TextSize, TextSize,
/// A text font type: `text.font`
TextFont, TextFont,
/// A text feature type: `text.feature`
TextFeature, TextFeature,
/// A text language type: `text.lang`
TextLang, TextLang,
/// A text region type: `text.region`
TextRegion, TextRegion,
/// A dir type: `left`
Label,
CiteLabel,
RefLabel,
Dir, Dir,
/// A label type: `<label>`
Label,
/// A cite label type: `#cite(<label>)`
CiteLabel,
/// A ref label type: `@label`
RefLabel,
/// A length type: `10pt`
Length, Length,
/// A float type: `1.0`
Float, Float,
/// A stroke type: `stroke(paint: red)`
Stroke, Stroke,
/// A margin type: `page(margin: 10pt)`
Margin, Margin,
/// An inset type: `box(inset: 10pt)`
Inset, Inset,
/// An outset type: `box(outset: 10pt)`
Outset, Outset,
/// A radius type: `box(radius: 10pt)`
Radius, Radius,
/// A tag type: `tag`
Tag(Box<(StrRef, Option<Interned<PackageId>>)>), Tag(Box<(StrRef, Option<Interned<PackageId>>)>),
/// A value having a specific type. /// The type of a value: `int` of `10`
Type(typst::foundations::Type), Type(typst::foundations::Type),
/// A value of some type. /// The type of a type: `type(int)`
TypeType(typst::foundations::Type), TypeType(typst::foundations::Type),
/// A content having a specific element type. /// The element type of a content value. For example, `#[text]` has
/// element type `text`.
///
/// If the element is not specified, the element type is `content`.
Content(Option<typst::foundations::Element>), Content(Option<typst::foundations::Element>),
/// A value of some element type. /// The type of an element: `text`
Element(typst::foundations::Element), Element(typst::foundations::Element),
/// A module type: `module(foo)`
Module(Interned<Decl>), Module(Interned<Decl>),
Path(PathPreference), /// A path type: `import "foo.typ"`
Path(PathKind),
} }
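As `from_value` below shows, the conversion keeps literal information only where it pays off: a `bool` value becomes a literal boolean type, while other values collapse to the type they belong to. A standalone sketch of that dispatch with toy enums (not the crate's `Value` or `Ty`):

```rust
/// Toy value and type enums; just enough shape to show the dispatch.
enum ToyValue {
    Bool(bool),
    Int(i64),
    Str(String),
}

#[derive(Debug, PartialEq)]
enum ToyTy {
    /// A literal boolean type, e.g. `true` treated as a type.
    Boolean(Option<bool>),
    /// Fallback: the dynamic type name of the value.
    Builtin(&'static str),
}

/// Mirrors the shape of `BuiltinTy::from_value`: keep bool literals, fall back otherwise.
fn from_value(value: &ToyValue) -> ToyTy {
    match value {
        ToyValue::Bool(b) => ToyTy::Boolean(Some(*b)),
        ToyValue::Int(_) => ToyTy::Builtin("int"),
        ToyValue::Str(_) => ToyTy::Builtin("str"),
    }
}

fn main() {
    assert_eq!(from_value(&ToyValue::Bool(true)), ToyTy::Boolean(Some(true)));
    assert_eq!(from_value(&ToyValue::Int(1)), ToyTy::Builtin("int"));
}
```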
impl fmt::Debug for BuiltinTy { impl fmt::Debug for BuiltinTy {
@ -310,6 +370,7 @@ impl fmt::Debug for BuiltinTy {
} }
impl BuiltinTy { impl BuiltinTy {
/// Converts a value to a type.
pub fn from_value(builtin: &Value) -> Ty { pub fn from_value(builtin: &Value) -> Ty {
if let Value::Bool(v) = builtin { if let Value::Bool(v) = builtin {
return Ty::Boolean(Some(*v)); return Ty::Boolean(Some(*v));
@ -318,6 +379,7 @@ impl BuiltinTy {
Self::from_builtin(builtin.ty()) Self::from_builtin(builtin.ty())
} }
/// Converts a builtin type to a type.
pub fn from_builtin(builtin: Type) -> Ty { pub fn from_builtin(builtin: Type) -> Ty {
if builtin == Type::of::<AutoValue>() { if builtin == Type::of::<AutoValue>() {
return Ty::Builtin(BuiltinTy::Auto); return Ty::Builtin(BuiltinTy::Auto);
@ -344,6 +406,7 @@ impl BuiltinTy {
BuiltinTy::Type(builtin).literally() BuiltinTy::Type(builtin).literally()
} }
/// Describes the builtin type.
pub(crate) fn describe(&self) -> EcoString { pub(crate) fn describe(&self) -> EcoString {
let res = match self { let res = match self {
BuiltinTy::Clause => "any", BuiltinTy::Clause => "any",
@ -394,20 +457,20 @@ impl BuiltinTy {
} }
BuiltinTy::Module(m) => return eco_format!("module({})", m.name()), BuiltinTy::Module(m) => return eco_format!("module({})", m.name()),
BuiltinTy::Path(s) => match s { BuiltinTy::Path(s) => match s {
PathPreference::None => "[any]", PathKind::None => "[any]",
PathPreference::Special => "[any]", PathKind::Special => "[any]",
PathPreference::Source { .. } => "[source]", PathKind::Source { .. } => "[source]",
PathPreference::Wasm => "[wasm]", PathKind::Wasm => "[wasm]",
PathPreference::Csv => "[csv]", PathKind::Csv => "[csv]",
PathPreference::Image => "[image]", PathKind::Image => "[image]",
PathPreference::Json => "[json]", PathKind::Json => "[json]",
PathPreference::Yaml => "[yaml]", PathKind::Yaml => "[yaml]",
PathPreference::Xml => "[xml]", PathKind::Xml => "[xml]",
PathPreference::Toml => "[toml]", PathKind::Toml => "[toml]",
PathPreference::Csl => "[csl]", PathKind::Csl => "[csl]",
PathPreference::Bibliography => "[bib]", PathKind::Bibliography => "[bib]",
PathPreference::RawTheme => "[theme]", PathKind::RawTheme => "[theme]",
PathPreference::RawSyntax => "[syntax]", PathKind::RawSyntax => "[syntax]",
}, },
}; };
@ -417,10 +480,12 @@ impl BuiltinTy {
use BuiltinTy::*; use BuiltinTy::*;
/// Converts a flow builtin to a type.
fn literally(s: impl FlowBuiltinLiterally) -> Ty { fn literally(s: impl FlowBuiltinLiterally) -> Ty {
s.literally() s.literally()
} }
/// A trait for converting a flow builtin to a type.
trait FlowBuiltinLiterally { trait FlowBuiltinLiterally {
fn literally(self) -> Ty; fn literally(self) -> Ty;
} }
@ -443,7 +508,7 @@ impl FlowBuiltinLiterally for Ty {
} }
} }
// separate by middle /// A macro for converting a flow builtin to a type.
macro_rules! flow_builtin_union_inner { macro_rules! flow_builtin_union_inner {
($literal_kind:expr) => { ($literal_kind:expr) => {
literally($literal_kind) literally($literal_kind)
@ -455,6 +520,7 @@ macro_rules! flow_builtin_union_inner {
}; };
} }
/// A macro for building a union type from flow builtins and literals.
macro_rules! flow_union { macro_rules! flow_union {
// the first one is string // the first one is string
($($b:tt)*) => { ($($b:tt)*) => {
@ -463,6 +529,7 @@ macro_rules! flow_union {
} }
/// A macro for building a record type from field-name/type pairs.
macro_rules! flow_record { macro_rules! flow_record {
($($name:expr => $ty:expr),* $(,)?) => { ($($name:expr => $ty:expr),* $(,)?) => {
RecordTy::new(vec![ RecordTy::new(vec![
@ -476,24 +543,25 @@ macro_rules! flow_record {
}; };
} }
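Both `flow_union!` and `flow_record!` lean on the same `macro_rules!` repetition idiom: accept a comma-separated list (here `name => type` pairs, with an optional trailing comma) and expand it into a single constructor call. A standalone sketch that builds a plain `Vec` instead of the crate's `RecordTy`:

```rust
/// Illustrative stand-in for `flow_record!`: collect `name => ty` pairs into a Vec.
macro_rules! toy_record {
    ($($name:expr => $ty:expr),* $(,)?) => {
        vec![$(($name.to_string(), $ty.to_string())),*]
    };
}

fn main() {
    let stroke = toy_record!(
        "paint" => "color",
        "thickness" => "length",
    );
    assert_eq!(stroke[0], ("paint".to_string(), "color".to_string()));
}
```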
/// Maps a function parameter to a type.
pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> { pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
// todo: remove path params which is compatible with 0.12.0 // todo: remove path params which is compatible with 0.12.0
match (func.name()?, param.name) { match (func.name()?, param.name) {
// todo: pdf.embed // todo: pdf.embed
("embed", "path") => Some(literally(Path(PathPreference::None))), ("embed", "path") => Some(literally(Path(PathKind::None))),
("cbor", "path" | "source") => Some(literally(Path(PathPreference::None))), ("cbor", "path" | "source") => Some(literally(Path(PathKind::None))),
("plugin", "source") => Some(literally(Path(PathPreference::Wasm))), ("plugin", "source") => Some(literally(Path(PathKind::Wasm))),
("csv", "path" | "source") => Some(literally(Path(PathPreference::Csv))), ("csv", "path" | "source") => Some(literally(Path(PathKind::Csv))),
("image", "path" | "source") => Some(literally(Path(PathPreference::Image))), ("image", "path" | "source") => Some(literally(Path(PathKind::Image))),
("read", "path" | "source") => Some(literally(Path(PathPreference::None))), ("read", "path" | "source") => Some(literally(Path(PathKind::None))),
("json", "path" | "source") => Some(literally(Path(PathPreference::Json))), ("json", "path" | "source") => Some(literally(Path(PathKind::Json))),
("yaml", "path" | "source") => Some(literally(Path(PathPreference::Yaml))), ("yaml", "path" | "source") => Some(literally(Path(PathKind::Yaml))),
("xml", "path" | "source") => Some(literally(Path(PathPreference::Xml))), ("xml", "path" | "source") => Some(literally(Path(PathKind::Xml))),
("toml", "path" | "source") => Some(literally(Path(PathPreference::Toml))), ("toml", "path" | "source") => Some(literally(Path(PathKind::Toml))),
("raw", "theme") => Some(literally(Path(PathPreference::RawTheme))), ("raw", "theme") => Some(literally(Path(PathKind::RawTheme))),
("raw", "syntaxes") => Some(literally(Path(PathPreference::RawSyntax))), ("raw", "syntaxes") => Some(literally(Path(PathKind::RawSyntax))),
("bibliography" | "cite", "style") => Some(Ty::iter_union([ ("bibliography" | "cite", "style") => Some(Ty::iter_union([
literally(Path(PathPreference::Csl)), literally(Path(PathKind::Csl)),
Ty::from_cast_info(&param.input), Ty::from_cast_info(&param.input),
])), ])),
("cite", "key") => Some(Ty::iter_union([literally(CiteLabel)])), ("cite", "key") => Some(Ty::iter_union([literally(CiteLabel)])),
@ -518,7 +586,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
} }
("bibliography", "path" | "sources") => { ("bibliography", "path" | "sources") => {
static BIB_PATH_TYPE: LazyLock<Ty> = LazyLock::new(|| { static BIB_PATH_TYPE: LazyLock<Ty> = LazyLock::new(|| {
let bib_path_ty = literally(Path(PathPreference::Bibliography)); let bib_path_ty = literally(Path(PathKind::Bibliography));
Ty::iter_union([bib_path_ty.clone(), Ty::Array(bib_path_ty.into())]) Ty::iter_union([bib_path_ty.clone(), Ty::Array(bib_path_ty.into())])
}); });
Some(BIB_PATH_TYPE.clone()) Some(BIB_PATH_TYPE.clone())
@ -615,6 +683,7 @@ pub(super) fn param_mapping(func: &Func, param: &ParamInfo) -> Option<Ty> {
} }
} }
/// The dash component of a stroke type.
static FLOW_STROKE_DASH_TYPE: LazyLock<Ty> = LazyLock::new(|| { static FLOW_STROKE_DASH_TYPE: LazyLock<Ty> = LazyLock::new(|| {
flow_union!( flow_union!(
"solid", "solid",
@ -635,6 +704,7 @@ static FLOW_STROKE_DASH_TYPE: LazyLock<Ty> = LazyLock::new(|| {
) )
}); });
/// The record component of a stroke type.
pub static FLOW_STROKE_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| { pub static FLOW_STROKE_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
flow_record!( flow_record!(
"paint" => literally(Color), "paint" => literally(Color),
@ -646,6 +716,7 @@ pub static FLOW_STROKE_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
) )
}); });
/// The record component of a margin type.
pub static FLOW_MARGIN_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| { pub static FLOW_MARGIN_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
flow_record!( flow_record!(
"top" => literally(Length), "top" => literally(Length),
@ -660,6 +731,7 @@ pub static FLOW_MARGIN_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
) )
}); });
/// The record component of an inset type.
pub static FLOW_INSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| { pub static FLOW_INSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
flow_record!( flow_record!(
"top" => literally(Length), "top" => literally(Length),
@ -672,6 +744,7 @@ pub static FLOW_INSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
) )
}); });
/// The record component of an outset type.
pub static FLOW_OUTSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| { pub static FLOW_OUTSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
flow_record!( flow_record!(
"top" => literally(Length), "top" => literally(Length),
@ -684,6 +757,7 @@ pub static FLOW_OUTSET_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
) )
}); });
/// The record component of a radius type.
pub static FLOW_RADIUS_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| { pub static FLOW_RADIUS_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
flow_record!( flow_record!(
"top" => literally(Length), "top" => literally(Length),
@ -698,6 +772,7 @@ pub static FLOW_RADIUS_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
) )
}); });
/// The record component of a text font type.
pub static FLOW_TEXT_FONT_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| { pub static FLOW_TEXT_FONT_DICT: LazyLock<Interned<RecordTy>> = LazyLock::new(|| {
flow_record!( flow_record!(
"name" => literally(TextFont), "name" => literally(TextFont),
@ -733,15 +808,15 @@ mod tests {
#[test] #[test]
fn test_image_extension() { fn test_image_extension() {
let path = "test.png"; let path = "test.png";
let preference = super::PathPreference::from_ext(path).unwrap(); let preference = super::PathKind::from_ext(path).unwrap();
assert_eq!(preference, super::PathPreference::Image); assert_eq!(preference, super::PathKind::Image);
} }
#[test] #[test]
fn test_image_extension_uppercase() { fn test_image_extension_uppercase() {
let path = "TEST.PNG"; let path = "TEST.PNG";
let preference = super::PathPreference::from_ext(path).unwrap(); let preference = super::PathKind::from_ext(path).unwrap();
assert_eq!(preference, super::PathPreference::Image); assert_eq!(preference, super::PathKind::Image);
} }
// todo: map function // todo: map function

View file

@ -4,6 +4,7 @@ use crate::func_signature;
use super::*; use super::*;
/// Checks if a value is a plain value.
pub fn is_plain_value(value: &Value) -> bool { pub fn is_plain_value(value: &Value) -> bool {
matches!( matches!(
value, value,
@ -69,6 +70,7 @@ pub fn term_value(value: &Value) -> Ty {
} }
} }
/// Gets the type of a value recursively.
pub fn term_value_rec(value: &Value, s: Span) -> Ty { pub fn term_value_rec(value: &Value, s: Span) -> Ty {
match value { match value {
Value::Type(ty) => Ty::Builtin(BuiltinTy::TypeType(*ty)), Value::Type(ty) => Ty::Builtin(BuiltinTy::TypeType(*ty)),

View file

@ -28,12 +28,12 @@ pub(crate) use super::{TyCtx, TyCtxMut};
pub(crate) use crate::adt::interner::Interned; pub(crate) use crate::adt::interner::Interned;
pub use tinymist_derive::BindTyCtx; pub use tinymist_derive::BindTyCtx;
/// A reference to the interned type /// A reference to the interned type.
pub(crate) type TyRef = Interned<Ty>; pub(crate) type TyRef = Interned<Ty>;
/// A reference to the interned string /// A reference to the interned string.
pub(crate) type StrRef = Interned<str>; pub(crate) type StrRef = Interned<str>;
/// All possible types in tinymist /// All possible types in tinymist.
#[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Ty { pub enum Ty {
// Simple Types // Simple Types
@ -54,37 +54,37 @@ pub enum Ty {
/// A union type, whose negation is intersection type. /// A union type, whose negation is intersection type.
/// `t := t1 | t2 | ... | tn, t^- := t1 & t2 & ... & tn` /// `t := t1 | t2 | ... | tn, t^- := t1 & t2 & ... & tn`
Union(Interned<Vec<Ty>>), Union(Interned<Vec<Ty>>),
/// A frozen type variable /// A frozen type variable.
/// `t :> t1 | t2 | ... | tn <: f1 & f2 & ... & fn` /// `t :> t1 | t2 | ... | tn <: f1 & f2 & ... & fn`
Let(Interned<TypeBounds>), Let(Interned<TypeBounds>),
/// An opening type variable owing bounds /// An opening type variable owing bounds.
Var(Interned<TypeVar>), Var(Interned<TypeVar>),
// Composite Types // Composite Types
/// A typst dictionary type /// A typst dictionary type.
Dict(Interned<RecordTy>), Dict(Interned<RecordTy>),
/// An array type /// An array type.
Array(TyRef), Array(TyRef),
/// A tuple type /// A tuple type.
/// Note: may contains spread types /// Note: may contain spread types.
Tuple(Interned<Vec<Ty>>), Tuple(Interned<Vec<Ty>>),
/// A function type /// A function type.
Func(Interned<SigTy>), Func(Interned<SigTy>),
/// An argument type /// An argument type.
Args(Interned<ArgsTy>), Args(Interned<ArgsTy>),
/// An argument type /// A pattern type.
Pattern(Interned<PatternTy>), Pattern(Interned<PatternTy>),
// Type operations // Type operations
/// A partially applied function type /// A partially applied function type.
With(Interned<SigWithTy>), With(Interned<SigWithTy>),
/// Select a field from a type /// Select a field from a type.
Select(Interned<SelectTy>), Select(Interned<SelectTy>),
/// A unary operation /// A unary operation.
Unary(Interned<TypeUnary>), Unary(Interned<TypeUnary>),
/// A binary operation /// A binary operation.
Binary(Interned<TypeBinary>), Binary(Interned<TypeBinary>),
/// A conditional type /// A conditional type.
If(Interned<IfTy>), If(Interned<IfTy>),
} }
@ -136,11 +136,12 @@ impl fmt::Debug for Ty {
} }
impl Ty { impl Ty {
/// Whether the type is a dictionary type /// Whether the type is a dictionary type.
pub fn is_dict(&self) -> bool { pub fn is_dict(&self) -> bool {
matches!(self, Ty::Dict(..)) matches!(self, Ty::Dict(..))
} }
/// Creates a union type from two types.
pub fn union(lhs: Option<Ty>, rhs: Option<Ty>) -> Option<Ty> { pub fn union(lhs: Option<Ty>, rhs: Option<Ty>) -> Option<Ty> {
Some(match (lhs, rhs) { Some(match (lhs, rhs) {
(Some(lhs), Some(rhs)) => Ty::from_types([lhs, rhs].into_iter()), (Some(lhs), Some(rhs)) => Ty::from_types([lhs, rhs].into_iter()),
@ -149,7 +150,7 @@ impl Ty {
}) })
} }
/// Create a union type from an iterator of types /// Creates a union type from an iterator of types.
pub fn from_types(iter: impl ExactSizeIterator<Item = Ty>) -> Self { pub fn from_types(iter: impl ExactSizeIterator<Item = Ty>) -> Self {
if iter.len() == 0 { if iter.len() == 0 {
Ty::Any Ty::Any
@ -161,20 +162,20 @@ impl Ty {
} }
} }
/// Create a union type from an iterator of types /// Creates a union type from an iterator of types.
pub fn iter_union(iter: impl IntoIterator<Item = Ty>) -> Self { pub fn iter_union(iter: impl IntoIterator<Item = Ty>) -> Self {
let mut v: Vec<Ty> = iter.into_iter().collect(); let mut v: Vec<Ty> = iter.into_iter().collect();
v.sort(); v.sort();
Ty::Union(Interned::new(v)) Ty::Union(Interned::new(v))
} }
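The union constructor just above depends on a canonical ordering: sorting the members means two unions assembled in different orders end up structurally identical, so interning can deduplicate them. A standalone sketch of that invariant:

```rust
/// Mirrors the canonicalization in `Ty::iter_union`: sort members so unions
/// built in different orders compare equal.
fn canonical_union(members: impl IntoIterator<Item = String>) -> Vec<String> {
    let mut v: Vec<String> = members.into_iter().collect();
    v.sort();
    v
}

fn main() {
    let a = canonical_union(["str".to_string(), "int".to_string()]);
    let b = canonical_union(["int".to_string(), "str".to_string()]);
    assert_eq!(a, b);
}
```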
/// Create an undefined type (which will emit an error) /// Creates an undefined type (which will emit an error).
/// A that type is annotated if the syntax structure causes an type error /// Such a type is annotated if the syntax structure causes a type error.
pub const fn undef() -> Self { pub const fn undef() -> Self {
Ty::Builtin(BuiltinTy::Undef) Ty::Builtin(BuiltinTy::Undef)
} }
/// Get name of the type /// Gets the name of the type.
pub fn name(&self) -> Interned<str> { pub fn name(&self) -> Interned<str> {
match self { match self {
Ty::Var(v) => v.name.clone(), Ty::Var(v) => v.name.clone(),
@ -186,7 +187,7 @@ impl Ty {
} }
} }
/// Get span of the type /// Gets the span of the type.
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
fn seq(u: &[Ty]) -> Option<Span> { fn seq(u: &[Ty]) -> Option<Span> {
u.iter().find_map(|ty| { u.iter().find_map(|ty| {
@ -208,7 +209,7 @@ impl Ty {
} }
} }
/// Get value repr of the type /// Gets the value repr of the type.
pub fn value(&self) -> Option<Value> { pub fn value(&self) -> Option<Value> {
match self { match self {
Ty::Value(v) => Some(v.val.clone()), Ty::Value(v) => Some(v.val.clone()),
@ -218,7 +219,7 @@ impl Ty {
} }
} }
/// Get as element type /// Gets the element type.
pub fn element(&self) -> Option<Element> { pub fn element(&self) -> Option<Element> {
match self { match self {
Ty::Value(ins_ty) => match &ins_ty.val { Ty::Value(ins_ty) => match &ins_ty.val {
@ -230,10 +231,12 @@ impl Ty {
} }
} }
/// Checks a type against a context.
pub fn satisfy<T: TyCtx>(&self, ctx: &T, f: impl FnMut(&Ty, bool)) { pub fn satisfy<T: TyCtx>(&self, ctx: &T, f: impl FnMut(&Ty, bool)) {
self.bounds(true, &mut BoundPred::new(ctx, f)); self.bounds(true, &mut BoundPred::new(ctx, f));
} }
/// Checks if the type is a content type.
pub fn is_content<T: TyCtx>(&self, ctx: &T) -> bool { pub fn is_content<T: TyCtx>(&self, ctx: &T) -> bool {
let mut res = false; let mut res = false;
self.satisfy(ctx, |ty: &Ty, _pol| { self.satisfy(ctx, |ty: &Ty, _pol| {
@ -249,6 +252,7 @@ impl Ty {
res res
} }
/// Checks if the type is a string type.
pub fn is_str<T: TyCtx>(&self, ctx: &T) -> bool { pub fn is_str<T: TyCtx>(&self, ctx: &T) -> bool {
let mut res = false; let mut res = false;
self.satisfy(ctx, |ty: &Ty, _pol| { self.satisfy(ctx, |ty: &Ty, _pol| {
@ -263,6 +267,7 @@ impl Ty {
res res
} }
/// Checks if the type is a type type.
pub fn is_type<T: TyCtx>(&self, ctx: &T) -> bool { pub fn is_type<T: TyCtx>(&self, ctx: &T) -> bool {
let mut res = false; let mut res = false;
self.satisfy(ctx, |ty: &Ty, _pol| { self.satisfy(ctx, |ty: &Ty, _pol| {
@ -279,25 +284,28 @@ impl Ty {
} }
} }
/// Checks if the type is a content builtin type.
fn is_content_builtin_type(ty: &Type) -> bool { fn is_content_builtin_type(ty: &Type) -> bool {
*ty == Type::of::<Content>() || *ty == Type::of::<typst::foundations::Symbol>() *ty == Type::of::<Content>() || *ty == Type::of::<typst::foundations::Symbol>()
} }
/// Checks if the type is a string builtin type.
fn is_str_builtin_type(ty: &Type) -> bool { fn is_str_builtin_type(ty: &Type) -> bool {
*ty == Type::of::<typst::foundations::Str>() *ty == Type::of::<typst::foundations::Str>()
} }
/// Checks if the type is a type builtin type.
fn is_type_builtin_type(ty: &Type) -> bool { fn is_type_builtin_type(ty: &Type) -> bool {
*ty == Type::of::<Type>() *ty == Type::of::<Type>()
} }
/// A function parameter type /// A function parameter type.
pub enum TypeSigParam<'a> { pub enum TypeSigParam<'a> {
/// A positional parameter /// A positional parameter: `a`
Pos(&'a Ty), Pos(&'a Ty),
/// A named parameter /// A named parameter: `b: c`
Named(&'a StrRef, &'a Ty), Named(&'a StrRef, &'a Ty),
/// A rest parameter (spread right) /// A rest parameter (spread right): `..d`
Rest(&'a Ty), Rest(&'a Ty),
} }
@ -306,20 +314,21 @@ impl fmt::Debug for TypeSigParam<'_> {
match self { match self {
TypeSigParam::Pos(ty) => write!(f, "{ty:?}"), TypeSigParam::Pos(ty) => write!(f, "{ty:?}"),
TypeSigParam::Named(name, ty) => write!(f, "{name:?}: {ty:?}"), TypeSigParam::Named(name, ty) => write!(f, "{name:?}: {ty:?}"),
// todo: the rest is not three dots
TypeSigParam::Rest(ty) => write!(f, "...: {ty:?}"), TypeSigParam::Rest(ty) => write!(f, "...: {ty:?}"),
} }
} }
} }
/// The syntax source (definition) of a type node /// The syntax source (definition) of a type node.
/// todo: whether we should store them in the type node /// todo: whether we should store them in the type node
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct TypeSource { pub struct TypeSource {
/// A name node with span /// A name node with span.
pub name_node: SyntaxNode, pub name_node: SyntaxNode,
/// A lazy evaluated name /// A lazy evaluated name.
pub name_repr: OnceLock<StrRef>, pub name_repr: OnceLock<StrRef>,
/// Attached documentation /// The attached documentation.
pub doc: StrRef, pub doc: StrRef,
} }
@ -331,7 +340,7 @@ impl Hash for TypeSource {
} }
impl TypeSource { impl TypeSource {
/// Get name of the type node /// Gets the name of the type node.
pub fn name(&self) -> StrRef { pub fn name(&self) -> StrRef {
self.name_repr self.name_repr
.get_or_init(|| { .get_or_init(|| {
@ -346,15 +355,15 @@ impl TypeSource {
} }
} }
/// An ordered list of names /// An ordered list of names.
#[derive(Debug, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct NameBone { pub struct NameBone {
/// The names in the bone /// The names in the bone.
pub names: Box<[StrRef]>, pub names: Box<[StrRef]>,
} }
impl NameBone { impl NameBone {
/// Create an empty bone /// Creates an empty bone.
pub fn empty() -> Interned<Self> { pub fn empty() -> Interned<Self> {
Interned::new(Self { Interned::new(Self {
names: Box::new([]), names: Box::new([]),
@ -363,14 +372,14 @@ impl NameBone {
} }
impl NameBone { impl NameBone {
/// Find the index of the name in the bone /// Finds the index of the name in the bone.
pub fn find(&self, name: &StrRef) -> Option<usize> { pub fn find(&self, name: &StrRef) -> Option<usize> {
self.names.binary_search_by(|probe| probe.cmp(name)).ok() self.names.binary_search_by(|probe| probe.cmp(name)).ok()
} }
} }
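Since the names in a bone are kept sorted, `find` is a plain binary search. A standalone sketch of the same lookup over ordinary string slices (the crate's interned `StrRef` is omitted here):

```rust
/// Mirrors `NameBone::find`: the name list must already be sorted.
fn find(names: &[&str], name: &str) -> Option<usize> {
    names.binary_search_by(|probe| probe.cmp(&name)).ok()
}

fn main() {
    // Sorted field names, as `RecordTy::shape_fields` would produce them.
    let names = ["cap", "paint", "thickness"];
    assert_eq!(find(&names, "paint"), Some(1));
    assert_eq!(find(&names, "dash"), None);
}
```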
impl NameBone { impl NameBone {
/// Intersect the names of two bones /// Intersects the names of two bones.
pub fn intersect_enumerate<'a>( pub fn intersect_enumerate<'a>(
&'a self, &'a self,
rhs: &'a NameBone, rhs: &'a NameBone,
@ -406,7 +415,7 @@ impl NameBone {
} }
} }
/// The state of a type variable (bounds of some type in program) /// The state of a type variable (bounds of some type in program).
#[derive(Clone, Default)] #[derive(Clone, Default)]
pub struct DynTypeBounds { pub struct DynTypeBounds {
/// The lower bounds /// The lower bounds
@ -425,7 +434,7 @@ impl From<TypeBounds> for DynTypeBounds {
} }
impl DynTypeBounds { impl DynTypeBounds {
/// Get frozen bounds /// Gets the frozen bounds.
pub fn freeze(&self) -> TypeBounds { pub fn freeze(&self) -> TypeBounds {
// sorted // sorted
let mut lbs: Vec<_> = self.lbs.iter().cloned().collect(); let mut lbs: Vec<_> = self.lbs.iter().cloned().collect();
@ -436,14 +445,14 @@ impl DynTypeBounds {
} }
} }
/// A frozen type variable (bounds of some type in program) /// A frozen type variable (bounds of some type in program).
/// `t :> t1 | ... | tn <: f1 & ... & fn` /// `t :> t1 | ... | tn <: f1 & ... & fn`
/// ` lbs------------- ubs-------------` /// ` lbs------------- ubs-------------`
#[derive(Hash, Clone, PartialEq, Eq, Default, PartialOrd, Ord)] #[derive(Hash, Clone, PartialEq, Eq, Default, PartialOrd, Ord)]
pub struct TypeBounds { pub struct TypeBounds {
/// The lower bounds /// The lower bounds.
pub lbs: Vec<Ty>, pub lbs: Vec<Ty>,
/// The upper bounds /// The upper bounds.
pub ubs: Vec<Ty>, pub ubs: Vec<Ty>,
} }
@ -467,17 +476,17 @@ impl fmt::Debug for TypeBounds {
} }
} }
/// A common type kinds for those types that has fields (Abstracted record /// A common type kind for those types that have fields (abstracted record
/// type). /// type).
pub trait TypeInterface { pub trait TypeInterface {
/// Get the bone of a record. /// Gets the bone of a record.
/// See [`NameBone`] for more details. /// See [`NameBone`] for more details.
fn bone(&self) -> &Interned<NameBone>; fn bone(&self) -> &Interned<NameBone>;
/// Iterate over the fields of a record. /// Iterates over the fields of a record.
fn interface(&self) -> impl Iterator<Item = (&StrRef, &Ty)>; fn interface(&self) -> impl Iterator<Item = (&StrRef, &Ty)>;
/// Get the field by bone offset. /// Gets the field by bone offset.
fn field_by_bone_offset(&self, idx: usize) -> Option<&Ty>; fn field_by_bone_offset(&self, idx: usize) -> Option<&Ty>;
/// Get the field by name. /// Gets the field by name.
fn field_by_name(&self, name: &StrRef) -> Option<&Ty> { fn field_by_name(&self, name: &StrRef) -> Option<&Ty> {
self.field_by_bone_offset(self.bone().find(name)?) self.field_by_bone_offset(self.bone().find(name)?)
} }
@ -505,12 +514,12 @@ pub trait TypeInterfaceExt: TypeInterface {
impl<T: TypeInterface> TypeInterfaceExt for T {} impl<T: TypeInterface> TypeInterfaceExt for T {}
/// An instance of a typst type /// An instance of a typst type.
#[derive(Debug, Hash, Clone, PartialEq)] #[derive(Debug, Hash, Clone, PartialEq)]
pub struct InsTy { pub struct InsTy {
/// The value of the instance /// The value of the instance.
pub val: Value, pub val: Value,
/// The syntax source of the instance /// The syntax source of the instance.
pub syntax: Option<Interned<TypeSource>>, pub syntax: Option<Interned<TypeSource>>,
} }
@ -533,12 +542,12 @@ impl Ord for InsTy {
} }
impl InsTy { impl InsTy {
/// Create a instance /// Creates an instance.
pub fn new(val: Value) -> Interned<Self> { pub fn new(val: Value) -> Interned<Self> {
Self { val, syntax: None }.into() Self { val, syntax: None }.into()
} }
/// Create a instance with a sapn /// Creates an instance with a span.
pub fn new_at(val: Value, span: Span) -> Interned<Self> { pub fn new_at(val: Value, span: Span) -> Interned<Self> {
let mut name = SyntaxNode::leaf(SyntaxKind::Ident, ""); let mut name = SyntaxNode::leaf(SyntaxKind::Ident, "");
name.synthesize(span); name.synthesize(span);
@ -551,7 +560,8 @@ impl InsTy {
})), })),
}) })
} }
/// Create a instance with a documentation string
/// Creates an instance with a documentation string.
pub fn new_doc(val: Value, doc: impl Into<StrRef>) -> Interned<Self> { pub fn new_doc(val: Value, doc: impl Into<StrRef>) -> Interned<Self> {
Interned::new(Self { Interned::new(Self {
val, val,
@ -563,7 +573,7 @@ impl InsTy {
}) })
} }
/// Get the span of the instance /// Gets the span of the instance.
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
self.syntax self.syntax
.as_ref() .as_ref()
@ -580,25 +590,26 @@ impl InsTy {
} }
} }
/// Describes a function parameter. /// Describes a function parameter attribute.
#[derive( #[derive(
Debug, Clone, Copy, Hash, Serialize, Deserialize, Default, PartialEq, Eq, PartialOrd, Ord, Debug, Clone, Copy, Hash, Serialize, Deserialize, Default, PartialEq, Eq, PartialOrd, Ord,
)] )]
pub struct ParamAttrs { pub struct ParamAttrs {
/// Is the parameter positional? /// Whether the parameter is positional.
pub positional: bool, pub positional: bool,
/// Is the parameter named? /// Whether the parameter is named.
/// ///
/// Can be true even if `positional` is true if the parameter can be given /// Can be true even if `positional` is true if the parameter can be given
/// in both variants. /// in both variants.
pub named: bool, pub named: bool,
/// Can the parameter be given any number of times? /// Whether the parameter can be given any number of times.
pub variadic: bool, pub variadic: bool,
/// Is the parameter settable with a set rule? /// Whether the parameter is settable with a set rule.
pub settable: bool, pub settable: bool,
} }
impl ParamAttrs { impl ParamAttrs {
/// Creates a positional parameter attribute.
pub fn positional() -> ParamAttrs { pub fn positional() -> ParamAttrs {
ParamAttrs { ParamAttrs {
positional: true, positional: true,
@ -608,6 +619,7 @@ impl ParamAttrs {
} }
} }
/// Creates a named parameter attribute.
pub fn named() -> ParamAttrs { pub fn named() -> ParamAttrs {
ParamAttrs { ParamAttrs {
positional: false, positional: false,
@ -617,6 +629,7 @@ impl ParamAttrs {
} }
} }
/// Creates a variadic parameter attribute.
pub fn variadic() -> ParamAttrs { pub fn variadic() -> ParamAttrs {
ParamAttrs { ParamAttrs {
positional: true, positional: true,
@ -645,7 +658,7 @@ pub struct ParamTy {
pub name: StrRef, pub name: StrRef,
/// The docstring of the parameter. /// The docstring of the parameter.
pub docs: Option<EcoString>, pub docs: Option<EcoString>,
/// The default value of the variable /// The default value of the variable.
pub default: Option<EcoString>, pub default: Option<EcoString>,
/// The type of the parameter. /// The type of the parameter.
pub ty: Ty, pub ty: Ty,
@ -654,12 +667,12 @@ pub struct ParamTy {
} }
impl ParamTy { impl ParamTy {
/// Create an untyped field type /// Creates an untyped field type.
pub fn new_untyped(name: StrRef, attrs: ParamAttrs) -> Interned<Self> { pub fn new_untyped(name: StrRef, attrs: ParamAttrs) -> Interned<Self> {
Self::new(Ty::Any, name, attrs) Self::new(Ty::Any, name, attrs)
} }
/// Create a typed field type /// Creates a typed field type.
pub fn new(ty: Ty, name: StrRef, attrs: ParamAttrs) -> Interned<Self> { pub fn new(ty: Ty, name: StrRef, attrs: ParamAttrs) -> Interned<Self> {
Interned::new(Self { Interned::new(Self {
name, name,
@ -671,12 +684,12 @@ impl ParamTy {
} }
} }
/// A type variable /// A type variable.
#[derive(Hash, Clone, PartialEq, Eq)] #[derive(Hash, Clone, PartialEq, Eq)]
pub struct TypeVar { pub struct TypeVar {
/// The name of the type variable /// The name of the type variable.
pub name: StrRef, pub name: StrRef,
/// The definition id of the type variable /// The definition id of the type variable.
pub def: DeclExpr, pub def: DeclExpr,
} }
@ -708,28 +721,28 @@ impl fmt::Debug for TypeVar {
} }
impl TypeVar { impl TypeVar {
/// Create a type variable /// Creates a type variable.
pub fn new(name: StrRef, def: DeclExpr) -> Interned<Self> { pub fn new(name: StrRef, def: DeclExpr) -> Interned<Self> {
Interned::new(Self { name, def }) Interned::new(Self { name, def })
} }
/// Get the name of the type variable /// Gets the name of the type variable.
pub fn name(&self) -> StrRef { pub fn name(&self) -> StrRef {
self.name.clone() self.name.clone()
} }
} }
/// A record type /// A record type.
#[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct RecordTy { pub struct RecordTy {
/// The names of the fields /// The names of the fields.
pub names: Interned<NameBone>, pub names: Interned<NameBone>,
/// The types of the fields /// The types of the fields.
pub types: Interned<Vec<Ty>>, pub types: Interned<Vec<Ty>>,
} }
impl RecordTy { impl RecordTy {
/// Shape the fields of a record /// Shapes the fields of a record.
pub fn shape_fields(mut fields: Vec<(StrRef, Ty)>) -> (NameBone, Vec<Ty>) { pub fn shape_fields(mut fields: Vec<(StrRef, Ty)>) -> (NameBone, Vec<Ty>) {
fields.sort_by(|a, b| a.0.cmp(&b.0)); fields.sort_by(|a, b| a.0.cmp(&b.0));
let names = NameBone { let names = NameBone {
@ -740,7 +753,7 @@ impl RecordTy {
(names, types) (names, types)
} }
/// Create a record type /// Creates a record type.
pub fn new(fields: Vec<(StrRef, Ty)>) -> Interned<Self> { pub fn new(fields: Vec<(StrRef, Ty)>) -> Interned<Self> {
let (names, types) = Self::shape_fields(fields); let (names, types) = Self::shape_fields(fields);
Interned::new(Self { Interned::new(Self {
@ -776,25 +789,38 @@ impl fmt::Debug for RecordTy {
} }
} }
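`RecordTy::shape_fields` canonicalizes a record by sorting its fields by name and splitting them into parallel name and type lists, which is what makes the binary-search lookup in `NameBone::find` valid. A standalone sketch with plain strings:

```rust
/// Mirrors `RecordTy::shape_fields`: sort by field name, then split into
/// parallel `names` and `types` vectors.
fn shape_fields(mut fields: Vec<(String, String)>) -> (Vec<String>, Vec<String>) {
    fields.sort_by(|a, b| a.0.cmp(&b.0));
    fields.into_iter().unzip()
}

fn main() {
    let (names, types) = shape_fields(vec![
        ("thickness".into(), "length".into()),
        ("paint".into(), "color".into()),
    ]);
    assert_eq!(names, ["paint", "thickness"]);
    assert_eq!(types, ["color", "length"]);
}
```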
/// A typst function type /// A typst function type.
#[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SigTy { pub struct SigTy {
/// The input types of the function /// The input types of the function.
pub inputs: Interned<Vec<Ty>>, pub inputs: Interned<Vec<Ty>>,
/// The return (body) type of the function /// The return (body) type of the function.
pub body: Option<Ty>, pub body: Option<Ty>,
/// The name bone of the named parameters /// The name bone of the named parameters.
pub names: Interned<NameBone>, pub names: Interned<NameBone>,
/// The index of the first named parameter /// The index of the first named parameter.
pub name_started: u32, pub name_started: u32,
/// Whether the function has a spread left parameter /// Whether the function has a spread left parameter.
pub spread_left: bool, pub spread_left: bool,
/// Whether the function has a spread right parameter /// Whether the function has a spread right parameter.
pub spread_right: bool, pub spread_right: bool,
} }
impl SigTy { impl SigTy {
/// Array constructor /// Creates a function that accepts any arguments: `(a, b: c, ..d)`
pub fn any() -> Interned<SigTy> {
let rest = Ty::Array(Interned::new(Ty::Any));
Interned::new(Self {
inputs: Interned::new(vec![rest]),
body: Some(Ty::Any),
names: NameBone::empty(),
name_started: 0,
spread_left: false,
spread_right: true,
})
}
/// Creates an array constructor: `(a)`
#[comemo::memoize] #[comemo::memoize]
pub fn array_cons(elem: Ty, anyify: bool) -> Interned<SigTy> { pub fn array_cons(elem: Ty, anyify: bool) -> Interned<SigTy> {
let rest = Ty::Array(Interned::new(elem.clone())); let rest = Ty::Array(Interned::new(elem.clone()));
@ -809,20 +835,7 @@ impl SigTy {
}) })
} }
/// Any constructor /// Creates a unary constructor: `(a) => b`
pub fn any() -> Interned<SigTy> {
let rest = Ty::Array(Interned::new(Ty::Any));
Interned::new(Self {
inputs: Interned::new(vec![rest]),
body: Some(Ty::Any),
names: NameBone::empty(),
name_started: 0,
spread_left: false,
spread_right: true,
})
}
/// Unary constructor
#[comemo::memoize] #[comemo::memoize]
pub fn unary(inp: Ty, ret: Ty) -> Interned<SigTy> { pub fn unary(inp: Ty, ret: Ty) -> Interned<SigTy> {
Interned::new(Self { Interned::new(Self {
@ -835,7 +848,7 @@ impl SigTy {
}) })
} }
/// Tuple constructor /// Creates a tuple constructor: `(a, b, c)`
#[comemo::memoize] #[comemo::memoize]
pub fn tuple_cons(elems: Interned<Vec<Ty>>, anyify: bool) -> Interned<SigTy> { pub fn tuple_cons(elems: Interned<Vec<Ty>>, anyify: bool) -> Interned<SigTy> {
let ret = if anyify { let ret = if anyify {
@ -854,7 +867,7 @@ impl SigTy {
}) })
} }
/// Dictionary constructor /// Creates a dictionary constructor: `(a: b, c: d)`
#[comemo::memoize] #[comemo::memoize]
pub fn dict_cons(named: &Interned<RecordTy>, anyify: bool) -> Interned<SigTy> { pub fn dict_cons(named: &Interned<RecordTy>, anyify: bool) -> Interned<SigTy> {
let ret = if anyify { let ret = if anyify {
@ -873,12 +886,13 @@ impl SigTy {
}) })
} }
/// Sets the return type of the function.
pub fn with_body(mut self, res_ty: Ty) -> Self { pub fn with_body(mut self, res_ty: Ty) -> Self {
self.body = Some(res_ty); self.body = Some(res_ty);
self self
} }
/// Create a function type /// Creates a function type.
pub fn new( pub fn new(
pos: impl ExactSizeIterator<Item = Ty>, pos: impl ExactSizeIterator<Item = Ty>,
named: impl IntoIterator<Item = (StrRef, Ty)>, named: impl IntoIterator<Item = (StrRef, Ty)>,
@ -943,24 +957,24 @@ impl TypeInterface for SigTy {
} }
impl SigTy { impl SigTy {
/// Get the input types of the function /// Gets the input types of the function.
pub fn inputs(&self) -> impl Iterator<Item = &Ty> { pub fn inputs(&self) -> impl Iterator<Item = &Ty> {
self.inputs.iter() self.inputs.iter()
} }
/// Get the positional parameters of the function /// Gets the positional parameters of the function.
pub fn positional_params(&self) -> impl ExactSizeIterator<Item = &Ty> { pub fn positional_params(&self) -> impl ExactSizeIterator<Item = &Ty> {
self.inputs.iter().take(self.name_started as usize) self.inputs.iter().take(self.name_started as usize)
} }
/// Get the parameter at the given index /// Gets the parameter at the given index.
pub fn pos(&self, idx: usize) -> Option<&Ty> { pub fn pos(&self, idx: usize) -> Option<&Ty> {
(idx < self.name_started as usize) (idx < self.name_started as usize)
.then_some(()) .then_some(())
.and_then(|_| self.inputs.get(idx)) .and_then(|_| self.inputs.get(idx))
} }
/// Get the parameter or the rest parameter at the given index /// Gets the parameter or the rest parameter at the given index.
pub fn pos_or_rest(&self, idx: usize) -> Option<Ty> { pub fn pos_or_rest(&self, idx: usize) -> Option<Ty> {
let nth = self.pos(idx).cloned(); let nth = self.pos(idx).cloned();
nth.or_else(|| { nth.or_else(|| {
@ -975,7 +989,7 @@ impl SigTy {
}) })
} }
/// Get the named parameters of the function /// Gets the named parameters of the function.
pub fn named_params(&self) -> impl ExactSizeIterator<Item = (&StrRef, &Ty)> { pub fn named_params(&self) -> impl ExactSizeIterator<Item = (&StrRef, &Ty)> {
let named_names = self.names.names.iter(); let named_names = self.names.names.iter();
let named_types = self.inputs.iter().skip(self.name_started as usize); let named_types = self.inputs.iter().skip(self.name_started as usize);
@ -983,13 +997,13 @@ impl SigTy {
named_names.zip(named_types) named_names.zip(named_types)
} }
/// Get the named parameter by given name /// Gets the named parameter by given name.
pub fn named(&self, name: &StrRef) -> Option<&Ty> { pub fn named(&self, name: &StrRef) -> Option<&Ty> {
let idx = self.names.find(name)?; let idx = self.names.find(name)?;
self.inputs.get(idx + self.name_started as usize) self.inputs.get(idx + self.name_started as usize)
} }
/// Get the rest parameter of the function /// Gets the rest parameter of the function.
pub fn rest_param(&self) -> Option<&Ty> { pub fn rest_param(&self) -> Option<&Ty> {
if self.spread_right { if self.spread_right {
self.inputs.last() self.inputs.last()
@ -998,7 +1012,7 @@ impl SigTy {
} }
} }
/// Match the function type with the given arguments /// Matches the function type with the given arguments.
pub fn matches<'a>( pub fn matches<'a>(
&'a self, &'a self,
args: &'a SigTy, args: &'a SigTy,
@ -1062,23 +1076,23 @@ impl fmt::Debug for SigTy {
} }
} }
/// A function argument type /// A function argument type.
pub type ArgsTy = SigTy; pub type ArgsTy = SigTy;
/// A pattern type /// A pattern type.
pub type PatternTy = SigTy; pub type PatternTy = SigTy;
/// A type with partially applied arguments /// A type with partially applied arguments.
#[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SigWithTy { pub struct SigWithTy {
/// The signature of the function /// The signature of the function.
pub sig: TyRef, pub sig: TyRef,
/// The arguments applied to the function /// The arguments applied to the function.
pub with: Interned<ArgsTy>, pub with: Interned<ArgsTy>,
} }
impl SigWithTy { impl SigWithTy {
/// Create a type with applied arguments /// Creates a type with applied arguments.
pub fn new(sig: TyRef, with: Interned<ArgsTy>) -> Interned<Self> { pub fn new(sig: TyRef, with: Interned<ArgsTy>) -> Interned<Self> {
Interned::new(Self { sig, with }) Interned::new(Self { sig, with })
} }
@ -1090,17 +1104,17 @@ impl fmt::Debug for SigWithTy {
} }
} }
/// A field selection type /// A field selection type.
#[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SelectTy { pub struct SelectTy {
/// The type to select from /// The type to select from.
pub ty: TyRef, pub ty: TyRef,
/// The field to select /// The field to select
pub select: StrRef, pub select: StrRef,
} }
impl SelectTy { impl SelectTy {
/// Create a field selection type /// Creates a field selection type.
pub fn new(ty: TyRef, select: StrRef) -> Interned<Self> { pub fn new(ty: TyRef, select: StrRef) -> Interned<Self> {
Interned::new(Self { ty, select }) Interned::new(Self { ty, select })
} }
@ -1112,10 +1126,10 @@ impl fmt::Debug for SelectTy {
} }
} }
/// A unary operation type /// A unary operation type.
#[derive(Debug, Hash, Clone, PartialEq, Eq)] #[derive(Debug, Hash, Clone, PartialEq, Eq)]
pub struct TypeUnary { pub struct TypeUnary {
/// The operand of the unary operation /// The operand of the unary operation.
pub lhs: Ty, pub lhs: Ty,
/// The kind of the unary operation /// The kind of the unary operation
pub op: UnaryOp, pub op: UnaryOp,
@ -1138,26 +1152,26 @@ impl Ord for TypeUnary {
} }
impl TypeUnary { impl TypeUnary {
/// Create a unary operation type /// Creates a unary operation type.
pub fn new(op: UnaryOp, lhs: Ty) -> Interned<Self> { pub fn new(op: UnaryOp, lhs: Ty) -> Interned<Self> {
Interned::new(Self { lhs, op }) Interned::new(Self { lhs, op })
} }
/// Get the operands of the unary operation /// Gets the operands of the unary operation.
pub fn operands(&self) -> [&Ty; 1] { pub fn operands(&self) -> [&Ty; 1] {
[&self.lhs] [&self.lhs]
} }
} }
/// The kind of binary operation /// The kind of binary operation.
pub type BinaryOp = ast::BinOp; pub type BinaryOp = ast::BinOp;
/// A binary operation type /// A binary operation type.
#[derive(Debug, Hash, Clone, PartialEq, Eq)] #[derive(Debug, Hash, Clone, PartialEq, Eq)]
pub struct TypeBinary { pub struct TypeBinary {
/// The operands of the binary operation /// The operands of the binary operation.
pub operands: (Ty, Ty), pub operands: (Ty, Ty),
/// The kind of the binary operation /// The kind of the binary operation.
pub op: BinaryOp, pub op: BinaryOp,
} }
@ -1178,7 +1192,7 @@ impl Ord for TypeBinary {
} }
impl TypeBinary { impl TypeBinary {
/// Create a binary operation type /// Creates a binary operation type.
pub fn new(op: BinaryOp, lhs: Ty, rhs: Ty) -> Interned<Self> { pub fn new(op: BinaryOp, lhs: Ty, rhs: Ty) -> Interned<Self> {
Interned::new(Self { Interned::new(Self {
operands: (lhs, rhs), operands: (lhs, rhs),
@ -1186,51 +1200,51 @@ impl TypeBinary {
}) })
} }
/// Get the operands of the binary operation /// Gets the operands of the binary operation.
pub fn operands(&self) -> [&Ty; 2] { pub fn operands(&self) -> [&Ty; 2] {
[&self.operands.0, &self.operands.1] [&self.operands.0, &self.operands.1]
} }
} }
/// A conditional type /// A conditional type.
/// `if t1 then t2 else t3` /// `if t1 then t2 else t3`
#[derive(Debug, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Hash, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct IfTy { pub struct IfTy {
/// The condition /// The condition.
pub cond: TyRef, pub cond: TyRef,
/// The type when the condition is true /// The type when the condition is true.
pub then: TyRef, pub then: TyRef,
/// The type when the condition is false /// The type when the condition is false.
pub else_: TyRef, pub else_: TyRef,
} }
impl IfTy { impl IfTy {
/// Create a conditional type /// Creates a conditional type.
pub fn new(cond: TyRef, then: TyRef, else_: TyRef) -> Interned<Self> { pub fn new(cond: TyRef, then: TyRef, else_: TyRef) -> Interned<Self> {
Interned::new(Self { cond, then, else_ }) Interned::new(Self { cond, then, else_ })
} }
} }
/// The type information on a group of syntax structures (typing) /// The type information on a group of syntax structures (typing).
#[derive(Default)] #[derive(Default)]
pub struct TypeInfo { pub struct TypeInfo {
/// Whether the typing is valid /// Whether the typing is valid.
pub valid: bool, pub valid: bool,
/// The belonging file id /// The belonging file id.
pub fid: Option<FileId>, pub fid: Option<FileId>,
/// The revision used /// The used revision.
pub revision: usize, pub revision: usize,
/// The exported types /// The exported types.
pub exports: FxHashMap<StrRef, Ty>, pub exports: FxHashMap<StrRef, Ty>,
/// The typing on definitions /// The typing on definitions.
pub vars: FxHashMap<DeclExpr, TypeVarBounds>, pub vars: FxHashMap<DeclExpr, TypeVarBounds>,
/// The checked documentation of definitions /// The checked documentation of definitions.
pub var_docs: FxHashMap<DeclExpr, Arc<UntypedDefDocs>>, pub var_docs: FxHashMap<DeclExpr, Arc<UntypedDefDocs>>,
/// The local binding of the type variable /// The local binding of the type variable.
pub local_binds: snapshot_map::SnapshotMap<DeclExpr, Ty>, pub local_binds: snapshot_map::SnapshotMap<DeclExpr, Ty>,
/// The typing on syntax structures /// The typing on syntax structures.
pub mapping: FxHashMap<Span, FxHashSet<Ty>>, pub mapping: FxHashMap<Span, FxHashSet<Ty>>,
/// The cache to canonicalize types.
pub(super) cano_cache: Mutex<TypeCanoStore>, pub(super) cano_cache: Mutex<TypeCanoStore>,
} }
@ -1254,7 +1268,7 @@ impl TyCtx for TypeInfo {
} }
impl TypeInfo { impl TypeInfo {
/// Gets the type of a syntax structure /// Gets the type of a syntax structure.
pub fn type_of_span(&self, site: Span) -> Option<Ty> { pub fn type_of_span(&self, site: Span) -> Option<Ty> {
self.mapping self.mapping
.get(&site) .get(&site)
@ -1263,16 +1277,16 @@ impl TypeInfo {
} }
// todo: distinguish at least, at most // todo: distinguish at least, at most
/// Witnesses a lower-bound type on a syntax structure /// Witnesses a lower-bound type on a syntax structure.
pub fn witness_at_least(&mut self, site: Span, ty: Ty) { pub fn witness_at_least(&mut self, site: Span, ty: Ty) {
Self::witness_(site, ty, &mut self.mapping); Self::witness_(site, ty, &mut self.mapping);
} }
/// Witnesses a upper-bound type on a syntax structure /// Witnesses an upper-bound type on a syntax structure.
pub fn witness_at_most(&mut self, site: Span, ty: Ty) { pub fn witness_at_most(&mut self, site: Span, ty: Ty) {
Self::witness_(site, ty, &mut self.mapping); Self::witness_(site, ty, &mut self.mapping);
} }
/// Witnesses a type /// Witnesses a type.
pub fn witness_(site: Span, ty: Ty, mapping: &mut FxHashMap<Span, FxHashSet<Ty>>) { pub fn witness_(site: Span, ty: Ty, mapping: &mut FxHashMap<Span, FxHashSet<Ty>>) {
if site.is_detached() { if site.is_detached() {
return; return;
@ -1282,7 +1296,7 @@ impl TypeInfo {
mapping.entry(site).or_default().insert(ty); mapping.entry(site).or_default().insert(ty);
} }
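The witnessing helpers above reduce to "record every observed type per source span, skipping detached spans". A self-contained approximation with standard-library types (the `Span`/`Ty` aliases here are stand-ins, not the crate's types):

```rust
use std::collections::{HashMap, HashSet};

// Stand-ins for the crate's `Span` and `Ty`; not the real types.
type Span = u64;
type Ty = &'static str;

/// Records one type observation for a span, as the `witness_*` helpers do.
fn witness(site: Span, ty: Ty, mapping: &mut HashMap<Span, HashSet<Ty>>) {
    // The real code skips detached spans; span 0 plays that role here.
    if site == 0 {
        return;
    }
    mapping.entry(site).or_default().insert(ty);
}

fn main() {
    let mut mapping = HashMap::new();
    witness(42, "int", &mut mapping);
    witness(42, "float", &mut mapping);
    witness(0, "ignored", &mut mapping);
    assert_eq!(mapping[&42].len(), 2);
    assert!(!mapping.contains_key(&0));
}
```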
/// Converts a type to a type with bounds /// Converts a type to a type with bounds.
pub fn to_bounds(&self, def: Ty) -> DynTypeBounds { pub fn to_bounds(&self, def: Ty) -> DynTypeBounds {
let mut store = DynTypeBounds::default(); let mut store = DynTypeBounds::default();
match def { match def {
@ -1345,12 +1359,12 @@ impl TyCtxMut for TypeInfo {
} }
} }
/// A type variable bounds /// A type variable with its bounds.
#[derive(Clone)] #[derive(Clone)]
pub struct TypeVarBounds { pub struct TypeVarBounds {
/// The type variable representation /// The type variable representation.
pub var: Interned<TypeVar>, pub var: Interned<TypeVar>,
/// The bounds of the type variable /// The bounds of the type variable.
pub bounds: FlowVarKind, pub bounds: FlowVarKind,
} }
@ -1361,7 +1375,7 @@ impl fmt::Debug for TypeVarBounds {
} }
impl TypeVarBounds { impl TypeVarBounds {
/// Create a type variable bounds /// Creates a type variable bounds.
pub fn new(var: TypeVar, init: DynTypeBounds) -> Self { pub fn new(var: TypeVar, init: DynTypeBounds) -> Self {
Self { Self {
var: Interned::new(var), var: Interned::new(var),
@ -1369,17 +1383,17 @@ impl TypeVarBounds {
} }
} }
/// Get the name of the type variable /// Gets the name of the type variable.
pub fn name(&self) -> &StrRef { pub fn name(&self) -> &StrRef {
&self.var.name &self.var.name
} }
/// Get self as a type /// Gets self as a type.
pub fn as_type(&self) -> Ty { pub fn as_type(&self) -> Ty {
Ty::Var(self.var.clone()) Ty::Var(self.var.clone())
} }
/// Slightly close the type variable /// Slightly closes the type variable.
pub fn weaken(&mut self) { pub fn weaken(&mut self) {
match &self.bounds { match &self.bounds {
FlowVarKind::Strong(w) => { FlowVarKind::Strong(w) => {
@ -1390,18 +1404,18 @@ impl TypeVarBounds {
} }
} }
/// A type variable bounds /// The bounds of a type variable.
#[derive(Clone)] #[derive(Clone)]
pub enum FlowVarKind { pub enum FlowVarKind {
/// A type variable that receives both types and values (type instances) /// A type variable that receives both types and values (type instances).
Strong(Arc<RwLock<DynTypeBounds>>), Strong(Arc<RwLock<DynTypeBounds>>),
/// A type variable that receives only types /// A type variable that receives only types.
/// The received values will be lifted to types /// The received values will be lifted to types.
Weak(Arc<RwLock<DynTypeBounds>>), Weak(Arc<RwLock<DynTypeBounds>>),
} }
impl FlowVarKind { impl FlowVarKind {
/// Get the bounds of the type variable /// Gets the bounds of the type variable.
pub fn bounds(&self) -> &RwLock<DynTypeBounds> { pub fn bounds(&self) -> &RwLock<DynTypeBounds> {
match self { match self {
FlowVarKind::Strong(w) | FlowVarKind::Weak(w) => w, FlowVarKind::Strong(w) | FlowVarKind::Weak(w) => w,
@ -1409,11 +1423,16 @@ impl FlowVarKind {
} }
} }
/// A cache to canonicalize types.
#[derive(Default)] #[derive(Default)]
pub(super) struct TypeCanoStore { pub(super) struct TypeCanoStore {
/// Maps a type to its canonical form.
pub cano_cache: FxHashMap<(Ty, bool), Ty>, pub cano_cache: FxHashMap<(Ty, bool), Ty>,
/// Maps a local type to its canonical form.
pub cano_local_cache: FxHashMap<(DeclExpr, bool), Ty>, pub cano_local_cache: FxHashMap<(DeclExpr, bool), Ty>,
/// The negative bounds of a type variable.
pub negatives: FxHashSet<DeclExpr>, pub negatives: FxHashSet<DeclExpr>,
/// The positive bounds of a type variable.
pub positives: FxHashSet<DeclExpr>, pub positives: FxHashSet<DeclExpr>,
} }

View file

@ -7,7 +7,7 @@ use super::{is_plain_value, term_value};
use crate::{ty::prelude::*, upstream::truncated_repr_}; use crate::{ty::prelude::*, upstream::truncated_repr_};
impl Ty { impl Ty {
/// Describe the given type. /// Describes the given type.
pub fn repr(&self) -> Option<EcoString> { pub fn repr(&self) -> Option<EcoString> {
let mut worker = TypeDescriber { let mut worker = TypeDescriber {
repr: true, repr: true,
@ -16,7 +16,7 @@ impl Ty {
worker.describe_root(self) worker.describe_root(self)
} }
/// Describe available value instances of the given type. /// Describes available value instances of the given type.
pub fn value_repr(&self) -> Option<EcoString> { pub fn value_repr(&self) -> Option<EcoString> {
let mut worker = TypeDescriber { let mut worker = TypeDescriber {
repr: true, repr: true,
@ -48,16 +48,23 @@ impl Ty {
// }; // };
} }
/// A worker to describe types.
#[derive(Default)] #[derive(Default)]
struct TypeDescriber { struct TypeDescriber {
/// Whether to describe the representation of the type.
repr: bool, repr: bool,
/// Whether to describe the value instances of the type.
value: bool, value: bool,
/// The cache to describe types.
described: HashMap<u128, EcoString>, described: HashMap<u128, EcoString>,
/// The results of the description.
results: HashSet<EcoString>, results: HashSet<EcoString>,
/// The functions to describe.
functions: Vec<Interned<SigTy>>, functions: Vec<Interned<SigTy>>,
} }
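The describer's two collections serve different purposes: `described` memoizes per-type renderings while `results` deduplicates the final strings. A toy, self-contained version of that caching pattern (hypothetical names, plain `String` instead of `EcoString`):

```rust
use std::collections::{HashMap, HashSet};

/// A toy describer: memoize per-type descriptions and deduplicate results.
#[derive(Default)]
struct MiniDescriber {
    described: HashMap<u64, String>,
    results: HashSet<String>,
}

impl MiniDescriber {
    fn describe(&mut self, key: u64, render: impl FnOnce() -> String) -> String {
        if let Some(hit) = self.described.get(&key) {
            return hit.clone();
        }
        let desc = render();
        self.described.insert(key, desc.clone());
        self.results.insert(desc.clone());
        desc
    }
}

fn main() {
    let mut describer = MiniDescriber::default();
    describer.describe(1, || "array".to_string());
    // The second call never invokes the renderer: it is answered from the cache.
    describer.describe(1, || unreachable!("cached"));
    assert_eq!(describer.results.len(), 1);
}
```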
impl TypeDescriber { impl TypeDescriber {
/// Describes the given type.
fn describe_root(&mut self, ty: &Ty) -> Option<EcoString> { fn describe_root(&mut self, ty: &Ty) -> Option<EcoString> {
let _ = TypeDescriber::describe_iter; let _ = TypeDescriber::describe_iter;
// recursive structure // recursive structure
@ -132,6 +139,7 @@ impl TypeDescriber {
Some(res) Some(res)
} }
/// Describes the given types.
fn describe_iter(&mut self, ty: &[Ty]) { fn describe_iter(&mut self, ty: &[Ty]) {
for ty in ty.iter() { for ty in ty.iter() {
let desc = self.describe(ty); let desc = self.describe(ty);
@ -141,6 +149,7 @@ impl TypeDescriber {
} }
} }
/// Describes the given type.
fn describe(&mut self, ty: &Ty) -> EcoString { fn describe(&mut self, ty: &Ty) -> EcoString {
match ty { match ty {
Ty::Var(..) => {} Ty::Var(..) => {}

View file

@ -4,42 +4,68 @@ use typst::syntax::FileId;
use super::BoundChecker; use super::BoundChecker;
use crate::{syntax::Decl, ty::prelude::*}; use crate::{syntax::Decl, ty::prelude::*};
/// A type that represents the interface of a type.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum Iface<'a> { pub enum Iface<'a> {
/// An array type.
Array(&'a Interned<Ty>), Array(&'a Interned<Ty>),
/// A tuple type.
Tuple(&'a Interned<Vec<Ty>>), Tuple(&'a Interned<Vec<Ty>>),
/// A dictionary type.
Dict(&'a Interned<RecordTy>), Dict(&'a Interned<RecordTy>),
/// A content type.
Content { Content {
/// The element type.
val: &'a typst::foundations::Element, val: &'a typst::foundations::Element,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// A type type.
TypeType { TypeType {
/// The type type.
val: &'a typst::foundations::Type, val: &'a typst::foundations::Type,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// A type.
Type { Type {
/// The type.
val: &'a typst::foundations::Type, val: &'a typst::foundations::Type,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// A function type.
Func { Func {
/// The function.
val: &'a typst::foundations::Func, val: &'a typst::foundations::Func,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// A value type.
Value { Value {
/// The value.
val: &'a Dict, val: &'a Dict,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// A module type.
Module { Module {
/// The module.
val: FileId, val: FileId,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// A module value type.
ModuleVal { ModuleVal {
/// The module value.
val: &'a Module, val: &'a Module,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
} }
impl Iface<'_> { impl Iface<'_> {
/// Converts the interface to a type.
pub fn to_type(self) -> Ty { pub fn to_type(self) -> Ty {
match self { match self {
Iface::Array(ty) => Ty::Array(ty.clone()), Iface::Array(ty) => Ty::Array(ty.clone()),
@ -55,7 +81,7 @@ impl Iface<'_> {
} }
} }
// IfaceShape { iface } /// Selects the given key from the interface.
pub fn select(self, ctx: &mut impl TyCtxMut, key: &StrRef) -> Option<Ty> { pub fn select(self, ctx: &mut impl TyCtxMut, key: &StrRef) -> Option<Ty> {
crate::log_debug_ct!("iface shape: {self:?}"); crate::log_debug_ct!("iface shape: {self:?}");
@ -77,6 +103,7 @@ impl Iface<'_> {
} }
} }
/// Selects the given key from the given scope.
fn select_scope(scope: Option<&Scope>, key: &str) -> Option<Ty> { fn select_scope(scope: Option<&Scope>, key: &str) -> Option<Ty> {
let scope = scope?; let scope = scope?;
let sub = scope.get(key)?; let sub = scope.get(key)?;
@ -84,12 +111,14 @@ fn select_scope(scope: Option<&Scope>, key: &str) -> Option<Ty> {
Some(Ty::Value(InsTy::new_at(sub.read().clone(), sub_span))) Some(Ty::Value(InsTy::new_at(sub.read().clone(), sub_span)))
} }
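Conceptually, `select` resolves a key against whichever namespace the interface variant exposes (record fields, a module scope, a type's static scope, and so on). The following self-contained toy shows only that dispatch shape; it is not the crate's `Iface`, and both variants here are made up:

```rust
use std::collections::HashMap;

/// A reduced interface: either a record with fields or a module scope.
/// These two variants are made up for illustration only.
enum Iface<'a> {
    Dict(&'a HashMap<String, String>),
    Module(&'a HashMap<String, String>),
}

/// Mirrors the idea behind `Iface::select`: each variant decides where the key
/// is resolved.
fn select(iface: &Iface, key: &str) -> Option<String> {
    match iface {
        Iface::Dict(fields) | Iface::Module(fields) => fields.get(key).cloned(),
    }
}

fn main() {
    let mut fields = HashMap::new();
    fields.insert("caption".to_string(), "content".to_string());

    assert_eq!(
        select(&Iface::Dict(&fields), "caption").as_deref(),
        Some("content")
    );
    assert_eq!(select(&Iface::Module(&fields), "missing"), None);
}
```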
/// A trait to check the interface of a type.
pub trait IfaceChecker: TyCtx { pub trait IfaceChecker: TyCtx {
/// Checks the interface of the given type.
fn check(&mut self, iface: Iface, ctx: &mut IfaceCheckContext, pol: bool) -> Option<()>; fn check(&mut self, iface: Iface, ctx: &mut IfaceCheckContext, pol: bool) -> Option<()>;
} }
impl Ty { impl Ty {
/// Iterate over the signatures of the given type. /// Iterates over the signatures of the given type.
pub fn iface_surface( pub fn iface_surface(
&self, &self,
pol: bool, pol: bool,
@ -106,10 +135,13 @@ impl Ty {
} }
} }
/// A context to check the interface of a type.
pub struct IfaceCheckContext { pub struct IfaceCheckContext {
/// The arguments of the function.
pub args: Vec<Interned<SigTy>>, pub args: Vec<Interned<SigTy>>,
} }
/// A driver to check the interface of a type.
#[derive(BindTyCtx)] #[derive(BindTyCtx)]
#[bind(checker)] #[bind(checker)]
pub struct IfaceCheckDriver<'a> { pub struct IfaceCheckDriver<'a> {
@ -124,18 +156,22 @@ impl BoundChecker for IfaceCheckDriver<'_> {
} }
impl IfaceCheckDriver<'_> { impl IfaceCheckDriver<'_> {
/// Determines whether to check the array as an interface.
fn array_as_iface(&self) -> bool { fn array_as_iface(&self) -> bool {
true true
} }
/// Determines whether to check the dictionary as an interface.
fn dict_as_iface(&self) -> bool { fn dict_as_iface(&self) -> bool {
true true
} }
/// Determines whether to check the value as an interface.
fn value_as_iface(&self) -> bool { fn value_as_iface(&self) -> bool {
true true
} }
/// Checks the interface of the given type.
fn ty(&mut self, at: &Ty, pol: bool) { fn ty(&mut self, at: &Ty, pol: bool) {
crate::log_debug_ct!("check iface ty: {at:?}"); crate::log_debug_ct!("check iface ty: {at:?}");

View file

@ -1,7 +1,5 @@
//! Types and type operations for Typst. //! Types and type operations for Typst.
#![allow(missing_docs)]
mod apply; mod apply;
mod bound; mod bound;
mod builtin; mod builtin;

View file

@ -1,9 +1,13 @@
use crate::ty::def::*; use crate::ty::def::*;
/// A trait to mutate a type.
pub trait TyMutator { pub trait TyMutator {
/// Mutates the given type.
fn mutate(&mut self, ty: &Ty, pol: bool) -> Option<Ty> { fn mutate(&mut self, ty: &Ty, pol: bool) -> Option<Ty> {
self.mutate_rec(ty, pol) self.mutate_rec(ty, pol)
} }
/// Mutates the given type recursively.
fn mutate_rec(&mut self, ty: &Ty, pol: bool) -> Option<Ty> { fn mutate_rec(&mut self, ty: &Ty, pol: bool) -> Option<Ty> {
use Ty::*; use Ty::*;
match ty { match ty {
@ -25,6 +29,7 @@ pub trait TyMutator {
} }
} }
/// Mutates the given vector of types.
fn mutate_vec(&mut self, ty: &[Ty], pol: bool) -> Option<Interned<Vec<Ty>>> { fn mutate_vec(&mut self, ty: &[Ty], pol: bool) -> Option<Interned<Vec<Ty>>> {
let mut mutated = false; let mut mutated = false;
@ -42,6 +47,7 @@ pub trait TyMutator {
if mutated { Some(types.into()) } else { None } if mutated { Some(types.into()) } else { None }
} }
/// Mutates the given optional type.
fn mutate_option(&mut self, ty: Option<&Ty>, pol: bool) -> Option<Option<Ty>> { fn mutate_option(&mut self, ty: Option<&Ty>, pol: bool) -> Option<Option<Ty>> {
match ty { match ty {
Some(ty) => self.mutate(ty, pol).map(Some), Some(ty) => self.mutate(ty, pol).map(Some),
@ -49,6 +55,7 @@ pub trait TyMutator {
} }
} }
/// Mutates the given function signature.
fn mutate_func(&mut self, ty: &Interned<SigTy>, pol: bool) -> Option<SigTy> { fn mutate_func(&mut self, ty: &Interned<SigTy>, pol: bool) -> Option<SigTy> {
let types = self.mutate_vec(&ty.inputs, pol); let types = self.mutate_vec(&ty.inputs, pol);
let ret = self.mutate_option(ty.body.as_ref(), pol); let ret = self.mutate_option(ty.body.as_ref(), pol);
@ -67,6 +74,7 @@ pub trait TyMutator {
}) })
} }
/// Mutates the given parameter type.
fn mutate_param(&mut self, param: &Interned<ParamTy>, pol: bool) -> Option<ParamTy> { fn mutate_param(&mut self, param: &Interned<ParamTy>, pol: bool) -> Option<ParamTy> {
let ty = self.mutate(&param.ty, pol)?; let ty = self.mutate(&param.ty, pol)?;
let mut param = param.as_ref().clone(); let mut param = param.as_ref().clone();
@ -74,6 +82,7 @@ pub trait TyMutator {
Some(param) Some(param)
} }
/// Mutates the given record type.
fn mutate_record(&mut self, record: &Interned<RecordTy>, pol: bool) -> Option<RecordTy> { fn mutate_record(&mut self, record: &Interned<RecordTy>, pol: bool) -> Option<RecordTy> {
let types = self.mutate_vec(&record.types, pol)?; let types = self.mutate_vec(&record.types, pol)?;
@ -81,6 +90,7 @@ pub trait TyMutator {
Some(RecordTy { types, ..rec }) Some(RecordTy { types, ..rec })
} }
/// Mutates the given function signature with type.
fn mutate_with_sig(&mut self, ty: &Interned<SigWithTy>, pol: bool) -> Option<SigWithTy> { fn mutate_with_sig(&mut self, ty: &Interned<SigWithTy>, pol: bool) -> Option<SigWithTy> {
let sig = self.mutate(ty.sig.as_ref(), pol); let sig = self.mutate(ty.sig.as_ref(), pol);
let with = self.mutate_func(&ty.with, pol); let with = self.mutate_func(&ty.with, pol);
@ -95,12 +105,14 @@ pub trait TyMutator {
Some(SigWithTy { sig, with }) Some(SigWithTy { sig, with })
} }
/// Mutates the given unary type.
fn mutate_unary(&mut self, ty: &Interned<TypeUnary>, pol: bool) -> Option<TypeUnary> { fn mutate_unary(&mut self, ty: &Interned<TypeUnary>, pol: bool) -> Option<TypeUnary> {
let lhs = self.mutate(&ty.lhs, pol)?; let lhs = self.mutate(&ty.lhs, pol)?;
Some(TypeUnary { lhs, op: ty.op }) Some(TypeUnary { lhs, op: ty.op })
} }
/// Mutates the given binary type.
fn mutate_binary(&mut self, ty: &Interned<TypeBinary>, pol: bool) -> Option<TypeBinary> { fn mutate_binary(&mut self, ty: &Interned<TypeBinary>, pol: bool) -> Option<TypeBinary> {
let (lhs, rhs) = &ty.operands; let (lhs, rhs) = &ty.operands;
@ -120,6 +132,7 @@ pub trait TyMutator {
}) })
} }
/// Mutates the given if type.
fn mutate_if(&mut self, ty: &Interned<IfTy>, pol: bool) -> Option<IfTy> { fn mutate_if(&mut self, ty: &Interned<IfTy>, pol: bool) -> Option<IfTy> {
let cond = self.mutate(ty.cond.as_ref(), pol); let cond = self.mutate(ty.cond.as_ref(), pol);
let then = self.mutate(ty.then.as_ref(), pol); let then = self.mutate(ty.then.as_ref(), pol);
@ -136,6 +149,7 @@ pub trait TyMutator {
Some(IfTy { cond, then, else_ }) Some(IfTy { cond, then, else_ })
} }
/// Mutates the given select type.
fn mutate_select(&mut self, ty: &Interned<SelectTy>, pol: bool) -> Option<SelectTy> { fn mutate_select(&mut self, ty: &Interned<SelectTy>, pol: bool) -> Option<SelectTy> {
let target = self.mutate(ty.ty.as_ref(), pol)?.into(); let target = self.mutate(ty.ty.as_ref(), pol)?.into();
@ -156,7 +170,7 @@ where
} }
impl Ty { impl Ty {
/// Mutate the given type. /// Mutates the given type.
pub fn mutate(&self, pol: bool, checker: &mut impl TyMutator) -> Option<Ty> { pub fn mutate(&self, pol: bool, checker: &mut impl TyMutator) -> Option<Ty> {
checker.mutate(self, pol) checker.mutate(self, pol)
} }
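The mutator protocol is worth spelling out: every `mutate_*` method returns an `Option`, where `None` means "unchanged", so untouched subtrees can be shared rather than rebuilt. A minimal self-contained sketch of that convention over a toy type tree (not the crate's `Ty`):

```rust
/// A toy type tree; not the crate's `Ty`.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Any,
    Var(u32),
    Array(Box<Ty>),
}

/// Mirrors the convention above: `Some(..)` means "rewritten", `None` means
/// "unchanged".
trait TyMutator {
    fn mutate(&mut self, ty: &Ty) -> Option<Ty>;
}

/// Replaces every type variable with `Any`, recursing through arrays.
struct Anyify;

impl TyMutator for Anyify {
    fn mutate(&mut self, ty: &Ty) -> Option<Ty> {
        match ty {
            Ty::Var(_) => Some(Ty::Any),
            Ty::Array(inner) => Some(Ty::Array(Box::new(self.mutate(inner)?))),
            Ty::Any => None,
        }
    }
}

fn main() {
    let ty = Ty::Array(Box::new(Ty::Var(1)));
    assert_eq!(Anyify.mutate(&ty), Some(Ty::Array(Box::new(Ty::Any))));
    // Nothing to rewrite, so the mutator signals "unchanged".
    assert_eq!(Anyify.mutate(&Ty::Any), None);
}
```

Returning `None` for unchanged subtrees is presumably what lets the real implementation keep interned nodes intact instead of cloning the whole tree.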

View file

@ -1,27 +1,32 @@
use super::{Iface, IfaceChecker}; use super::{Iface, IfaceChecker};
use crate::ty::def::*; use crate::ty::def::*;
/// A trait to check field selection on a type.
pub trait SelectChecker: TyCtx { pub trait SelectChecker: TyCtx {
/// Checks field selection on the given type.
fn select(&mut self, sig: Iface, key: &Interned<str>, pol: bool); fn select(&mut self, sig: Iface, key: &Interned<str>, pol: bool);
} }
impl Ty { impl Ty {
/// Select the given type with the given key. /// Selects the given type with the given key.
pub fn select(&self, key: &Interned<str>, pol: bool, checker: &mut impl SelectChecker) { pub fn select(&self, key: &Interned<str>, pol: bool, checker: &mut impl SelectChecker) {
SelectKeyChecker(checker, key).ty(self, pol); SelectKeyChecker(checker, key).ty(self, pol);
} }
} }
/// A checker for field selection on a type.
#[derive(BindTyCtx)] #[derive(BindTyCtx)]
#[bind(0)] #[bind(0)]
pub struct SelectKeyChecker<'a, T: TyCtx>(&'a mut T, &'a Interned<str>); pub struct SelectKeyChecker<'a, T: TyCtx>(&'a mut T, &'a Interned<str>);
/// A driver that checks field selection on a type.
impl<T: SelectChecker> SelectKeyChecker<'_, T> { impl<T: SelectChecker> SelectKeyChecker<'_, T> {
fn ty(&mut self, ty: &Ty, pol: bool) { fn ty(&mut self, ty: &Ty, pol: bool) {
ty.iface_surface(pol, self) ty.iface_surface(pol, self)
} }
} }
/// Implements interface checking for field selection.
impl<T: SelectChecker> IfaceChecker for SelectKeyChecker<'_, T> { impl<T: SelectChecker> IfaceChecker for SelectKeyChecker<'_, T> {
fn check( fn check(
&mut self, &mut self,

View file

@ -3,35 +3,56 @@ use typst::foundations::{Func, Value};
use super::BoundChecker; use super::BoundChecker;
use crate::ty::prelude::*; use crate::ty::prelude::*;
/// A signature.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum Sig<'a> { pub enum Sig<'a> {
/// A builtin signature.
Builtin(BuiltinSig<'a>), Builtin(BuiltinSig<'a>),
/// A type signature.
Type(&'a Interned<SigTy>), Type(&'a Interned<SigTy>),
/// A type constructor.
TypeCons { TypeCons {
/// The type.
val: &'a typst::foundations::Type, val: &'a typst::foundations::Type,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// An array constructor.
ArrayCons(&'a TyRef), ArrayCons(&'a TyRef),
/// A tuple constructor.
TupleCons(&'a Interned<Vec<Ty>>), TupleCons(&'a Interned<Vec<Ty>>),
/// A dictionary constructor.
DictCons(&'a Interned<RecordTy>), DictCons(&'a Interned<RecordTy>),
/// A value signature.
Value { Value {
/// The value.
val: &'a Func, val: &'a Func,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
/// A partialize signature.
Partialize(&'a Sig<'a>), Partialize(&'a Sig<'a>),
/// A with signature.
With { With {
/// The signature.
sig: &'a Sig<'a>, sig: &'a Sig<'a>,
/// The applied argument lists.
withs: &'a Vec<Interned<ArgsTy>>, withs: &'a Vec<Interned<ArgsTy>>,
/// The original type.
at: &'a Ty, at: &'a Ty,
}, },
} }
/// A shape of a signature.
pub struct SigShape<'a> { pub struct SigShape<'a> {
/// The signature.
pub sig: Interned<SigTy>, pub sig: Interned<SigTy>,
/// The withs.
pub withs: Option<&'a Vec<Interned<SigTy>>>, pub withs: Option<&'a Vec<Interned<SigTy>>>,
} }
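The `With` variant and `SigShape::withs` model partial application: argument lists already bound via `.with(..)` are kept alongside the base signature and are consumed before new call-site arguments. A self-contained toy of that bookkeeping (hypothetical names, plain strings for parameters; not the crate's representation):

```rust
/// A toy "signature with applied arguments"; all names are hypothetical.
struct WithSig<'a> {
    /// Parameter names expected by the underlying signature.
    params: &'a [&'a str],
    /// Argument lists already applied via `.with(..)`, oldest first.
    withs: Vec<Vec<&'a str>>,
}

impl WithSig<'_> {
    /// Parameters left over once the applied argument lists are consumed.
    fn remaining(&self) -> &[&str] {
        let used: usize = self.withs.iter().map(Vec::len).sum();
        &self.params[used.min(self.params.len())..]
    }
}

fn main() {
    let sig = WithSig {
        params: &["body", "caption", "placement"],
        withs: vec![vec!["body"], vec!["caption"]],
    };
    // Two positional arguments were bound by `with`, so one parameter remains.
    assert_eq!(sig.remaining(), ["placement"]);
}
```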
impl<'a> Sig<'a> { impl<'a> Sig<'a> {
/// Gets the type of the signature.
pub fn ty(self) -> Option<Ty> { pub fn ty(self) -> Option<Ty> {
Some(match self { Some(match self {
Sig::Builtin(_) => return None, Sig::Builtin(_) => return None,
@ -46,6 +67,7 @@ impl<'a> Sig<'a> {
}) })
} }
/// Gets the shape of the signature.
pub fn shape(self, ctx: &mut impl TyCtxMut) -> Option<SigShape<'a>> { pub fn shape(self, ctx: &mut impl TyCtxMut) -> Option<SigShape<'a>> {
let (sig, _is_partialize) = match self { let (sig, _is_partialize) = match self {
Sig::Partialize(sig) => (*sig, true), Sig::Partialize(sig) => (*sig, true),
@ -74,15 +96,22 @@ impl<'a> Sig<'a> {
} }
} }
/// A kind of signature surface.
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SigSurfaceKind { pub enum SigSurfaceKind {
/// A call signature.
Call, Call,
/// An array signature.
Array, Array,
/// A dictionary signature.
Dict, Dict,
/// An array or dictionary signature.
ArrayOrDict, ArrayOrDict,
} }
/// A trait to check a signature.
pub trait SigChecker: TyCtx { pub trait SigChecker: TyCtx {
/// Checks the signature.
fn check(&mut self, sig: Sig, args: &mut SigCheckContext, pol: bool) -> Option<()>; fn check(&mut self, sig: Sig, args: &mut SigCheckContext, pol: bool) -> Option<()>;
} }
@ -131,12 +160,17 @@ impl Ty {
} }
} }
/// A context to check a signature.
pub struct SigCheckContext { pub struct SigCheckContext {
/// The kind of signature surface.
pub sig_kind: SigSurfaceKind, pub sig_kind: SigSurfaceKind,
/// The arguments.
pub args: Vec<Interned<SigTy>>, pub args: Vec<Interned<SigTy>>,
/// The type.
pub at: TyRef, pub at: TyRef,
} }
/// A driver to check a signature.
#[derive(BindTyCtx)] #[derive(BindTyCtx)]
#[bind(checker)] #[bind(checker)]
pub struct SigCheckDriver<'a> { pub struct SigCheckDriver<'a> {
@ -145,10 +179,12 @@ pub struct SigCheckDriver<'a> {
} }
impl SigCheckDriver<'_> { impl SigCheckDriver<'_> {
/// Determines whether the signature is a function.
fn func_as_sig(&self) -> bool { fn func_as_sig(&self) -> bool {
matches!(self.ctx.sig_kind, SigSurfaceKind::Call) matches!(self.ctx.sig_kind, SigSurfaceKind::Call)
} }
/// Determines whether the signature is an array.
fn array_as_sig(&self) -> bool { fn array_as_sig(&self) -> bool {
matches!( matches!(
self.ctx.sig_kind, self.ctx.sig_kind,
@ -156,6 +192,7 @@ impl SigCheckDriver<'_> {
) )
} }
/// Determines whether the signature is a dictionary.
fn dict_as_sig(&self) -> bool { fn dict_as_sig(&self) -> bool {
matches!( matches!(
self.ctx.sig_kind, self.ctx.sig_kind,
@ -163,6 +200,7 @@ impl SigCheckDriver<'_> {
) )
} }
/// Checks the signature of the given type.
fn ty(&mut self, at: &Ty, pol: bool) { fn ty(&mut self, at: &Ty, pol: bool) {
crate::log_debug_ct!("check sig: {at:?}"); crate::log_debug_ct!("check sig: {at:?}");
match at { match at {
@ -258,6 +296,7 @@ impl BoundChecker for SigCheckDriver<'_> {
} }
} }
/// A driver to check a method.
#[derive(BindTyCtx)] #[derive(BindTyCtx)]
#[bind(0)] #[bind(0)]
struct MethodDriver<'a, 'b>(&'a mut SigCheckDriver<'b>, &'a StrRef); struct MethodDriver<'a, 'b>(&'a mut SigCheckDriver<'b>, &'a StrRef);

View file

@ -4,6 +4,7 @@ use ecow::EcoVec;
use crate::{syntax::DeclExpr, ty::prelude::*}; use crate::{syntax::DeclExpr, ty::prelude::*};
/// A compact type.
#[derive(Default)] #[derive(Default)]
struct CompactTy { struct CompactTy {
equiv_vars: HashSet<DefId>, equiv_vars: HashSet<DefId>,
@ -15,7 +16,7 @@ struct CompactTy {
} }
impl TypeInfo { impl TypeInfo {
/// Simplify (Canonicalize) the given type with the given type scheme. /// Simplifies (canonicalizes) the given type with the given type scheme.
pub fn simplify(&self, ty: Ty, principal: bool) -> Ty { pub fn simplify(&self, ty: Ty, principal: bool) -> Ty {
let mut cache = self.cano_cache.lock(); let mut cache = self.cano_cache.lock();
let cache = &mut *cache; let cache = &mut *cache;
@ -38,6 +39,7 @@ impl TypeInfo {
} }
} }
/// A simplifier to simplify a type.
struct TypeSimplifier<'a, 'b> { struct TypeSimplifier<'a, 'b> {
principal: bool, principal: bool,
@ -50,6 +52,7 @@ struct TypeSimplifier<'a, 'b> {
} }
impl TypeSimplifier<'_, '_> { impl TypeSimplifier<'_, '_> {
/// Simplifies the given type.
fn simplify(&mut self, ty: Ty, principal: bool) -> Ty { fn simplify(&mut self, ty: Ty, principal: bool) -> Ty {
if let Some(cano) = self.cano_cache.get(&(ty.clone(), principal)) { if let Some(cano) = self.cano_cache.get(&(ty.clone(), principal)) {
return cano.clone(); return cano.clone();
@ -60,6 +63,7 @@ impl TypeSimplifier<'_, '_> {
self.transform(&ty, true) self.transform(&ty, true)
} }
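The simplifier consults `cano_cache`, keyed by the type and the `principal` flag, before doing any work, so repeated canonicalization of the same type is free. A stripped-down, self-contained version of that memoization (the lowercasing stands in for the real analyze/transform passes):

```rust
use std::collections::HashMap;

type Ty = String;

/// Mirrors the `cano_cache` idea: canonicalization is memoized per
/// `(type, principal)` pair.
struct Simplifier {
    cache: HashMap<(Ty, bool), Ty>,
}

impl Simplifier {
    fn simplify(&mut self, ty: &Ty, principal: bool) -> Ty {
        if let Some(hit) = self.cache.get(&(ty.clone(), principal)) {
            return hit.clone();
        }
        let cano = ty.to_lowercase();
        self.cache.insert((ty.clone(), principal), cano.clone());
        cano
    }
}

fn main() {
    let mut simplifier = Simplifier { cache: HashMap::new() };
    assert_eq!(simplifier.simplify(&"Int | Int".to_string(), true), "int | int");
    simplifier.simplify(&"Int | Int".to_string(), true);
    // The second call hit the cache instead of recomputing.
    assert_eq!(simplifier.cache.len(), 1);
}
```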
/// Analyzes the given type.
fn analyze(&mut self, ty: &Ty, pol: bool) { fn analyze(&mut self, ty: &Ty, pol: bool) {
match ty { match ty {
Ty::Var(var) => { Ty::Var(var) => {
@ -162,6 +166,7 @@ impl TypeSimplifier<'_, '_> {
} }
} }
/// Transforms the given type.
fn transform(&mut self, ty: &Ty, pol: bool) -> Ty { fn transform(&mut self, ty: &Ty, pol: bool) -> Ty {
match ty { match ty {
Ty::Let(bounds) => self.transform_let(bounds.lbs.iter(), bounds.ubs.iter(), None, pol), Ty::Let(bounds) => self.transform_let(bounds.lbs.iter(), bounds.ubs.iter(), None, pol),
@ -250,11 +255,13 @@ impl TypeSimplifier<'_, '_> {
} }
} }
/// Transforms the given sequence of types.
fn transform_seq(&mut self, types: &[Ty], pol: bool) -> Interned<Vec<Ty>> { fn transform_seq(&mut self, types: &[Ty], pol: bool) -> Interned<Vec<Ty>> {
let seq = types.iter().map(|ty| self.transform(ty, pol)); let seq = types.iter().map(|ty| self.transform(ty, pol));
seq.collect::<Vec<_>>().into() seq.collect::<Vec<_>>().into()
} }
/// Transforms the given let type.
#[allow(clippy::mutable_key_type)] #[allow(clippy::mutable_key_type)]
fn transform_let<'a>( fn transform_let<'a>(
&mut self, &mut self,
@ -299,6 +306,7 @@ impl TypeSimplifier<'_, '_> {
Ty::Let(TypeBounds { lbs, ubs }.into()) Ty::Let(TypeBounds { lbs, ubs }.into())
} }
/// Transforms the given signature.
fn transform_sig(&mut self, sig: &SigTy, pol: bool) -> Interned<SigTy> { fn transform_sig(&mut self, sig: &SigTy, pol: bool) -> Interned<SigTy> {
let mut sig = sig.clone(); let mut sig = sig.clone();
sig.inputs = self.transform_seq(&sig.inputs, !pol); sig.inputs = self.transform_seq(&sig.inputs, !pol);

View file

@ -2,6 +2,7 @@ use super::{Sig, SigShape, TyMutator};
use crate::ty::prelude::*; use crate::ty::prelude::*;
impl Sig<'_> { impl Sig<'_> {
/// Calls the signature with the given arguments.
pub fn call(&self, args: &Interned<ArgsTy>, pol: bool, ctx: &mut impl TyCtxMut) -> Option<Ty> { pub fn call(&self, args: &Interned<ArgsTy>, pol: bool, ctx: &mut impl TyCtxMut) -> Option<Ty> {
crate::log_debug_ct!("call {self:?} {args:?} {pol:?}"); crate::log_debug_ct!("call {self:?} {args:?} {pol:?}");
ctx.with_scope(|ctx| { ctx.with_scope(|ctx| {
@ -13,6 +14,7 @@ impl Sig<'_> {
}) })
} }
/// Checks the binding of the signature.
pub fn check_bind(&self, args: &Interned<ArgsTy>, ctx: &mut impl TyCtxMut) -> Option<Ty> { pub fn check_bind(&self, args: &Interned<ArgsTy>, ctx: &mut impl TyCtxMut) -> Option<Ty> {
let SigShape { sig, withs } = self.shape(ctx)?; let SigShape { sig, withs } = self.shape(ctx)?;
@ -30,11 +32,13 @@ impl Sig<'_> {
} }
} }
/// A checker to substitute the bound variables.
struct SubstituteChecker<'a, T: TyCtxMut> { struct SubstituteChecker<'a, T: TyCtxMut> {
ctx: &'a mut T, ctx: &'a mut T,
} }
impl<T: TyCtxMut> SubstituteChecker<'_, T> { impl<T: TyCtxMut> SubstituteChecker<'_, T> {
/// Substitutes the bound variables in the given type.
fn ty(&mut self, body: &Ty, pol: bool) -> Option<Ty> { fn ty(&mut self, body: &Ty, pol: bool) -> Option<Ty> {
body.mutate(pol, self) body.mutate(pol, self)
} }

View file

@ -15,3 +15,6 @@ include = ["src/**/*"]
[features] [features]
typst-preview = [] typst-preview = []
l10n = [] l10n = []
[lints]
workspace = true

View file

@ -1,6 +1,10 @@
//! The assets for compiling tinymist.
/// If this file is not found, please refer to <https://enter-tainer.github.io/typst-preview/dev.html> to build the frontend. /// If this file is not found, please refer to <https://enter-tainer.github.io/typst-preview/dev.html> to build the frontend.
#[cfg(feature = "typst-preview")] #[cfg(feature = "typst-preview")]
pub const TYPST_PREVIEW_HTML: &str = include_str!("typst-preview.html"); pub const TYPST_PREVIEW_HTML: &str = include_str!("typst-preview.html");
/// A dummy HTML file to be used when the `typst-preview` feature is not
/// enabled.
#[cfg(not(feature = "typst-preview"))] #[cfg(not(feature = "typst-preview"))]
pub const TYPST_PREVIEW_HTML: &str = "<html><body>Typst Preview needs to be built with the `embed-html` feature to work!</body></html>"; pub const TYPST_PREVIEW_HTML: &str = "<html><body>Typst Preview needs to be built with the `embed-html` feature to work!</body></html>";
@ -8,5 +12,6 @@ pub const TYPST_PREVIEW_HTML: &str = "<html><body>Typst Preview needs to be buil
/// localization data. /// localization data.
#[cfg(feature = "l10n")] #[cfg(feature = "l10n")]
pub const L10N_DATA: &str = include_str!("tinymist-rt.toml"); pub const L10N_DATA: &str = include_str!("tinymist-rt.toml");
/// A dummy string to be used when the `l10n` feature is not enabled.
#[cfg(not(feature = "l10n"))] #[cfg(not(feature = "l10n"))]
pub const L10N_DATA: &str = ""; pub const L10N_DATA: &str = "";

View file

@ -18,3 +18,6 @@ quote.workspace = true
[lib] [lib]
proc-macro = true proc-macro = true
[lints]
workspace = true

View file

@ -1,9 +1,22 @@
//! Derives for tinymist.
extern crate proc_macro; extern crate proc_macro;
use proc_macro::TokenStream; use proc_macro::TokenStream;
use quote::quote; use quote::quote;
use syn::{DeriveInput, parse_macro_input}; use syn::{DeriveInput, parse_macro_input};
/// Derives the `BindTyCtx` trait.
///
/// # Example
///
/// ```ignore
/// #[derive(BindTyCtx)]
/// struct MyStruct {
/// #[bind]
/// tyctx: TyCtx,
/// }
/// ```
#[proc_macro_derive(BindTyCtx, attributes(bind))] #[proc_macro_derive(BindTyCtx, attributes(bind))]
pub fn bind_ty_ctx(input: TokenStream) -> TokenStream { pub fn bind_ty_ctx(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree // Parse the input tokens into a syntax tree
@ -47,6 +60,17 @@ pub fn bind_ty_ctx(input: TokenStream) -> TokenStream {
TokenStream::from(expanded) TokenStream::from(expanded)
} }
/// Derives the `DeclEnum` trait.
///
/// # Example
///
/// ```ignore
/// #[derive(DeclEnum)]
/// enum MyEnum {
/// Sub1(SpannedDecl),
/// Sub2(SpannedDecl),
/// }
/// ```
#[proc_macro_derive(DeclEnum)] #[proc_macro_derive(DeclEnum)]
pub fn gen_decl_enum(input: TokenStream) -> TokenStream { pub fn gen_decl_enum(input: TokenStream) -> TokenStream {
// In form of // In form of
@ -71,12 +95,13 @@ pub fn gen_decl_enum(input: TokenStream) -> TokenStream {
let expanded = quote! { let expanded = quote! {
impl #input_name { impl #input_name {
/// Gets the name of the item.
pub fn name(&self) -> &Interned<str> { pub fn name(&self) -> &Interned<str> {
match self { match self {
#(Self::#names(x) => x.name()),* #(Self::#names(x) => x.name()),*
} }
} }
/// Gets the span of the item.
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
match self { match self {
#(Self::#names(x) => x.span()),* #(Self::#names(x) => x.span()),*
@ -97,6 +122,16 @@ pub fn gen_decl_enum(input: TokenStream) -> TokenStream {
TokenStream::from(expanded) TokenStream::from(expanded)
} }
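For readers who want to see what `#[derive(DeclEnum)]` buys without reading the quoted expansion, here is a hand-written, self-contained equivalent of the generated `name`/`span` dispatch for a two-variant enum (all types here are simplified stand-ins, not the crate's):

```rust
/// Simplified stand-ins for the crate's interned name and span types.
type Name = &'static str;
type Span = usize;

struct SpannedDecl {
    name: Name,
    span: Span,
}

impl SpannedDecl {
    fn name(&self) -> &Name {
        &self.name
    }
    fn span(&self) -> Span {
        self.span
    }
}

enum MyEnum {
    Sub1(SpannedDecl),
    Sub2(SpannedDecl),
}

// Roughly what `#[derive(DeclEnum)]` generates, written out for two variants.
impl MyEnum {
    fn name(&self) -> &Name {
        match self {
            Self::Sub1(x) => x.name(),
            Self::Sub2(x) => x.name(),
        }
    }
    fn span(&self) -> Span {
        match self {
            Self::Sub1(x) => x.span(),
            Self::Sub2(x) => x.span(),
        }
    }
}

fn main() {
    let first = MyEnum::Sub1(SpannedDecl { name: "x", span: 3 });
    let second = MyEnum::Sub2(SpannedDecl { name: "y", span: 7 });
    assert_eq!(*first.name(), "x");
    assert_eq!(second.span(), 7);
}
```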
/// Derives the `TypliteAttr` trait.
///
/// # Example
///
/// ```ignore
/// #[derive(TypliteAttr, Default)]
/// pub struct FigureAttr {
/// pub caption: EcoString,
/// }
/// ```
#[proc_macro_derive(TypliteAttr)] #[proc_macro_derive(TypliteAttr)]
pub fn gen_typlite_element(input: TokenStream) -> TokenStream { pub fn gen_typlite_element(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree // Parse the input tokens into a syntax tree

View file

@ -169,15 +169,14 @@ pub struct CompileStatusResult {
elapsed: tinymist_std::time::Duration, elapsed: tinymist_std::time::Duration,
} }
#[allow(missing_docs)]
impl CompileReport { impl CompileReport {
/// Get the status message. /// Gets the status message.
pub fn message(&self) -> CompileReportMsg<'_> { pub fn message(&self) -> CompileReportMsg<'_> {
CompileReportMsg(self) CompileReportMsg(self)
} }
} }
#[allow(missing_docs)] /// A message of the compilation status.
pub struct CompileReportMsg<'a>(&'a CompileReport); pub struct CompileReportMsg<'a>(&'a CompileReport);
impl fmt::Display for CompileReportMsg<'_> { impl fmt::Display for CompileReportMsg<'_> {

View file

@ -1,5 +1,3 @@
#![allow(missing_docs)]
use std::cmp::Ordering; use std::cmp::Ordering;
use std::io::{Read, Seek, SeekFrom, Write}; use std::io::{Read, Seek, SeekFrom, Write};
use std::{path::Path, sync::Arc}; use std::{path::Path, sync::Arc};
@ -16,14 +14,17 @@ use crate::model::{ApplyProjectTask, Id, ProjectInput, ProjectRoute, ResourcePat
use crate::{LOCK_FILENAME, LOCK_VERSION, LockFile, LockFileCompat, LspWorld, ProjectPathMaterial}; use crate::{LOCK_FILENAME, LOCK_VERSION, LockFile, LockFileCompat, LspWorld, ProjectPathMaterial};
impl LockFile { impl LockFile {
/// Gets the input by the id.
pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> { pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> {
self.document.iter().find(|i| &i.id == id) self.document.iter().find(|i| &i.id == id)
} }
/// Gets the task by the id.
pub fn get_task(&self, id: &Id) -> Option<&ApplyProjectTask> { pub fn get_task(&self, id: &Id) -> Option<&ApplyProjectTask> {
self.task.iter().find(|i| &i.id == id) self.task.iter().find(|i| &i.id == id)
} }
/// Replaces the input by the id.
pub fn replace_document(&mut self, mut input: ProjectInput) { pub fn replace_document(&mut self, mut input: ProjectInput) {
input.lock_dir = None; input.lock_dir = None;
let input = input; let input = input;
@ -36,6 +37,7 @@ impl LockFile {
} }
} }
/// Replaces the task by the id.
pub fn replace_task(&mut self, mut task: ApplyProjectTask) { pub fn replace_task(&mut self, mut task: ApplyProjectTask) {
if let Some(pat) = task.task.as_export_mut().and_then(|t| t.output.as_mut()) { if let Some(pat) = task.task.as_export_mut().and_then(|t| t.output.as_mut()) {
let rel = pat.clone().relative_to(self.lock_dir.as_ref().unwrap()); let rel = pat.clone().relative_to(self.lock_dir.as_ref().unwrap());
@ -53,6 +55,7 @@ impl LockFile {
} }
} }
/// Replaces the route by the id.
pub fn replace_route(&mut self, route: ProjectRoute) { pub fn replace_route(&mut self, route: ProjectRoute) {
let id = route.id.clone(); let id = route.id.clone();
@ -60,6 +63,7 @@ impl LockFile {
self.route.push(route); self.route.push(route);
} }
/// Sorts the documents and tasks. Routes keep their order.
pub fn sort(&mut self) { pub fn sort(&mut self) {
self.document.sort_by(|a, b| a.id.cmp(&b.id)); self.document.sort_by(|a, b| a.id.cmp(&b.id));
self.task self.task
@ -67,6 +71,7 @@ impl LockFile {
// the route's order is important, so we don't sort them. // the route's order is important, so we don't sort them.
} }
/// Serializes the lock file.
pub fn serialize_resolve(&self) -> String { pub fn serialize_resolve(&self) -> String {
let content = toml::Table::try_from(self).unwrap(); let content = toml::Table::try_from(self).unwrap();
@ -139,6 +144,7 @@ impl LockFile {
} }
} }
/// Updates the lock file.
pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> { pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> {
let fs = tinymist_std::fs::flock::Filesystem::new(cwd.to_owned()); let fs = tinymist_std::fs::flock::Filesystem::new(cwd.to_owned());
@ -210,6 +216,7 @@ impl LockFile {
Ok(()) Ok(())
} }
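`LockFile::update` follows the usual lock-file discipline: acquire the on-disk lock, deserialize, let a closure mutate the in-memory state, then persist. A self-contained, heavily simplified version of that read-modify-write flow (no real file locking, hypothetical field names; the real code uses `tinymist_std::fs::flock` and TOML):

```rust
use std::fs;
use std::io;
use std::path::Path;

/// A minimal stand-in for the lock file contents; field names are made up.
#[derive(Default)]
struct MiniLock {
    tasks: Vec<String>,
}

impl MiniLock {
    /// Read, let the closure mutate, then write back, like `LockFile::update`.
    /// A real implementation would hold an OS-level file lock around all of it.
    fn update(dir: &Path, f: impl FnOnce(&mut Self) -> io::Result<()>) -> io::Result<()> {
        let path = dir.join("tinymist-example.lock");
        let mut lock = match fs::read_to_string(&path) {
            Ok(text) => MiniLock {
                tasks: text.lines().map(|line| line.to_string()).collect(),
            },
            Err(_) => MiniLock::default(),
        };
        f(&mut lock)?;
        fs::write(&path, lock.tasks.join("\n"))
    }
}

fn main() -> io::Result<()> {
    MiniLock::update(&std::env::temp_dir(), |lock| {
        lock.tasks.push("export-pdf".to_string());
        Ok(())
    })
}
```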
/// Reads the lock file.
pub fn read(dir: &Path) -> Result<Self> { pub fn read(dir: &Path) -> Result<Self> {
let fs = tinymist_std::fs::flock::Filesystem::new(dir.to_owned()); let fs = tinymist_std::fs::flock::Filesystem::new(dir.to_owned());
@ -246,12 +253,14 @@ enum LockUpdate {
Route(ProjectRoute), Route(ProjectRoute),
} }
/// A lock file update.
pub struct LockFileUpdate { pub struct LockFileUpdate {
root: Arc<Path>, root: Arc<Path>,
updates: Vec<LockUpdate>, updates: Vec<LockUpdate>,
} }
impl LockFileUpdate { impl LockFileUpdate {
/// Records a compiled document in the lock file update.
pub fn compiled(&mut self, world: &LspWorld, ctx: CtxPath) -> Option<Id> { pub fn compiled(&mut self, world: &LspWorld, ctx: CtxPath) -> Option<Id> {
let id = Id::from_world(world, ctx)?; let id = Id::from_world(world, ctx)?;
@ -299,10 +308,12 @@ impl LockFileUpdate {
Some(id) Some(id)
} }
/// Adds a task to the lock file.
pub fn task(&mut self, task: ApplyProjectTask) { pub fn task(&mut self, task: ApplyProjectTask) {
self.updates.push(LockUpdate::Task(task)); self.updates.push(LockUpdate::Task(task));
} }
/// Adds a material to the lock file.
pub fn update_materials(&mut self, doc_id: Id, files: EcoVec<ImmutPath>) { pub fn update_materials(&mut self, doc_id: Id, files: EcoVec<ImmutPath>) {
self.updates self.updates
.push(LockUpdate::Material(ProjectPathMaterial::from_deps( .push(LockUpdate::Material(ProjectPathMaterial::from_deps(
@ -310,6 +321,7 @@ impl LockFileUpdate {
))); )));
} }
/// Adds a route to the lock file.
pub fn route(&mut self, doc_id: Id, priority: u32) { pub fn route(&mut self, doc_id: Id, priority: u32) {
self.updates.push(LockUpdate::Route(ProjectRoute { self.updates.push(LockUpdate::Route(ProjectRoute {
id: doc_id, id: doc_id,
@ -317,6 +329,7 @@ impl LockFileUpdate {
})); }));
} }
/// Commits the lock file.
pub fn commit(self) { pub fn commit(self) {
crate::LockFile::update(&self.root, |l| { crate::LockFile::update(&self.root, |l| {
let root: EcoString = unix_slash(&self.root).into(); let root: EcoString = unix_slash(&self.root).into();
@ -367,6 +380,9 @@ impl LockFileUpdate {
} }
} }
/// A version string conforming to the [semver] standard.
///
/// [semver]: https://semver.org
struct Version<'a>(&'a str); struct Version<'a>(&'a str);
impl PartialEq for Version<'_> { impl PartialEq for Version<'_> {
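The comparison body of the `Version` wrapper is not part of this hunk, so the snippet below is only a generic illustration of comparing dotted numeric version strings component-wise; it is not the crate's implementation:

```rust
use std::cmp::Ordering;

/// Compares dotted numeric version strings component-wise; non-numeric parts
/// count as zero. Purely illustrative.
fn cmp_versions(a: &str, b: &str) -> Ordering {
    let parse = |v: &str| -> Vec<u64> {
        v.split('.').map(|part| part.parse().unwrap_or(0)).collect()
    };
    parse(a).cmp(&parse(b))
}

fn main() {
    assert_eq!(cmp_versions("0.13.2", "0.13.10"), Ordering::Less);
    assert_eq!(cmp_versions("1.2.0", "1.2.0"), Ordering::Equal);
}
```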

View file

@ -28,7 +28,7 @@ use typst_shim::{syntax::LinkedNodeExt, utils::hash128};
use unscanny::Scanner; use unscanny::Scanner;
use crate::adt::interner::Interned; use crate::adt::interner::Interned;
use crate::analysis::{BuiltinTy, LocalContext, PathPreference, Ty}; use crate::analysis::{BuiltinTy, LocalContext, PathKind, Ty};
use crate::completion::{ use crate::completion::{
Completion, CompletionCommand, CompletionContextKey, CompletionItem, CompletionKind, Completion, CompletionCommand, CompletionContextKey, CompletionItem, CompletionKind,
DEFAULT_POSTFIX_SNIPPET, DEFAULT_PREFIX_SNIPPET, EcoTextEdit, ParsedSnippet, PostfixSnippet, DEFAULT_POSTFIX_SNIPPET, DEFAULT_PREFIX_SNIPPET, EcoTextEdit, ParsedSnippet, PostfixSnippet,
@ -632,7 +632,7 @@ impl CompletionPair<'_, '_, '_> {
self.package_completions(all_versions); self.package_completions(all_versions);
return Some(()); return Some(());
} else { } else {
let paths = self.complete_path(&crate::analysis::PathPreference::Source { let paths = self.complete_path(&crate::analysis::PathKind::Source {
allow_package: true, allow_package: true,
}); });
// todo: remove ctx.completions // todo: remove ctx.completions

View file

@ -4,7 +4,7 @@ use tinymist_world::vfs::WorkspaceResolver;
use super::*; use super::*;
impl CompletionPair<'_, '_, '_> { impl CompletionPair<'_, '_, '_> {
pub fn complete_path(&mut self, preference: &PathPreference) -> Option<Vec<CompletionItem>> { pub fn complete_path(&mut self, preference: &PathKind) -> Option<Vec<CompletionItem>> {
let id = self.cursor.source.id(); let id = self.cursor.source.id();
if WorkspaceResolver::is_package_file(id) { if WorkspaceResolver::is_package_file(id) {
return None; return None;

View file

@ -33,7 +33,7 @@ use super::{LspQuerySnapshot, TypeEnv};
use crate::adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot}; use crate::adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot};
use crate::analysis::prelude::*; use crate::analysis::prelude::*;
use crate::analysis::{ use crate::analysis::{
AnalysisStats, BibInfo, CompletionFeat, Definition, PathPreference, QueryStatGuard, AnalysisStats, BibInfo, CompletionFeat, Definition, PathKind, QueryStatGuard,
SemanticTokenCache, SemanticTokenContext, SemanticTokens, Signature, SignatureTarget, Ty, SemanticTokenCache, SemanticTokenContext, SemanticTokens, Signature, SignatureTarget, Ty,
TypeInfo, analyze_signature, bib_info, definition, post_type_check, TypeInfo, analyze_signature, bib_info, definition, post_type_check,
}; };
@ -339,16 +339,13 @@ impl LocalContext {
} }
/// Get all the source files in the workspace. /// Get all the source files in the workspace.
pub(crate) fn completion_files( pub(crate) fn completion_files(&self, pref: &PathKind) -> impl Iterator<Item = &TypstFileId> {
&self,
pref: &PathPreference,
) -> impl Iterator<Item = &TypstFileId> {
let regexes = pref.ext_matcher(); let regexes = pref.ext_matcher();
self.caches self.caches
.completion_files .completion_files
.get_or_init(|| { .get_or_init(|| {
if let Some(root) = self.world.entry_state().workspace_root() { if let Some(root) = self.world.entry_state().workspace_root() {
scan_workspace_files(&root, PathPreference::Special.ext_matcher(), |path| { scan_workspace_files(&root, PathKind::Special.ext_matcher(), |path| {
WorkspaceResolver::workspace_file(Some(&root), VirtualPath::new(path)) WorkspaceResolver::workspace_file(Some(&root), VirtualPath::new(path))
}) })
} else { } else {
@ -368,7 +365,7 @@ impl LocalContext {
/// Get all the source files in the workspace. /// Get all the source files in the workspace.
pub fn source_files(&self) -> &Vec<TypstFileId> { pub fn source_files(&self) -> &Vec<TypstFileId> {
self.caches.root_files.get_or_init(|| { self.caches.root_files.get_or_init(|| {
self.completion_files(&PathPreference::Source { self.completion_files(&PathKind::Source {
allow_package: false, allow_package: false,
}) })
.copied() .copied()
@ -391,7 +388,7 @@ impl LocalContext {
/// Get all depended files in the workspace, inclusively. /// Get all depended files in the workspace, inclusively.
pub fn depended_source_files(&self) -> EcoVec<TypstFileId> { pub fn depended_source_files(&self) -> EcoVec<TypstFileId> {
let mut ids = self.depended_files(); let mut ids = self.depended_files();
let preference = PathPreference::Source { let preference = PathKind::Source {
allow_package: false, allow_package: false,
}; };
ids.retain(|id| preference.is_match(id.vpath().as_rooted_path())); ids.retain(|id| preference.is_match(id.vpath().as_rooted_path()));

View file

@ -311,11 +311,11 @@ impl<'a> PostTypeChecker<'a> {
crate::log_debug_ct!("post check target iterated: {:?}", resp.bounds); crate::log_debug_ct!("post check target iterated: {:?}", resp.bounds);
Some(resp.finalize()) Some(resp.finalize())
} }
SyntaxContext::ImportPath(..) | SyntaxContext::IncludePath(..) => Some(Ty::Builtin( SyntaxContext::ImportPath(..) | SyntaxContext::IncludePath(..) => {
BuiltinTy::Path(crate::ty::PathPreference::Source { Some(Ty::Builtin(BuiltinTy::Path(crate::ty::PathKind::Source {
allow_package: true, allow_package: true,
}), })))
)), }
SyntaxContext::VarAccess(VarClass::Ident(node)) SyntaxContext::VarAccess(VarClass::Ident(node))
| SyntaxContext::VarAccess(VarClass::FieldAccess(node)) | SyntaxContext::VarAccess(VarClass::FieldAccess(node))
| SyntaxContext::VarAccess(VarClass::DotAccess(node)) | SyntaxContext::VarAccess(VarClass::DotAccess(node))

View file

@ -248,7 +248,7 @@ impl HoverWorker<'_> {
args: vec![JsonValue::String(target.to_string())], args: vec![JsonValue::String(target.to_string())],
}], }],
}); });
if let Some(kind) = PathPreference::from_ext(target.path()) { if let Some(kind) = PathKind::from_ext(target.path()) {
self.def.push(format!("A `{kind:?}` file.")); self.def.push(format!("A `{kind:?}` file."));
} }
} }
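The renamed `PathKind` is used here to classify a linked file by its extension for hover text. A self-contained toy classifier showing the same `from_ext` idea (the variants and extension lists below are made up, not the crate's):

```rust
use std::path::Path;

/// A reduced path classifier; the variants and extension lists are made up.
#[derive(Debug, PartialEq)]
enum PathKind {
    Source,
    Image,
    Other,
}

impl PathKind {
    fn from_ext(path: &Path) -> Option<Self> {
        let ext = path.extension()?.to_str()?.to_ascii_lowercase();
        Some(match ext.as_str() {
            "typ" => PathKind::Source,
            "png" | "jpg" | "jpeg" | "svg" => PathKind::Image,
            _ => PathKind::Other,
        })
    }
}

fn main() {
    assert_eq!(PathKind::from_ext(Path::new("main.typ")), Some(PathKind::Source));
    assert_eq!(PathKind::from_ext(Path::new("figure.png")), Some(PathKind::Image));
    assert_eq!(PathKind::from_ext(Path::new("README")), None);
}
```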

View file

@ -127,7 +127,6 @@ pub trait StatefulRequest {
fn request(self, ctx: &mut LocalContext, graph: LspComputeGraph) -> Option<Self::Response>; fn request(self, ctx: &mut LocalContext, graph: LspComputeGraph) -> Option<Self::Response>;
} }
#[allow(missing_docs)]
mod polymorphic { mod polymorphic {
use completion::CompletionList; use completion::CompletionList;
use lsp_types::TextEdit; use lsp_types::TextEdit;
@ -138,6 +137,7 @@ mod polymorphic {
use super::prelude::*; use super::prelude::*;
use super::*; use super::*;
/// A request to run an export task.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct OnExportRequest { pub struct OnExportRequest {
/// The path of the document to export. /// The path of the document to export.
@ -148,68 +148,111 @@ mod polymorphic {
pub open: bool, pub open: bool,
} }
/// A request to format the document.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct FormattingRequest { pub struct FormattingRequest {
/// The path of the document to get semantic tokens for. /// The path of the document to get semantic tokens for.
pub path: PathBuf, pub path: PathBuf,
} }
/// A request to get the server info.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ServerInfoRequest {} pub struct ServerInfoRequest {}
/// The response to the server info request.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ServerInfoResponse { pub struct ServerInfoResponse {
/// The root path of the server.
pub root: Option<PathBuf>, pub root: Option<PathBuf>,
/// The font paths of the server.
pub font_paths: Vec<PathBuf>, pub font_paths: Vec<PathBuf>,
/// The inputs of the server.
pub inputs: Dict, pub inputs: Dict,
/// The statistics of the server.
pub stats: HashMap<String, String>, pub stats: HashMap<String, String>,
} }
/// The feature of the fold request.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum FoldRequestFeature { pub enum FoldRequestFeature {
/// Serves the request with the first pinned entry.
PinnedFirst, PinnedFirst,
/// Makes the items unique.
Unique, Unique,
/// Merges the items.
Mergeable, Mergeable,
/// Makes the items unique without context.
ContextFreeUnique, ContextFreeUnique,
} }
/// The analysis request.
#[derive(Debug, Clone, strum::IntoStaticStr)] #[derive(Debug, Clone, strum::IntoStaticStr)]
pub enum CompilerQueryRequest { pub enum CompilerQueryRequest {
/// A request to run an export task.
OnExport(OnExportRequest), OnExport(OnExportRequest),
/// A request to get the hover information.
Hover(HoverRequest), Hover(HoverRequest),
/// A request to go to the definition.
GotoDefinition(GotoDefinitionRequest), GotoDefinition(GotoDefinitionRequest),
/// A request to go to the declaration.
GotoDeclaration(GotoDeclarationRequest), GotoDeclaration(GotoDeclarationRequest),
/// A request to get the references.
References(ReferencesRequest), References(ReferencesRequest),
/// A request to get the inlay hints.
InlayHint(InlayHintRequest), InlayHint(InlayHintRequest),
/// A request to get the document colors.
DocumentColor(DocumentColorRequest), DocumentColor(DocumentColorRequest),
/// A request to get the document links.
DocumentLink(DocumentLinkRequest), DocumentLink(DocumentLinkRequest),
/// A request to get the document highlights.
DocumentHighlight(DocumentHighlightRequest), DocumentHighlight(DocumentHighlightRequest),
/// A request to get the color presentations.
ColorPresentation(ColorPresentationRequest), ColorPresentation(ColorPresentationRequest),
/// A request to get the code actions.
CodeAction(CodeActionRequest), CodeAction(CodeActionRequest),
/// A request to get the code lenses.
CodeLens(CodeLensRequest), CodeLens(CodeLensRequest),
/// A request to get the completions.
Completion(CompletionRequest), Completion(CompletionRequest),
/// A request to get the signature helps.
SignatureHelp(SignatureHelpRequest), SignatureHelp(SignatureHelpRequest),
/// A request to rename.
Rename(RenameRequest), Rename(RenameRequest),
/// A request to determine the files to be renamed.
WillRenameFiles(WillRenameFilesRequest), WillRenameFiles(WillRenameFilesRequest),
/// A request to prepare the rename.
PrepareRename(PrepareRenameRequest), PrepareRename(PrepareRenameRequest),
/// A request to get the document symbols.
DocumentSymbol(DocumentSymbolRequest), DocumentSymbol(DocumentSymbolRequest),
/// A request to get the symbols.
Symbol(SymbolRequest), Symbol(SymbolRequest),
/// A request to get full semantic tokens.
SemanticTokensFull(SemanticTokensFullRequest), SemanticTokensFull(SemanticTokensFullRequest),
/// A request to get the semantic tokens delta.
SemanticTokensDelta(SemanticTokensDeltaRequest), SemanticTokensDelta(SemanticTokensDeltaRequest),
/// A request to format the document.
Formatting(FormattingRequest), Formatting(FormattingRequest),
/// A request to get the folding ranges.
FoldingRange(FoldingRangeRequest), FoldingRange(FoldingRangeRequest),
/// A request to get the selection ranges.
SelectionRange(SelectionRangeRequest), SelectionRange(SelectionRangeRequest),
/// A request to interact with the code context.
InteractCodeContext(InteractCodeContextRequest), InteractCodeContext(InteractCodeContextRequest),
/// A request to get extra text edits on enter.
OnEnter(OnEnterRequest), OnEnter(OnEnterRequest),
/// A request to get the document metrics.
DocumentMetrics(DocumentMetricsRequest), DocumentMetrics(DocumentMetricsRequest),
/// A request to get the workspace labels.
WorkspaceLabel(WorkspaceLabelRequest), WorkspaceLabel(WorkspaceLabelRequest),
/// A request to get the server info.
ServerInfo(ServerInfoRequest), ServerInfo(ServerInfoRequest),
} }
impl CompilerQueryRequest { impl CompilerQueryRequest {
/// Gets the feature of the fold request.
pub fn fold_feature(&self) -> FoldRequestFeature { pub fn fold_feature(&self) -> FoldRequestFeature {
use FoldRequestFeature::*; use FoldRequestFeature::*;
match self { match self {
@ -247,6 +290,7 @@ mod polymorphic {
} }
} }
/// Gets the associated path of the request.
pub fn associated_path(&self) -> Option<&Path> { pub fn associated_path(&self) -> Option<&Path> {
Some(match self { Some(match self {
Self::OnExport(..) => return None, Self::OnExport(..) => return None,
@ -284,39 +328,69 @@ mod polymorphic {
} }
} }
/// The response to the compiler query request.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)] #[serde(untagged)]
pub enum CompilerQueryResponse { pub enum CompilerQueryResponse {
/// The response to the on export request.
OnExport(Option<PathBuf>), OnExport(Option<PathBuf>),
/// The response to the hover request.
Hover(Option<Hover>), Hover(Option<Hover>),
/// The response to the goto definition request.
GotoDefinition(Option<GotoDefinitionResponse>), GotoDefinition(Option<GotoDefinitionResponse>),
/// The response to the goto declaration request.
GotoDeclaration(Option<GotoDeclarationResponse>), GotoDeclaration(Option<GotoDeclarationResponse>),
/// The response to the references request.
References(Option<Vec<LspLocation>>), References(Option<Vec<LspLocation>>),
/// The response to the inlay hint request.
InlayHint(Option<Vec<InlayHint>>), InlayHint(Option<Vec<InlayHint>>),
/// The response to the document color request.
DocumentColor(Option<Vec<ColorInformation>>), DocumentColor(Option<Vec<ColorInformation>>),
/// The response to the document link request.
DocumentLink(Option<Vec<DocumentLink>>), DocumentLink(Option<Vec<DocumentLink>>),
/// The response to the document highlight request.
DocumentHighlight(Option<Vec<DocumentHighlight>>), DocumentHighlight(Option<Vec<DocumentHighlight>>),
/// The response to the color presentation request.
ColorPresentation(Option<Vec<ColorPresentation>>), ColorPresentation(Option<Vec<ColorPresentation>>),
/// The response to the code action request.
CodeAction(Option<Vec<CodeAction>>), CodeAction(Option<Vec<CodeAction>>),
/// The response to the code lens request.
CodeLens(Option<Vec<CodeLens>>), CodeLens(Option<Vec<CodeLens>>),
/// The response to the completion request.
Completion(Option<CompletionList>), Completion(Option<CompletionList>),
/// The response to the signature help request.
SignatureHelp(Option<SignatureHelp>), SignatureHelp(Option<SignatureHelp>),
/// The response to the prepare rename request.
PrepareRename(Option<PrepareRenameResponse>), PrepareRename(Option<PrepareRenameResponse>),
/// The response to the rename request.
Rename(Option<WorkspaceEdit>), Rename(Option<WorkspaceEdit>),
/// The response to the will rename files request.
WillRenameFiles(Option<WorkspaceEdit>), WillRenameFiles(Option<WorkspaceEdit>),
/// The response to the document symbol request.
DocumentSymbol(Option<DocumentSymbolResponse>), DocumentSymbol(Option<DocumentSymbolResponse>),
/// The response to the symbol request.
Symbol(Option<Vec<SymbolInformation>>), Symbol(Option<Vec<SymbolInformation>>),
/// The response to the workspace label request.
WorkspaceLabel(Option<Vec<SymbolInformation>>), WorkspaceLabel(Option<Vec<SymbolInformation>>),
/// The response to the semantic tokens full request.
SemanticTokensFull(Option<SemanticTokensResult>), SemanticTokensFull(Option<SemanticTokensResult>),
/// The response to the semantic tokens delta request.
SemanticTokensDelta(Option<SemanticTokensFullDeltaResult>), SemanticTokensDelta(Option<SemanticTokensFullDeltaResult>),
/// The response to the formatting request.
Formatting(Option<Vec<TextEdit>>), Formatting(Option<Vec<TextEdit>>),
/// The response to the folding range request.
FoldingRange(Option<Vec<FoldingRange>>), FoldingRange(Option<Vec<FoldingRange>>),
/// The response to the selection range request.
SelectionRange(Option<Vec<SelectionRange>>), SelectionRange(Option<Vec<SelectionRange>>),
/// The response to the interact code context request.
InteractCodeContext(Option<Vec<Option<InteractCodeContextResponse>>>), InteractCodeContext(Option<Vec<Option<InteractCodeContextResponse>>>),
/// The response to the on enter request.
OnEnter(Option<Vec<TextEdit>>), OnEnter(Option<Vec<TextEdit>>),
/// The response to the document metrics request.
DocumentMetrics(Option<DocumentMetricsResponse>), DocumentMetrics(Option<DocumentMetricsResponse>),
/// The response to the server info request.
ServerInfo(Option<HashMap<String, ServerInfoResponse>>), ServerInfo(Option<HashMap<String, ServerInfoResponse>>),
} }
} }
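As a rough illustration (not part of this commit), the two accessors documented above are enough to route a query by file; only the signatures shown in this hunk are assumed, and the helper name is hypothetical.
// Sketch: check whether a mergeable query targets a particular file.
fn is_mergeable_for(req: &CompilerQueryRequest, file: &std::path::Path) -> bool {
    matches!(req.fold_feature(), FoldRequestFeature::Mergeable)
        && req.associated_path() == Some(file)
}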

View file

@ -36,5 +36,5 @@ pub use crate::lsp_typst_boundary::{
to_typst_range, to_typst_range,
}; };
pub use crate::syntax::{Decl, DefKind, classify_syntax}; pub use crate::syntax::{Decl, DefKind, classify_syntax};
pub(crate) use crate::ty::PathPreference; pub(crate) use crate::ty::PathKind;
pub use crate::{SemanticRequest, StatefulRequest}; pub use crate::{SemanticRequest, StatefulRequest};

View file

@ -1,5 +1,3 @@
#![allow(missing_docs)]
use std::ops::DerefMut; use std::ops::DerefMut;
use parking_lot::Mutex; use parking_lot::Mutex;

View file

@ -5,13 +5,18 @@ use tinymist_world::package::PackageSpec;
use crate::{adt::interner::Interned, prelude::*}; use crate::{adt::interner::Interned, prelude::*};
/// The information for the index.
#[derive(Default)] #[derive(Default)]
pub struct IndexInfo { pub struct IndexInfo {
/// The paths in the source.
pub(crate) paths: FxHashSet<Interned<str>>, pub(crate) paths: FxHashSet<Interned<str>>,
/// The packages in the source.
pub(crate) packages: FxHashSet<PackageSpec>, pub(crate) packages: FxHashSet<PackageSpec>,
/// The identifiers in the source.
pub(crate) identifiers: FxHashSet<Interned<str>>, pub(crate) identifiers: FxHashSet<Interned<str>>,
} }
/// Gets the index information for the source.
#[typst_macros::time(span = src.root().span())] #[typst_macros::time(span = src.root().span())]
#[comemo::memoize] #[comemo::memoize]
pub fn get_index_info(src: &Source) -> Arc<IndexInfo> { pub fn get_index_info(src: &Source) -> Arc<IndexInfo> {
@ -23,11 +28,13 @@ pub fn get_index_info(src: &Source) -> Arc<IndexInfo> {
Arc::new(worker.info) Arc::new(worker.info)
} }
/// The worker for the index.
struct IndexWorker { struct IndexWorker {
info: IndexInfo, info: IndexInfo,
} }
impl IndexWorker { impl IndexWorker {
/// Visits the node.
fn visit(&mut self, node: &SyntaxNode) { fn visit(&mut self, node: &SyntaxNode) {
match node.cast::<ast::Expr>() { match node.cast::<ast::Expr>() {
Some(ast::Expr::Str(path_str)) => { Some(ast::Expr::Str(path_str)) => {

View file

@ -47,49 +47,65 @@ pub(crate) fn get_lexical_hierarchy(
res.map(|_| worker.stack.pop().unwrap().1) res.map(|_| worker.stack.pop().unwrap().1)
} }
/// The kind of a variable.
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum LexicalVarKind { pub enum LexicalVarKind {
/// A value reference.
/// `#foo` /// `#foo`
/// ^^^ /// ^^^
ValRef, ValRef,
/// A label reference.
/// `@foo` /// `@foo`
/// ^^^ /// ^^^
LabelRef, LabelRef,
/// A label.
/// `<foo>` /// `<foo>`
/// ^^^ /// ^^^
Label, Label,
/// A bib key.
/// `x:` /// `x:`
/// ^^ /// ^^
BibKey, BibKey,
/// A variable.
/// `let foo` /// `let foo`
/// ^^^ /// ^^^
Variable, Variable,
/// A function.
/// `let foo()` /// `let foo()`
/// ^^^ /// ^^^
Function, Function,
} }
/// The kind of a lexical hierarchy recognized by the analyzers.
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum LexicalKind { pub enum LexicalKind {
/// A heading.
Heading(i16), Heading(i16),
/// A variable.
Var(LexicalVarKind), Var(LexicalVarKind),
/// A block.
Block, Block,
/// A comment group.
CommentGroup, CommentGroup,
} }
impl LexicalKind { impl LexicalKind {
/// Creates a label.
const fn label() -> LexicalKind { const fn label() -> LexicalKind {
LexicalKind::Var(LexicalVarKind::Label) LexicalKind::Var(LexicalVarKind::Label)
} }
/// Creates a function.
const fn function() -> LexicalKind { const fn function() -> LexicalKind {
LexicalKind::Var(LexicalVarKind::Function) LexicalKind::Var(LexicalVarKind::Function)
} }
/// Creates a variable.
const fn variable() -> LexicalKind { const fn variable() -> LexicalKind {
LexicalKind::Var(LexicalVarKind::Variable) LexicalKind::Var(LexicalVarKind::Variable)
} }
/// Checks if the kind is a valid LSP symbol.
pub fn is_valid_lsp_symbol(&self) -> bool { pub fn is_valid_lsp_symbol(&self) -> bool {
!matches!(self, LexicalKind::Block | LexicalKind::CommentGroup) !matches!(self, LexicalKind::Block | LexicalKind::CommentGroup)
} }
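For illustration (not from the commit), `is_valid_lsp_symbol` is the filter applied before exposing lexical items as LSP symbols; a minimal sketch with a hypothetical helper name:
// Sketch: keep only kinds that may appear in an LSP symbol tree.
fn lsp_symbol_kinds(kinds: impl IntoIterator<Item = LexicalKind>) -> Vec<LexicalKind> {
    kinds
        .into_iter()
        .filter(|kind| kind.is_valid_lsp_symbol())
        .collect()
}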

View file

@ -2,8 +2,6 @@
//! //!
//! This module must hide all **AST details** from the rest of the codebase. //! This module must hide all **AST details** from the rest of the codebase.
#![allow(missing_docs)]
pub(crate) mod docs; pub(crate) mod docs;
pub(crate) mod expr; pub(crate) mod expr;
pub(crate) mod index; pub(crate) mod index;

View file

@ -397,7 +397,6 @@ impl<T, E: std::fmt::Display> WithContextUntyped<T> for Result<T, E> {
/// The error prelude. /// The error prelude.
pub mod prelude { pub mod prelude {
#![allow(missing_docs)]
use super::ErrKindExt; use super::ErrKindExt;
use crate::Error; use crate::Error;
@ -405,22 +404,27 @@ pub mod prelude {
pub use super::{IgnoreLogging, WithContext, WithContextUntyped}; pub use super::{IgnoreLogging, WithContext, WithContextUntyped};
pub use crate::{Result, bail}; pub use crate::{Result, bail};
/// Maps the given string error to an error.
pub fn map_string_err<T: ToString>(loc: &'static str) -> impl Fn(T) -> Error { pub fn map_string_err<T: ToString>(loc: &'static str) -> impl Fn(T) -> Error {
move |e| Error::new(loc, e.to_string().to_error_kind(), None) move |e| Error::new(loc, e.to_string().to_error_kind(), None)
} }
/// Maps the given error to an error.
pub fn map_into_err<S: ErrKindExt, T: Into<S>>(loc: &'static str) -> impl Fn(T) -> Error { pub fn map_into_err<S: ErrKindExt, T: Into<S>>(loc: &'static str) -> impl Fn(T) -> Error {
move |e| Error::new(loc, e.into().to_error_kind(), None) move |e| Error::new(loc, e.into().to_error_kind(), None)
} }
/// Maps the given error to an error.
pub fn map_err<T: ErrKindExt>(loc: &'static str) -> impl Fn(T) -> Error { pub fn map_err<T: ErrKindExt>(loc: &'static str) -> impl Fn(T) -> Error {
move |e| Error::new(loc, e.to_error_kind(), None) move |e| Error::new(loc, e.to_error_kind(), None)
} }
/// Wraps the given error.
pub fn wrap_err(loc: &'static str) -> impl Fn(Error) -> Error { pub fn wrap_err(loc: &'static str) -> impl Fn(Error) -> Error {
move |e| Error::new(loc, crate::ErrKind::Inner(e), None) move |e| Error::new(loc, crate::ErrKind::Inner(e), None)
} }
/// Maps the given string error to an error with arguments.
pub fn map_string_err_with_args< pub fn map_string_err_with_args<
T: ToString, T: ToString,
Args: IntoIterator<Item = (&'static str, String)>, Args: IntoIterator<Item = (&'static str, String)>,
@ -437,6 +441,7 @@ pub mod prelude {
} }
} }
/// Maps the given error to an error with arguments.
pub fn map_into_err_with_args< pub fn map_into_err_with_args<
S: ErrKindExt, S: ErrKindExt,
T: Into<S>, T: Into<S>,
@ -454,6 +459,7 @@ pub mod prelude {
} }
} }
/// Maps the given error to an error with arguments.
pub fn map_err_with_args<T: ErrKindExt, Args: IntoIterator<Item = (&'static str, String)>>( pub fn map_err_with_args<T: ErrKindExt, Args: IntoIterator<Item = (&'static str, String)>>(
loc: &'static str, loc: &'static str,
args: Args, args: Args,
@ -467,6 +473,7 @@ pub mod prelude {
} }
} }
/// Wraps the given error with arguments.
pub fn wrap_err_with_args<Args: IntoIterator<Item = (&'static str, String)>>( pub fn wrap_err_with_args<Args: IntoIterator<Item = (&'static str, String)>>(
loc: &'static str, loc: &'static str,
args: Args, args: Args,
@ -480,16 +487,20 @@ pub mod prelude {
} }
} }
/// Creates an error with arguments.
pub fn _error_once(loc: &'static str, args: Box<[(&'static str, String)]>) -> Error { pub fn _error_once(loc: &'static str, args: Box<[(&'static str, String)]>) -> Error {
Error::new(loc, crate::ErrKind::None, Some(args)) Error::new(loc, crate::ErrKind::None, Some(args))
} }
/// Creates an error with a message.
pub fn _msg(loc: &'static str, msg: EcoString) -> Error { pub fn _msg(loc: &'static str, msg: EcoString) -> Error {
Error::new(loc, crate::ErrKind::Msg(msg), None) Error::new(loc, crate::ErrKind::Msg(msg), None)
} }
/// Formats a string.
pub use ecow::eco_format as _eco_format; pub use ecow::eco_format as _eco_format;
/// Bails with the given arguments.
#[macro_export] #[macro_export]
macro_rules! bail { macro_rules! bail {
($($arg:tt)+) => {{ ($($arg:tt)+) => {{
@ -498,6 +509,7 @@ pub mod prelude {
}}; }};
} }
/// Creates an error with a message.
#[macro_export] #[macro_export]
macro_rules! error_once { macro_rules! error_once {
($loc:expr, $($arg_key:ident: $arg:expr),+ $(,)?) => { ($loc:expr, $($arg_key:ident: $arg:expr),+ $(,)?) => {
@ -508,6 +520,7 @@ pub mod prelude {
}; };
} }
/// Maps the given error to an error with arguments.
#[macro_export] #[macro_export]
macro_rules! error_once_map { macro_rules! error_once_map {
($loc:expr, $($arg_key:ident: $arg:expr),+ $(,)?) => { ($loc:expr, $($arg_key:ident: $arg:expr),+ $(,)?) => {
@ -518,6 +531,7 @@ pub mod prelude {
}; };
} }
/// Maps the given string error to an error with arguments.
#[macro_export] #[macro_export]
macro_rules! error_once_map_string { macro_rules! error_once_map_string {
($loc:expr, $($arg_key:ident: $arg:expr),+ $(,)?) => { ($loc:expr, $($arg_key:ident: $arg:expr),+ $(,)?) => {

View file

@ -1,3 +1,5 @@
//! The computations for the tasks.
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
@ -28,13 +30,18 @@ pub mod text;
#[cfg(feature = "text")] #[cfg(feature = "text")]
pub use text::*; pub use text::*;
/// The flag indicating that the svg export is needed.
pub struct SvgFlag; pub struct SvgFlag;
/// The flag indicating that the png export is needed.
pub struct PngFlag; pub struct PngFlag;
/// The flag indicating that the html export is needed.
pub struct HtmlFlag; pub struct HtmlFlag;
/// The computation to check if the export is needed.
pub struct ExportTimings; pub struct ExportTimings;
impl ExportTimings { impl ExportTimings {
/// Checks if the export is needed.
pub fn needs_run<F: CompilerFeat, D: typst::Document>( pub fn needs_run<F: CompilerFeat, D: typst::Document>(
snap: &CompileSnapshot<F>, snap: &CompileSnapshot<F>,
timing: Option<&TaskWhen>, timing: Option<&TaskWhen>,
@ -45,6 +52,7 @@ impl ExportTimings {
} }
} }
/// The computation for svg export.
pub struct SvgExport; pub struct SvgExport;
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for SvgExport { impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for SvgExport {
@ -80,6 +88,7 @@ impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for SvgExport {
// } // }
// } // }
/// The computation for png export.
pub struct PngExport; pub struct PngExport;
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PngExport { impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PngExport {
@ -130,6 +139,7 @@ impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PngExport {
// } // }
// } // }
/// The computation for html export.
pub struct HtmlExport; pub struct HtmlExport;
impl<F: CompilerFeat> ExportComputation<F, TypstHtmlDocument> for HtmlExport { impl<F: CompilerFeat> ExportComputation<F, TypstHtmlDocument> for HtmlExport {
@ -153,6 +163,7 @@ impl<F: CompilerFeat> ExportComputation<F, TypstHtmlDocument> for HtmlExport {
// } // }
// } // }
/// The computation for document query.
pub struct DocumentQuery; pub struct DocumentQuery;
impl DocumentQuery { impl DocumentQuery {
@ -210,6 +221,7 @@ impl DocumentQuery {
.collect()) .collect())
} }
/// Queries the document and returns the result as a value.
pub fn doc_get_as_value<F: CompilerFeat>( pub fn doc_get_as_value<F: CompilerFeat>(
g: &Arc<WorldComputeGraph<F>>, g: &Arc<WorldComputeGraph<F>>,
doc: &TypstDocument, doc: &TypstDocument,
@ -221,6 +233,7 @@ impl DocumentQuery {
} }
} }
/// Queries the document and returns the result as a value.
pub fn get_as_value<F: CompilerFeat, D: typst::Document>( pub fn get_as_value<F: CompilerFeat, D: typst::Document>(
g: &Arc<WorldComputeGraph<F>>, g: &Arc<WorldComputeGraph<F>>,
doc: &Arc<D>, doc: &Arc<D>,

View file

@ -1,3 +1,5 @@
//! The computation for pdf export.
use tinymist_std::time::ToUtcDateTime; use tinymist_std::time::ToUtcDateTime;
pub use typst_pdf::PdfStandard as TypstPdfStandard; pub use typst_pdf::PdfStandard as TypstPdfStandard;
pub use typst_pdf::pdf; pub use typst_pdf::pdf;
@ -7,6 +9,7 @@ use typst_pdf::{PdfOptions, PdfStandards, Timestamp};
use super::*; use super::*;
use crate::model::ExportPdfTask; use crate::model::ExportPdfTask;
/// The computation for pdf export.
pub struct PdfExport; pub struct PdfExport;
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PdfExport { impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PdfExport {

View file

@ -1,3 +1,5 @@
//! The computation for text export.
use core::fmt; use core::fmt;
use std::sync::Arc; use std::sync::Arc;
use typst::html::{HtmlNode::*, tag}; use typst::html::{HtmlNode::*, tag};
@ -7,9 +9,11 @@ use tinymist_std::error::prelude::*;
use tinymist_std::typst::{TypstDocument, TypstPagedDocument}; use tinymist_std::typst::{TypstDocument, TypstPagedDocument};
use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph}; use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph};
/// The computation for text export.
pub struct TextExport; pub struct TextExport;
impl TextExport { impl TextExport {
/// Runs the computation on a document.
pub fn run_on_doc(doc: &TypstDocument) -> Result<String> { pub fn run_on_doc(doc: &TypstDocument) -> Result<String> {
Ok(format!("{}", FullTextDigest(doc))) Ok(format!("{}", FullTextDigest(doc)))
} }
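As a small, illustrative extension (not in the commit), the text export can back simple derived metrics; the function name is hypothetical and the module's existing imports are assumed.
// Sketch: a word count built on `TextExport::run_on_doc` documented above.
fn word_count(doc: &TypstDocument) -> Result<usize> {
    Ok(TextExport::run_on_doc(doc)?.split_whitespace().count())
}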

View file

@ -1,4 +1,4 @@
#![allow(missing_docs)] //! The task system of tinymist.
mod model; mod model;
pub use model::*; pub use model::*;

View file

@ -320,6 +320,7 @@ impl<'de> serde::Deserialize<'de> for ResourcePath {
} }
} }
/// The path context.
// todo: The ctx path looks not quite maintainable. But we only target to make // todo: The ctx path looks not quite maintainable. But we only target to make
// things correct, then back to make code good. // things correct, then back to make code good.
pub type CtxPath<'a, 'b> = (/* cwd */ &'a Path, /* lock_dir */ &'b Path); pub type CtxPath<'a, 'b> = (/* cwd */ &'a Path, /* lock_dir */ &'b Path);
@ -358,6 +359,8 @@ impl ResourcePath {
} }
} }
/// Converts the resource path to a path relative to the `base` (usually the
/// directory storing the lockfile).
pub fn relative_to(&self, base: &Path) -> Option<Self> { pub fn relative_to(&self, base: &Path) -> Option<Self> {
if self.0 == "file" { if self.0 == "file" {
let path = Path::new(&self.1); let path = Path::new(&self.1);

View file

@ -1,3 +1,5 @@
//! Shared arguments to create a world.
use core::fmt; use core::fmt;
use std::{ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
@ -14,7 +16,7 @@ use crate::EntryOpts;
const ENV_PATH_SEP: char = if cfg!(windows) { ';' } else { ':' }; const ENV_PATH_SEP: char = if cfg!(windows) { ';' } else { ':' };
/// The font arguments for the compiler. /// The font arguments for the world.
#[derive(Debug, Clone, Default, Parser, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, Clone, Default, Parser, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CompileFontArgs { pub struct CompileFontArgs {
@ -107,10 +109,12 @@ pub struct CompileOnceArgs {
} }
impl CompileOnceArgs { impl CompileOnceArgs {
/// Resolves the features.
pub fn resolve_features(&self) -> typst::Features { pub fn resolve_features(&self) -> typst::Features {
typst::Features::from_iter(self.features.iter().map(|f| (*f).into())) typst::Features::from_iter(self.features.iter().map(|f| (*f).into()))
} }
/// Resolves the inputs.
pub fn resolve_inputs(&self) -> Option<ImmutDict> { pub fn resolve_inputs(&self) -> Option<ImmutDict> {
if self.inputs.is_empty() { if self.inputs.is_empty() {
return None; return None;
@ -318,6 +322,7 @@ display_possible_values!(PdfStandard);
/// An in-development feature that may be changed or removed at any time. /// An in-development feature that may be changed or removed at any time.
#[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)] #[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
pub enum Feature { pub enum Feature {
/// The HTML feature.
Html, Html,
} }
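An illustrative sketch (not from this commit) of resolving the documented arguments before building a universe; only the signatures shown in this hunk are assumed, and the helper name is hypothetical.
// Sketch: turn parsed CLI arguments into typst features and input pairs.
fn resolve_world_config(args: &CompileOnceArgs) -> (typst::Features, Option<ImmutDict>) {
    // Maps feature flags such as `Feature::Html` into typst's feature set.
    let features = args.resolve_features();
    // Returns `None` when no `--input key=value` pairs were given.
    let inputs = args.resolve_inputs();
    (features, inputs)
}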

View file

@ -10,11 +10,12 @@ use crate::font::FontResolverImpl;
use crate::package::RegistryPathMapper; use crate::package::RegistryPathMapper;
use crate::package::registry::JsRegistry; use crate::package::registry::JsRegistry;
/// A world that provides access to the browser. /// A universe that provides access to the browser.
/// It is under development.
pub type TypstBrowserUniverse = crate::world::CompilerUniverse<BrowserCompilerFeat>; pub type TypstBrowserUniverse = crate::world::CompilerUniverse<BrowserCompilerFeat>;
/// A world that provides access to the browser.
pub type TypstBrowserWorld = crate::world::CompilerWorld<BrowserCompilerFeat>; pub type TypstBrowserWorld = crate::world::CompilerWorld<BrowserCompilerFeat>;
/// The feature of the browser world.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct BrowserCompilerFeat; pub struct BrowserCompilerFeat;
@ -26,6 +27,7 @@ impl crate::CompilerFeat for BrowserCompilerFeat {
} }
impl TypstBrowserUniverse { impl TypstBrowserUniverse {
/// Creates a new browser universe.
pub fn new( pub fn new(
root_dir: PathBuf, root_dir: PathBuf,
inputs: Option<Arc<LazyHash<TypstDict>>>, inputs: Option<Arc<LazyHash<TypstDict>>>,

View file

@ -36,6 +36,7 @@ pub struct WorldComputeGraph<F: CompilerFeat> {
/// A world computable trait. /// A world computable trait.
pub trait WorldComputable<F: CompilerFeat>: std::any::Any + Send + Sync + Sized { pub trait WorldComputable<F: CompilerFeat>: std::any::Any + Send + Sync + Sized {
/// The output type.
type Output: Send + Sync + 'static; type Output: Send + Sync + 'static;
/// The computation implementation. /// The computation implementation.
@ -128,6 +129,7 @@ impl<F: CompilerFeat> WorldComputeGraph<F> {
computed.get().cloned().map(WorldComputeEntry::cast) computed.get().cloned().map(WorldComputeEntry::cast)
} }
/// Provides an exact instance.
pub fn exact_provide<T: WorldComputable<F>>(&self, ins: Result<Arc<T::Output>>) { pub fn exact_provide<T: WorldComputable<F>>(&self, ins: Result<Arc<T::Output>>) {
if self.provide::<T>(ins).is_err() { if self.provide::<T>(ins).is_err() {
panic!( panic!(
@ -166,29 +168,39 @@ impl<F: CompilerFeat> WorldComputeGraph<F> {
} }
} }
/// Gets the world.
pub fn world(&self) -> &CompilerWorld<F> { pub fn world(&self) -> &CompilerWorld<F> {
&self.snap.world &self.snap.world
} }
/// Gets the registry.
pub fn registry(&self) -> &Arc<F::Registry> { pub fn registry(&self) -> &Arc<F::Registry> {
&self.snap.world.registry &self.snap.world.registry
} }
/// Gets the library.
pub fn library(&self) -> &typst::Library { pub fn library(&self) -> &typst::Library {
&self.snap.world.library &self.snap.world.library
} }
} }
/// A trait to detect the export of a document.
pub trait ExportDetection<F: CompilerFeat, D> { pub trait ExportDetection<F: CompilerFeat, D> {
/// The configuration type.
type Config: Send + Sync + 'static; type Config: Send + Sync + 'static;
/// Determines whether the export needs to be computed.
fn needs_run(graph: &Arc<WorldComputeGraph<F>>, config: &Self::Config) -> bool; fn needs_run(graph: &Arc<WorldComputeGraph<F>>, config: &Self::Config) -> bool;
} }
/// A trait to compute the export of a document.
pub trait ExportComputation<F: CompilerFeat, D> { pub trait ExportComputation<F: CompilerFeat, D> {
/// The output type.
type Output; type Output;
/// The configuration type.
type Config: Send + Sync + 'static; type Config: Send + Sync + 'static;
/// Runs the export computation.
fn run_with<C: WorldComputable<F, Output = Option<Arc<D>>>>( fn run_with<C: WorldComputable<F, Output = Option<Arc<D>>>>(
g: &Arc<WorldComputeGraph<F>>, g: &Arc<WorldComputeGraph<F>>,
config: &Self::Config, config: &Self::Config,
@ -198,6 +210,7 @@ pub trait ExportComputation<F: CompilerFeat, D> {
Self::run(g, doc, config) Self::run(g, doc, config)
} }
/// Runs the export computation with a caster.
fn cast_run<'a>( fn cast_run<'a>(
g: &Arc<WorldComputeGraph<F>>, g: &Arc<WorldComputeGraph<F>>,
doc: impl TryInto<&'a Arc<D>, Error = tinymist_std::Error>, doc: impl TryInto<&'a Arc<D>, Error = tinymist_std::Error>,
@ -209,6 +222,7 @@ pub trait ExportComputation<F: CompilerFeat, D> {
Self::run(g, doc.try_into()?, config) Self::run(g, doc.try_into()?, config)
} }
/// Runs the export computation.
fn run( fn run(
g: &Arc<WorldComputeGraph<F>>, g: &Arc<WorldComputeGraph<F>>,
doc: &Arc<D>, doc: &Arc<D>,
@ -216,6 +230,7 @@ pub trait ExportComputation<F: CompilerFeat, D> {
) -> Result<Self::Output>; ) -> Result<Self::Output>;
} }
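To make the trait concrete, a hypothetical implementation (not part of this commit) that exports the page count of a paged document; the `pages` field of `TypstPagedDocument` is an assumption about typst's API.
// Sketch: a minimal `ExportComputation` reporting the number of laid-out pages.
pub struct PageCountExport;

impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PageCountExport {
    type Output = usize;
    type Config = ();

    fn run(
        _graph: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstPagedDocument>,
        _config: &Self::Config,
    ) -> Result<Self::Output> {
        Ok(doc.pages.len())
    }
}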
/// A task that computes a configuration.
pub struct ConfigTask<T>(pub T); pub struct ConfigTask<T>(pub T);
impl<F: CompilerFeat, T: Send + Sync + 'static> WorldComputable<F> for ConfigTask<T> { impl<F: CompilerFeat, T: Send + Sync + 'static> WorldComputable<F> for ConfigTask<T> {
@ -227,13 +242,19 @@ impl<F: CompilerFeat, T: Send + Sync + 'static> WorldComputable<F> for ConfigTas
} }
} }
/// A task that computes a flag.
pub type FlagTask<T> = ConfigTask<TaskFlagBase<T>>; pub type FlagTask<T> = ConfigTask<TaskFlagBase<T>>;
/// A base task flag.
pub struct TaskFlagBase<T> { pub struct TaskFlagBase<T> {
/// Whether the task is enabled.
pub enabled: bool, pub enabled: bool,
/// The phantom data.
_phantom: std::marker::PhantomData<T>, _phantom: std::marker::PhantomData<T>,
} }
impl<T> FlagTask<T> { impl<T> FlagTask<T> {
/// Creates a new flag task.
pub fn flag(flag: bool) -> Arc<TaskFlagBase<T>> { pub fn flag(flag: bool) -> Arc<TaskFlagBase<T>> {
Arc::new(TaskFlagBase { Arc::new(TaskFlagBase {
enabled: flag, enabled: flag,
@ -242,18 +263,24 @@ impl<T> FlagTask<T> {
} }
} }
/// A task that compiles a paged document.
pub type PagedCompilationTask = CompilationTask<TypstPagedDocument>; pub type PagedCompilationTask = CompilationTask<TypstPagedDocument>;
/// A task that compiles an HTML document.
pub type HtmlCompilationTask = CompilationTask<TypstHtmlDocument>; pub type HtmlCompilationTask = CompilationTask<TypstHtmlDocument>;
/// A task that compiles a document.
pub struct CompilationTask<D>(std::marker::PhantomData<D>); pub struct CompilationTask<D>(std::marker::PhantomData<D>);
impl<D: typst::Document + Send + Sync + 'static> CompilationTask<D> { impl<D: typst::Document + Send + Sync + 'static> CompilationTask<D> {
/// Ensures the main document.
pub fn ensure_main<F: CompilerFeat>(world: &CompilerWorld<F>) -> SourceResult<()> { pub fn ensure_main<F: CompilerFeat>(world: &CompilerWorld<F>) -> SourceResult<()> {
let main_id = world.main_id(); let main_id = world.main_id();
let checked = main_id.ok_or_else(|| typst::diag::eco_format!("entry file is not set")); let checked = main_id.ok_or_else(|| typst::diag::eco_format!("entry file is not set"));
checked.at(Span::detached()).map(|_| ()) checked.at(Span::detached()).map(|_| ())
} }
/// Executes the compilation.
pub fn execute<F: CompilerFeat>(world: &CompilerWorld<F>) -> Warned<SourceResult<Arc<D>>> { pub fn execute<F: CompilerFeat>(world: &CompilerWorld<F>) -> Warned<SourceResult<Arc<D>>> {
let res = Self::ensure_main(world); let res = Self::ensure_main(world);
if let Err(err) = res { if let Err(err) = res {
@ -311,6 +338,7 @@ where
} }
} }
/// A task that computes an optional document.
pub struct OptionDocumentTask<D>(std::marker::PhantomData<D>); pub struct OptionDocumentTask<D>(std::marker::PhantomData<D>);
impl<F: CompilerFeat, D> WorldComputable<F> for OptionDocumentTask<D> impl<F: CompilerFeat, D> WorldComputable<F> for OptionDocumentTask<D>
@ -332,12 +360,14 @@ where
impl<D> OptionDocumentTask<D> where D: typst::Document + Send + Sync + 'static {} impl<D> OptionDocumentTask<D> where D: typst::Document + Send + Sync + 'static {}
/// The diagnostics collected from one compilation.
struct CompilationDiagnostics { struct CompilationDiagnostics {
errors: Option<EcoVec<typst::diag::SourceDiagnostic>>, errors: Option<EcoVec<typst::diag::SourceDiagnostic>>,
warnings: Option<EcoVec<typst::diag::SourceDiagnostic>>, warnings: Option<EcoVec<typst::diag::SourceDiagnostic>>,
} }
impl CompilationDiagnostics { impl CompilationDiagnostics {
/// Creates a new diagnostics from a result.
fn from_result<T>(result: &Option<Warned<SourceResult<T>>>) -> Self { fn from_result<T>(result: &Option<Warned<SourceResult<T>>>) -> Self {
let errors = result let errors = result
.as_ref() .as_ref()
@ -348,6 +378,7 @@ impl CompilationDiagnostics {
} }
} }
/// A task that computes the diagnostics of a document.
pub struct DiagnosticsTask { pub struct DiagnosticsTask {
paged: CompilationDiagnostics, paged: CompilationDiagnostics,
html: CompilationDiagnostics, html: CompilationDiagnostics,
@ -368,16 +399,19 @@ impl<F: CompilerFeat> WorldComputable<F> for DiagnosticsTask {
} }
impl DiagnosticsTask { impl DiagnosticsTask {
/// Gets the number of errors.
pub fn error_cnt(&self) -> usize { pub fn error_cnt(&self) -> usize {
self.paged.errors.as_ref().map_or(0, |e| e.len()) self.paged.errors.as_ref().map_or(0, |e| e.len())
+ self.html.errors.as_ref().map_or(0, |e| e.len()) + self.html.errors.as_ref().map_or(0, |e| e.len())
} }
/// Gets the number of warnings.
pub fn warning_cnt(&self) -> usize { pub fn warning_cnt(&self) -> usize {
self.paged.warnings.as_ref().map_or(0, |e| e.len()) self.paged.warnings.as_ref().map_or(0, |e| e.len())
+ self.html.warnings.as_ref().map_or(0, |e| e.len()) + self.html.warnings.as_ref().map_or(0, |e| e.len())
} }
/// Gets the diagnostics.
pub fn diagnostics(&self) -> impl Iterator<Item = &typst::diag::SourceDiagnostic> { pub fn diagnostics(&self) -> impl Iterator<Item = &typst::diag::SourceDiagnostic> {
self.paged self.paged
.errors .errors
@ -390,40 +424,42 @@ impl DiagnosticsTask {
} }
impl<F: CompilerFeat> WorldComputeGraph<F> { impl<F: CompilerFeat> WorldComputeGraph<F> {
/// Ensures the main document.
pub fn ensure_main(&self) -> SourceResult<()> { pub fn ensure_main(&self) -> SourceResult<()> {
CompilationTask::<TypstPagedDocument>::ensure_main(&self.snap.world) CompilationTask::<TypstPagedDocument>::ensure_main(&self.snap.world)
} }
/// Compile once from scratch. /// Compiles once from scratch.
pub fn pure_compile<D: ::typst::Document + Send + Sync + 'static>( pub fn pure_compile<D: ::typst::Document + Send + Sync + 'static>(
&self, &self,
) -> Warned<SourceResult<Arc<D>>> { ) -> Warned<SourceResult<Arc<D>>> {
CompilationTask::<D>::execute(&self.snap.world) CompilationTask::<D>::execute(&self.snap.world)
} }
/// Compile once from scratch. /// Compiles once from scratch.
pub fn compile(&self) -> Warned<SourceResult<Arc<TypstPagedDocument>>> { pub fn compile(&self) -> Warned<SourceResult<Arc<TypstPagedDocument>>> {
self.pure_compile() self.pure_compile()
} }
/// Compile to html once from scratch. /// Compiles to html once from scratch.
pub fn compile_html(&self) -> Warned<SourceResult<Arc<TypstHtmlDocument>>> { pub fn compile_html(&self) -> Warned<SourceResult<Arc<TypstHtmlDocument>>> {
self.pure_compile() self.pure_compile()
} }
/// Compile paged document with cache /// Compiles paged document with cache
pub fn shared_compile(self: &Arc<Self>) -> Result<Option<Arc<TypstPagedDocument>>> { pub fn shared_compile(self: &Arc<Self>) -> Result<Option<Arc<TypstPagedDocument>>> {
let doc = self.compute::<OptionDocumentTask<TypstPagedDocument>>()?; let doc = self.compute::<OptionDocumentTask<TypstPagedDocument>>()?;
Ok(doc.as_ref().clone()) Ok(doc.as_ref().clone())
} }
/// Compile HTML document with cache /// Compiles HTML document with cache
pub fn shared_compile_html(self: &Arc<Self>) -> Result<Option<Arc<TypstHtmlDocument>>> { pub fn shared_compile_html(self: &Arc<Self>) -> Result<Option<Arc<TypstHtmlDocument>>> {
let doc = self.compute::<OptionDocumentTask<TypstHtmlDocument>>()?; let doc = self.compute::<OptionDocumentTask<TypstHtmlDocument>>()?;
Ok(doc.as_ref().clone()) Ok(doc.as_ref().clone())
} }
/// Gets the diagnostics from shared compilation. /// Gets the diagnostics from shared compilation.
#[must_use = "the result must be checked"]
pub fn shared_diagnostics(self: &Arc<Self>) -> Result<Arc<DiagnosticsTask>> { pub fn shared_diagnostics(self: &Arc<Self>) -> Result<Arc<DiagnosticsTask>> {
self.compute::<DiagnosticsTask>() self.compute::<DiagnosticsTask>()
} }
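A short driver sketch (not from this commit) for the cached entry points documented above; the function name is illustrative.
// Sketch: compile once through the graph and summarize diagnostics.
fn report<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<()> {
    // `shared_compile` reuses the cached paged compilation, if already computed.
    let _doc = graph.shared_compile()?;
    let diag = graph.shared_diagnostics()?;
    eprintln!("{} error(s), {} warning(s)", diag.error_cnt(), diag.warning_cnt());
    Ok(())
}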

View file

@ -1,3 +1,5 @@
//! The configuration of the world.
use std::borrow::Cow; use std::borrow::Cow;
use std::path::PathBuf; use std::path::PathBuf;
@ -8,49 +10,51 @@ use typst::foundations::Dict;
use crate::EntryOpts; use crate::EntryOpts;
/// The options to create the world.
#[serde_as] #[serde_as]
#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
pub struct CompileOpts { pub struct CompileOpts {
/// Path to entry /// The path to the entry.
pub entry: EntryOpts, pub entry: EntryOpts,
/// Additional input arguments to compile the entry file. /// Additional input arguments to compile the entry file.
pub inputs: Dict, pub inputs: Dict,
/// Path to font profile for cache /// The path to the font profile for cache.
#[serde(rename = "fontProfileCachePath")] #[serde(rename = "fontProfileCachePath")]
pub font_profile_cache_path: PathBuf, pub font_profile_cache_path: PathBuf,
/// will remove later /// The paths to the font files.
#[serde(rename = "fontPaths")] #[serde(rename = "fontPaths")]
pub font_paths: Vec<PathBuf>, pub font_paths: Vec<PathBuf>,
/// Exclude system font paths /// Whether to exclude system font paths.
#[serde(rename = "noSystemFonts")] #[serde(rename = "noSystemFonts")]
pub no_system_fonts: bool, pub no_system_fonts: bool,
/// Include embedded fonts /// Whether to include embedded fonts.
#[serde(rename = "withEmbeddedFonts")] #[serde(rename = "withEmbeddedFonts")]
#[serde_as(as = "Vec<AsCowBytes>")] #[serde_as(as = "Vec<AsCowBytes>")]
pub with_embedded_fonts: Vec<Cow<'static, [u8]>>, pub with_embedded_fonts: Vec<Cow<'static, [u8]>>,
/// Fixed creation timestamp for the world. /// The fixed creation timestamp for the world.
#[serde(rename = "creationTimestamp")] #[serde(rename = "creationTimestamp")]
pub creation_timestamp: Option<i64>, pub creation_timestamp: Option<i64>,
} }
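For orientation (not part of the commit), a sketch of filling the options above; it uses the `EntryOpts` constructors documented in entry.rs further below, and the values are placeholders.
// Sketch: minimal options for a rooted workspace without system fonts.
fn example_opts(root: PathBuf) -> CompileOpts {
    CompileOpts {
        entry: EntryOpts::new_workspace(root),
        no_system_fonts: true,
        ..Default::default()
    }
}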
/// The options to specify the fonts for the world.
#[serde_as] #[serde_as]
#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
pub struct CompileFontOpts { pub struct CompileFontOpts {
/// will remove later /// The paths to the font files.
#[serde(rename = "fontPaths")] #[serde(rename = "fontPaths")]
pub font_paths: Vec<PathBuf>, pub font_paths: Vec<PathBuf>,
/// Exclude system font paths /// Whether to exclude system font paths.
#[serde(rename = "noSystemFonts")] #[serde(rename = "noSystemFonts")]
pub no_system_fonts: bool, pub no_system_fonts: bool,
/// Include embedded fonts /// The embedded fonts to include.
#[serde(rename = "withEmbeddedFonts")] #[serde(rename = "withEmbeddedFonts")]
#[serde_as(as = "Vec<AsCowBytes>")] #[serde_as(as = "Vec<AsCowBytes>")]
pub with_embedded_fonts: Vec<Cow<'static, [u8]>>, pub with_embedded_fonts: Vec<Cow<'static, [u8]>>,

View file

@ -1,3 +1,5 @@
//! The diagnostic utilities.
use ecow::EcoString; use ecow::EcoString;
use std::str::FromStr; use std::str::FromStr;
@ -12,7 +14,7 @@ use typst::syntax::Span;
use crate::{CodeSpanReportWorld, DiagnosticFormat, SourceWorld}; use crate::{CodeSpanReportWorld, DiagnosticFormat, SourceWorld};
/// Prints diagnostic messages to the terminal. /// Prints diagnostic messages to a string.
pub fn print_diagnostics_to_string<'d, 'files>( pub fn print_diagnostics_to_string<'d, 'files>(
world: &'files dyn SourceWorld, world: &'files dyn SourceWorld,
errors: impl Iterator<Item = &'d SourceDiagnostic>, errors: impl Iterator<Item = &'d SourceDiagnostic>,
@ -30,7 +32,7 @@ pub fn print_diagnostics_to_string<'d, 'files>(
Ok(output) Ok(output)
} }
/// Prints diagnostic messages to the terminal. /// Prints diagnostic messages to a writer.
pub fn print_diagnostics_to<'d, 'files>( pub fn print_diagnostics_to<'d, 'files>(
world: &'files dyn SourceWorld, world: &'files dyn SourceWorld,
errors: impl Iterator<Item = &'d SourceDiagnostic>, errors: impl Iterator<Item = &'d SourceDiagnostic>,

View file

@ -1,3 +1,5 @@
//! The entry state of the world.
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::LazyLock; use std::sync::LazyLock;
@ -7,35 +9,47 @@ use tinymist_vfs::{WorkspaceResolution, WorkspaceResolver};
use typst::diag::SourceResult; use typst::diag::SourceResult;
use typst::syntax::{FileId, VirtualPath}; use typst::syntax::{FileId, VirtualPath};
/// A trait to read the entry state.
pub trait EntryReader { pub trait EntryReader {
/// Gets the entry state.
fn entry_state(&self) -> EntryState; fn entry_state(&self) -> EntryState;
/// Gets the main file id.
fn main_id(&self) -> Option<FileId> { fn main_id(&self) -> Option<FileId> {
self.entry_state().main() self.entry_state().main()
} }
} }
/// A trait to manage the entry state.
pub trait EntryManager: EntryReader { pub trait EntryManager: EntryReader {
/// Mutates the entry state.
fn mutate_entry(&mut self, state: EntryState) -> SourceResult<EntryState>; fn mutate_entry(&mut self, state: EntryState) -> SourceResult<EntryState>;
} }
/// The state of the entry.
#[derive(Debug, Clone, Hash, PartialEq, Eq, Default)] #[derive(Debug, Clone, Hash, PartialEq, Eq, Default)]
pub struct EntryState { pub struct EntryState {
/// Path to the root directory of compilation. /// The path to the root directory of compilation.
/// The world forbids direct access to files outside this directory. /// The world forbids direct access to files outside this directory.
///
/// If the root is `None`, the world cannot access the file system.
root: Option<ImmutPath>, root: Option<ImmutPath>,
/// Identifier of the main file in the workspace /// The identifier of the main file in the workspace.
///
/// If the main is `None`, the world is inactive.
main: Option<FileId>, main: Option<FileId>,
} }
/// The detached entry.
pub static DETACHED_ENTRY: LazyLock<FileId> = pub static DETACHED_ENTRY: LazyLock<FileId> =
LazyLock::new(|| FileId::new(None, VirtualPath::new(Path::new("/__detached.typ")))); LazyLock::new(|| FileId::new(None, VirtualPath::new(Path::new("/__detached.typ"))));
/// The memory main entry.
pub static MEMORY_MAIN_ENTRY: LazyLock<FileId> = pub static MEMORY_MAIN_ENTRY: LazyLock<FileId> =
LazyLock::new(|| FileId::new(None, VirtualPath::new(Path::new("/__main__.typ")))); LazyLock::new(|| FileId::new(None, VirtualPath::new(Path::new("/__main__.typ"))));
impl EntryState { impl EntryState {
/// Create an entry state with no workspace root and no main file. /// Creates an entry state with no workspace root and no main file.
pub fn new_detached() -> Self { pub fn new_detached() -> Self {
Self { Self {
root: None, root: None,
@ -43,12 +57,12 @@ impl EntryState {
} }
} }
/// Create an entry state with a workspace root and no main file. /// Creates an entry state with a workspace root and no main file.
pub fn new_workspace(root: ImmutPath) -> Self { pub fn new_workspace(root: ImmutPath) -> Self {
Self::new_rooted(root, None) Self::new_rooted(root, None)
} }
/// Create an entry state without permission to access the file system. /// Creates an entry state without permission to access the file system.
pub fn new_rootless(main: VirtualPath) -> Self { pub fn new_rootless(main: VirtualPath) -> Self {
Self { Self {
root: None, root: None,
@ -56,12 +70,12 @@ impl EntryState {
} }
} }
/// Create an entry state with a workspace root and an main file. /// Creates an entry state with a workspace root and a main file.
pub fn new_rooted_by_id(root: ImmutPath, main: FileId) -> Self { pub fn new_rooted_by_id(root: ImmutPath, main: FileId) -> Self {
Self::new_rooted(root, Some(main.vpath().clone())) Self::new_rooted(root, Some(main.vpath().clone()))
} }
/// Create an entry state with a workspace root and an optional main file. /// Creates an entry state with a workspace root and an optional main file.
pub fn new_rooted(root: ImmutPath, main: Option<VirtualPath>) -> Self { pub fn new_rooted(root: ImmutPath, main: Option<VirtualPath>) -> Self {
let main = main.map(|main| WorkspaceResolver::workspace_file(Some(&root), main)); let main = main.map(|main| WorkspaceResolver::workspace_file(Some(&root), main));
Self { Self {
@ -70,7 +84,7 @@ impl EntryState {
} }
} }
/// Create an entry state with only a main file given. /// Creates an entry state with only a main file given.
pub fn new_rooted_by_parent(entry: ImmutPath) -> Option<Self> { pub fn new_rooted_by_parent(entry: ImmutPath) -> Option<Self> {
let root = entry.parent().map(ImmutPath::from); let root = entry.parent().map(ImmutPath::from);
let main = let main =
@ -82,14 +96,17 @@ impl EntryState {
}) })
} }
/// Gets the main file id.
pub fn main(&self) -> Option<FileId> { pub fn main(&self) -> Option<FileId> {
self.main self.main
} }
/// Gets the specified root directory.
pub fn root(&self) -> Option<ImmutPath> { pub fn root(&self) -> Option<ImmutPath> {
self.root.clone() self.root.clone()
} }
/// Gets the root directory of the main file.
pub fn workspace_root(&self) -> Option<ImmutPath> { pub fn workspace_root(&self) -> Option<ImmutPath> {
if let Some(main) = self.main { if let Some(main) = self.main {
match WorkspaceResolver::resolve(main).ok()? { match WorkspaceResolver::resolve(main).ok()? {
@ -104,6 +121,7 @@ impl EntryState {
} }
} }
/// Selects an entry in the workspace.
pub fn select_in_workspace(&self, path: &Path) -> EntryState { pub fn select_in_workspace(&self, path: &Path) -> EntryState {
let id = WorkspaceResolver::workspace_file(self.root.as_ref(), VirtualPath::new(path)); let id = WorkspaceResolver::workspace_file(self.root.as_ref(), VirtualPath::new(path));
@ -113,6 +131,7 @@ impl EntryState {
} }
} }
/// Tries to select an entry in the workspace.
pub fn try_select_path_in_workspace(&self, path: &Path) -> Result<Option<EntryState>> { pub fn try_select_path_in_workspace(&self, path: &Path) -> Result<Option<EntryState>> {
Ok(match self.workspace_root() { Ok(match self.workspace_root() {
Some(root) => match path.strip_prefix(&root) { Some(root) => match path.strip_prefix(&root) {
@ -130,21 +149,26 @@ impl EntryState {
}) })
} }
/// Checks if the world is detached.
pub fn is_detached(&self) -> bool { pub fn is_detached(&self) -> bool {
self.root.is_none() && self.main.is_none() self.root.is_none() && self.main.is_none()
} }
/// Checks if the world is inactive.
pub fn is_inactive(&self) -> bool { pub fn is_inactive(&self) -> bool {
self.main.is_none() self.main.is_none()
} }
/// Checks if the world is in a package.
pub fn is_in_package(&self) -> bool { pub fn is_in_package(&self) -> bool {
self.main.is_some_and(WorkspaceResolver::is_package_file) self.main.is_some_and(WorkspaceResolver::is_package_file)
} }
} }
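A small sketch (not from the commit) tying the constructors and selectors above together; `ImmutPath` is the immutable path type already used in these signatures, and the helper name is hypothetical.
// Sketch: open a workspace and pin a main file inside it.
fn pin_main(root: ImmutPath, main_file: &Path) -> EntryState {
    let workspace = EntryState::new_workspace(root);
    // Resolves `main_file` against the workspace root and sets it as main.
    workspace.select_in_workspace(main_file)
}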
/// The options to create the entry.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum EntryOpts { pub enum EntryOpts {
/// Creates the entry with a specified root directory and a main file.
Workspace { Workspace {
/// Path to the root directory of compilation. /// Path to the root directory of compilation.
/// The world forbids direct access to files outside this directory. /// The world forbids direct access to files outside this directory.
@ -152,10 +176,12 @@ pub enum EntryOpts {
/// Relative path to the main file in the workspace. /// Relative path to the main file in the workspace.
main: Option<PathBuf>, main: Option<PathBuf>,
}, },
/// Creates the entry with a main file and a parent directory as the root.
RootByParent { RootByParent {
/// Path to the entry file of compilation. /// Path to the entry file of compilation.
entry: PathBuf, entry: PathBuf,
}, },
/// Creates the entry with no root and no main file.
Detached, Detached,
} }
@ -166,18 +192,22 @@ impl Default for EntryOpts {
} }
impl EntryOpts { impl EntryOpts {
/// Creates the entry with no root and no main file.
pub fn new_detached() -> Self { pub fn new_detached() -> Self {
Self::Detached Self::Detached
} }
/// Creates the entry with a specified root directory and no main file.
pub fn new_workspace(root: PathBuf) -> Self { pub fn new_workspace(root: PathBuf) -> Self {
Self::Workspace { root, main: None } Self::Workspace { root, main: None }
} }
/// Creates the entry with a specified root directory and a main file.
pub fn new_rooted(root: PathBuf, main: Option<PathBuf>) -> Self { pub fn new_rooted(root: PathBuf, main: Option<PathBuf>) -> Self {
Self::Workspace { root, main } Self::Workspace { root, main }
} }
/// Creates the entry with a main file and a parent directory as the root.
pub fn new_rootless(entry: PathBuf) -> Option<Self> { pub fn new_rootless(entry: PathBuf) -> Option<Self> {
if entry.is_relative() { if entry.is_relative() {
return None; return None;

View file

@ -1,20 +1,28 @@
//! The cache of the font info.
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use typst::text::FontInfo; use typst::text::FontInfo;
/// The condition of the cache.
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
#[serde(tag = "t", content = "v")] #[serde(tag = "t", content = "v")]
pub enum CacheCondition { pub enum CacheCondition {
/// The sha256 hash of the data.
Sha256(String), Sha256(String),
} }
/// The cache of the font info.
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct FontInfoCache { pub struct FontInfoCache {
/// The font info.
pub info: Vec<FontInfo>, pub info: Vec<FontInfo>,
/// The conditions of the cache.
pub conditions: Vec<CacheCondition>, pub conditions: Vec<CacheCondition>,
} }
impl FontInfoCache { impl FontInfoCache {
/// Creates a new font info cache from the data.
pub fn from_data(buffer: &[u8]) -> Self { pub fn from_data(buffer: &[u8]) -> Self {
let hash = hex::encode(Sha256::digest(buffer)); let hash = hex::encode(Sha256::digest(buffer));

View file

@ -5,12 +5,15 @@ use crate::Bytes;
/// A FontLoader helps load a font from somewhere. /// A FontLoader helps load a font from somewhere.
pub trait FontLoader { pub trait FontLoader {
/// Loads a font.
fn load(&mut self) -> Option<Font>; fn load(&mut self) -> Option<Font>;
} }
/// Loads font from a buffer. /// Loads a font from a buffer.
pub struct BufferFontLoader { pub struct BufferFontLoader {
/// The buffer to load the font from.
pub buffer: Option<Bytes>, pub buffer: Option<Bytes>,
/// The index in a font file.
pub index: u32, pub index: u32,
} }
@ -20,13 +23,16 @@ impl FontLoader for BufferFontLoader {
} }
} }
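To illustrate the trait (not part of this commit), a loader that defers reading a font file until first use; `Font::new(Bytes, u32)` and `Bytes::new` over an owned buffer are assumptions about typst's API.
// Sketch: a file-backed implementation of the `FontLoader` trait above.
struct FileFontLoader {
    path: std::path::PathBuf,
    index: u32,
}

impl FontLoader for FileFontLoader {
    fn load(&mut self) -> Option<Font> {
        // Read the file lazily; on any error just report that no font is available.
        let data = std::fs::read(&self.path).ok()?;
        Font::new(Bytes::new(data), self.index)
    }
}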
/// Loads font from a reader. /// Loads a font from a reader.
pub struct LazyBufferFontLoader<R> { pub struct LazyBufferFontLoader<R> {
/// The reader to load the font from.
pub read: Option<R>, pub read: Option<R>,
/// The index in a font file.
pub index: u32, pub index: u32,
} }
impl<R: ReadAllOnce + Sized> LazyBufferFontLoader<R> { impl<R: ReadAllOnce + Sized> LazyBufferFontLoader<R> {
/// Creates a new lazy buffer font loader.
pub fn new(read: R, index: u32) -> Self { pub fn new(read: R, index: u32) -> Self {
Self { Self {
read: Some(read), read: Some(read),

View file

@ -1,3 +1,6 @@
//! Font searchers to run the compiler in the generic environment (for example,
//! `nostd`).
use std::sync::Arc; use std::sync::Arc;
use rayon::iter::{IntoParallelIterator, ParallelIterator}; use rayon::iter::{IntoParallelIterator, ParallelIterator};
@ -10,6 +13,7 @@ use crate::font::{BufferFontLoader, FontResolverImpl, FontSlot};
/// A memory font searcher. /// A memory font searcher.
#[derive(Debug)] #[derive(Debug)]
pub struct MemoryFontSearcher { pub struct MemoryFontSearcher {
/// The fonts found during the search.
pub fonts: Vec<(FontInfo, FontSlot)>, pub fonts: Vec<(FontInfo, FontSlot)>,
} }
@ -25,7 +29,7 @@ impl MemoryFontSearcher {
Self { fonts: vec![] } Self { fonts: vec![] }
} }
/// Create a new browser searcher with fonts in a FontResolverImpl. /// Creates a new searcher with fonts in a font resolver.
pub fn from_resolver(resolver: FontResolverImpl) -> Self { pub fn from_resolver(resolver: FontResolverImpl) -> Self {
let fonts = resolver let fonts = resolver
.slots .slots
@ -46,12 +50,12 @@ impl MemoryFontSearcher {
Self { fonts } Self { fonts }
} }
/// Adds an in-memory font. /// Adds an in-memory font to the searcher.
pub fn add_memory_font(&mut self, data: Bytes) { pub fn add_memory_font(&mut self, data: Bytes) {
self.add_memory_fonts(rayon::iter::once(data)); self.add_memory_fonts(rayon::iter::once(data));
} }
/// Adds in-memory fonts. /// Adds in-memory fonts to the searcher.
pub fn add_memory_fonts(&mut self, data: impl IntoParallelIterator<Item = Bytes>) { pub fn add_memory_fonts(&mut self, data: impl IntoParallelIterator<Item = Bytes>) {
let source = DataSource::Memory(MemoryDataSource { let source = DataSource::Memory(MemoryDataSource {
name: "<memory>".to_owned(), name: "<memory>".to_owned(),
@ -62,7 +66,7 @@ impl MemoryFontSearcher {
); );
} }
/// Adds a number of raw font resources. /// Adds a number of raw font resources to the searcher.
/// ///
/// Note: if you would like to reuse font resources across builds, use /// Note: if you would like to reuse font resources across builds, use
/// [`Self::extend_bytes`] instead. /// [`Self::extend_bytes`] instead.
@ -108,5 +112,6 @@ impl MemoryFontSearcher {
} }
} }
/// A deprecated type to build a font resolver.
#[deprecated(note = "use [`MemoryFontSearcher`] instead")] #[deprecated(note = "use [`MemoryFontSearcher`] instead")]
pub type MemoryFontBuilder = MemoryFontSearcher; pub type MemoryFontBuilder = MemoryFontSearcher;

View file

@ -1,19 +1,24 @@
//! The profile of the font.
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha2::Digest; use sha2::Digest;
use std::{collections::HashMap, time::SystemTime}; use std::{collections::HashMap, time::SystemTime};
use typst::text::{Coverage, FontInfo}; use typst::text::{Coverage, FontInfo};
/// The metadata of the font.
type FontMetaDict = HashMap<String, String>; type FontMetaDict = HashMap<String, String>;
/// The item of the font profile.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FontInfoItem { pub struct FontInfoItem {
/// customized profile data /// The metadata of the font.
pub meta: FontMetaDict, pub meta: FontMetaDict,
/// The informatioin of the font /// The information of the font.
pub info: FontInfo, pub info: FontInfo,
} }
impl FontInfoItem { impl FontInfoItem {
/// Creates a new font info item.
pub fn new(info: FontInfo) -> Self { pub fn new(info: FontInfo) -> Self {
Self { Self {
meta: Default::default(), meta: Default::default(),
@ -21,41 +26,49 @@ impl FontInfoItem {
} }
} }
/// Gets the index of the font.
pub fn index(&self) -> Option<u32> { pub fn index(&self) -> Option<u32> {
self.meta.get("index").and_then(|v| v.parse::<u32>().ok()) self.meta.get("index").and_then(|v| v.parse::<u32>().ok())
} }
/// Sets the index of the font.
pub fn set_index(&mut self, v: u32) { pub fn set_index(&mut self, v: u32) {
self.meta.insert("index".to_owned(), v.to_string()); self.meta.insert("index".to_owned(), v.to_string());
} }
/// Gets the coverage hash of the font.
pub fn coverage_hash(&self) -> Option<&String> { pub fn coverage_hash(&self) -> Option<&String> {
self.meta.get("coverage_hash") self.meta.get("coverage_hash")
} }
/// Sets the coverage hash of the font.
pub fn set_coverage_hash(&mut self, v: String) { pub fn set_coverage_hash(&mut self, v: String) {
self.meta.insert("coverage_hash".to_owned(), v); self.meta.insert("coverage_hash".to_owned(), v);
} }
/// Gets the metadata of the font.
pub fn meta(&self) -> &FontMetaDict { pub fn meta(&self) -> &FontMetaDict {
&self.meta &self.meta
} }
/// Gets the information of the font.
pub fn info(&self) -> &FontInfo { pub fn info(&self) -> &FontInfo {
&self.info &self.info
} }
} }
/// The item of the font profile.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FontProfileItem { pub struct FontProfileItem {
/// The hash of the file /// The hash of the file.
pub hash: String, pub hash: String,
/// customized profile data /// The metadata of the font.
pub meta: FontMetaDict, pub meta: FontMetaDict,
/// The informatioin of the font /// The information of the font.
pub info: Vec<FontInfoItem>, pub info: Vec<FontInfoItem>,
} }
/// Converts a system time to microseconds since the Unix epoch (lossy).
fn to_micro_lossy(t: SystemTime) -> u128 { fn to_micro_lossy(t: SystemTime) -> u128 {
t.duration_since(SystemTime::UNIX_EPOCH) t.duration_since(SystemTime::UNIX_EPOCH)
.unwrap() .unwrap()
@ -63,6 +76,7 @@ fn to_micro_lossy(t: SystemTime) -> u128 {
} }
impl FontProfileItem { impl FontProfileItem {
/// Creates a new font profile item.
pub fn new(kind: &str, hash: String) -> Self { pub fn new(kind: &str, hash: String) -> Self {
let mut meta: FontMetaDict = Default::default(); let mut meta: FontMetaDict = Default::default();
meta.insert("kind".to_owned(), kind.to_string()); meta.insert("kind".to_owned(), kind.to_string());
@ -74,10 +88,12 @@ impl FontProfileItem {
} }
} }
/// Gets the path of the font.
pub fn path(&self) -> Option<&String> { pub fn path(&self) -> Option<&String> {
self.meta.get("path") self.meta.get("path")
} }
/// Gets the modification time of the font.
pub fn mtime(&self) -> Option<SystemTime> { pub fn mtime(&self) -> Option<SystemTime> {
self.meta.get("mtime").and_then(|v| { self.meta.get("mtime").and_then(|v| {
let v = v.parse::<u64>().ok(); let v = v.parse::<u64>().ok();
@ -85,6 +101,7 @@ impl FontProfileItem {
}) })
} }
    /// Checks whether the given modification time exactly matches the recorded one.
pub fn mtime_is_exact(&self, t: SystemTime) -> bool { pub fn mtime_is_exact(&self, t: SystemTime) -> bool {
self.mtime() self.mtime()
.map(|s| { .map(|s| {
@ -95,39 +112,50 @@ impl FontProfileItem {
.unwrap_or_default() .unwrap_or_default()
} }
/// Sets the path of the font.
pub fn set_path(&mut self, v: String) { pub fn set_path(&mut self, v: String) {
self.meta.insert("path".to_owned(), v); self.meta.insert("path".to_owned(), v);
} }
/// Sets the modification time of the font.
pub fn set_mtime(&mut self, v: SystemTime) { pub fn set_mtime(&mut self, v: SystemTime) {
self.meta self.meta
.insert("mtime".to_owned(), to_micro_lossy(v).to_string()); .insert("mtime".to_owned(), to_micro_lossy(v).to_string());
} }
/// Gets the hash of the font.
pub fn hash(&self) -> &str { pub fn hash(&self) -> &str {
&self.hash &self.hash
} }
/// Gets the metadata of the font.
pub fn meta(&self) -> &FontMetaDict { pub fn meta(&self) -> &FontMetaDict {
&self.meta &self.meta
} }
/// Gets the information of the font.
pub fn info(&self) -> &[FontInfoItem] { pub fn info(&self) -> &[FontInfoItem] {
&self.info &self.info
} }
    /// Adds a font info item to the profile.
pub fn add_info(&mut self, info: FontInfoItem) { pub fn add_info(&mut self, info: FontInfoItem) {
self.info.push(info); self.info.push(info);
} }
} }
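
A minimal sketch of how these items might fit together, assuming `info` is a `FontInfo` parsed elsewhere and the "memory" kind string is only illustrative:

fn profile_entry(info: FontInfo, file_hash: String, path: String) -> FontProfileItem {
    // Wrap the parsed font info and record which face index it came from.
    let mut item = FontInfoItem::new(info);
    item.set_index(0);

    // Attach the per-file metadata and the wrapped font info.
    let mut profile = FontProfileItem::new("memory", file_hash);
    profile.set_path(path);
    profile.set_mtime(std::time::SystemTime::now());
    profile.add_info(item);
    profile
}
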
/// The profile of the font.
#[derive(Default, Debug, Clone, Serialize, Deserialize)] #[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct FontProfile { pub struct FontProfile {
/// The version of the profile.
pub version: String, pub version: String,
/// The build information of the profile.
pub build_info: String, pub build_info: String,
/// The items of the profile.
pub items: Vec<FontProfileItem>, pub items: Vec<FontProfileItem>,
} }
/// Computes the coverage hash of the font.
pub fn get_font_coverage_hash(coverage: &Coverage) -> String { pub fn get_font_coverage_hash(coverage: &Coverage) -> String {
let mut coverage_hash = sha2::Sha256::new(); let mut coverage_hash = sha2::Sha256::new();
coverage coverage

View file

@ -77,6 +77,7 @@ impl<T: FontResolver> FontResolver for Arc<T> {
} }
} }
/// A reusable font resolver.
pub trait ReusableFontResolver: FontResolver { pub trait ReusableFontResolver: FontResolver {
/// Reuses the font resolver. /// Reuses the font resolver.
fn slots(&self) -> impl Iterator<Item = FontSlot>; fn slots(&self) -> impl Iterator<Item = FontSlot>;
@ -96,8 +97,11 @@ impl<T: ReusableFontResolver> ReusableFontResolver for Arc<T> {
/// - Otherwise, [`crate::font::pure::MemoryFontBuilder`] in memory. /// - Otherwise, [`crate::font::pure::MemoryFontBuilder`] in memory.
#[derive(Debug)] #[derive(Debug)]
pub struct FontResolverImpl { pub struct FontResolverImpl {
/// The user-specified font paths.
pub(crate) font_paths: Vec<PathBuf>, pub(crate) font_paths: Vec<PathBuf>,
/// The font book.
pub(crate) book: LazyHash<FontBook>, pub(crate) book: LazyHash<FontBook>,
/// The slots of the font resolver.
pub(crate) slots: Vec<FontSlot>, pub(crate) slots: Vec<FontSlot>,
} }
@ -111,6 +115,7 @@ impl FontResolverImpl {
} }
} }
/// Creates a new font resolver with fonts.
pub fn new_with_fonts( pub fn new_with_fonts(
font_paths: Vec<PathBuf>, font_paths: Vec<PathBuf>,
fonts: impl Iterator<Item = (FontInfo, FontSlot)>, fonts: impl Iterator<Item = (FontInfo, FontSlot)>,
@ -135,7 +140,7 @@ impl FontResolverImpl {
self.slots.len() self.slots.len()
} }
    /// Tests whether the resolver doesn't hold any fonts.     /// Checks whether the resolver holds no fonts.
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.slots.is_empty() self.slots.is_empty()
} }
@ -183,6 +188,7 @@ impl FontResolverImpl {
self.slot(index)?.description.clone() self.slot(index)?.description.clone()
} }
/// Sets the user-specified font paths.
pub fn with_font_paths(mut self, font_paths: Vec<PathBuf>) -> Self { pub fn with_font_paths(mut self, font_paths: Vec<PathBuf>) -> Self {
self.font_paths = font_paths; self.font_paths = font_paths;
self self
@ -218,6 +224,7 @@ impl fmt::Display for FontResolverImpl {
} }
impl ReusableFontResolver for FontResolverImpl { impl ReusableFontResolver for FontResolverImpl {
/// Returns an iterator over all slots in the resolver.
fn slots(&self) -> impl Iterator<Item = FontSlot> { fn slots(&self) -> impl Iterator<Item = FontSlot> {
self.slots.iter().cloned() self.slots.iter().cloned()
} }

View file

@ -16,16 +16,17 @@ type FontSlotInner = QueryRef<Option<Font>, (), Box<dyn FontLoader + Send>>;
#[derive(Clone)] #[derive(Clone)]
pub struct FontSlot { pub struct FontSlot {
inner: Arc<FontSlotInner>, inner: Arc<FontSlotInner>,
/// The description of the font slot.
pub description: Option<Arc<DataSource>>, pub description: Option<Arc<DataSource>>,
} }
impl FontSlot { impl FontSlot {
/// Creates a font slot to load. /// Creates a new font slot to load.
pub fn new<F: FontLoader + Send + 'static>(f: F) -> Self { pub fn new<F: FontLoader + Send + 'static>(f: F) -> Self {
Self::new_boxed(Box::new(f)) Self::new_boxed(Box::new(f))
} }
/// Creates a font slot from a boxed font loader trait object. /// Creates a new font slot from a boxed font loader trait object.
pub fn new_boxed(f: Box<dyn FontLoader + Send>) -> Self { pub fn new_boxed(f: Box<dyn FontLoader + Send>) -> Self {
Self { Self {
inner: Arc::new(FontSlotInner::with_context(f)), inner: Arc::new(FontSlotInner::with_context(f)),
@ -33,7 +34,7 @@ impl FontSlot {
} }
} }
/// Creates a font slot with a loaded font. /// Creates a new font slot with a loaded font.
pub fn new_loaded(f: Option<Font>) -> Self { pub fn new_loaded(f: Option<Font>) -> Self {
Self { Self {
inner: Arc::new(FontSlotInner::with_value(f)), inner: Arc::new(FontSlotInner::with_value(f)),
@ -41,12 +42,12 @@ impl FontSlot {
} }
} }
/// Attaches a description to the font slot. /// Attaches a description to the font slot and returns a new slot.
pub fn with_describe(self, desc: DataSource) -> Self { pub fn with_describe(self, desc: DataSource) -> Self {
self.with_describe_arc(Arc::new(desc)) self.with_describe_arc(Arc::new(desc))
} }
/// Attaches a description to the font slot. /// Attaches a description to the font slot and returns a new slot.
pub fn with_describe_arc(self, desc: Arc<DataSource>) -> Self { pub fn with_describe_arc(self, desc: Arc<DataSource>) -> Self {
Self { Self {
inner: self.inner, inner: self.inner,
@ -54,13 +55,13 @@ impl FontSlot {
} }
} }
/// Gets or make the font load result. /// Gets or makes the font load result.
pub fn get_or_init(&self) -> Option<Font> { pub fn get_or_init(&self) -> Option<Font> {
let res = self.inner.compute_with_context(|mut c| Ok(c.load())); let res = self.inner.compute_with_context(|mut c| Ok(c.load()));
res.unwrap().clone() res.unwrap().clone()
} }
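
A minimal sketch of the slot life cycle, assuming `font` is a typst `Font` that was already loaded:

fn wrap_loaded(font: Font) -> Option<Font> {
    // A slot created from an already-loaded font resolves immediately
    // and never invokes a loader.
    let slot = FontSlot::new_loaded(Some(font));
    slot.get_or_init()
}
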
/// Gets the reference to the font load result (possible uninitialized). /// Gets the reference to the font load result (possibly uninitialized).
/// ///
/// Returns `None` if the cell is empty, or being initialized. This /// Returns `None` if the cell is empty, or being initialized. This
/// method never blocks. /// method never blocks.

View file

@ -1,3 +1,5 @@
//! The font searcher to run the compiler in the system environment.
use std::borrow::Cow; use std::borrow::Cow;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@ -14,7 +16,7 @@ use super::{FontResolverImpl, FontSlot, LazyBufferFontLoader};
use crate::config::CompileFontOpts; use crate::config::CompileFontOpts;
use crate::debug_loc::{DataSource, FsDataSource}; use crate::debug_loc::{DataSource, FsDataSource};
/// Searches for fonts in system. /// Searches for fonts in the system.
#[derive(Debug)] #[derive(Debug)]
pub struct SystemFontSearcher { pub struct SystemFontSearcher {
/// The base font searcher. /// The base font searcher.
@ -27,7 +29,7 @@ pub struct SystemFontSearcher {
} }
impl SystemFontSearcher { impl SystemFontSearcher {
/// Creates a system searcher. /// Creates a new searcher.
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
base: MemoryFontSearcher::default(), base: MemoryFontSearcher::default(),
@ -36,14 +38,14 @@ impl SystemFontSearcher {
} }
} }
/// Builds a FontResolverImpl. /// Builds a font resolver.
pub fn build(self) -> FontResolverImpl { pub fn build(self) -> FontResolverImpl {
self.base.build().with_font_paths(self.font_paths) self.base.build().with_font_paths(self.font_paths)
} }
} }
impl SystemFontSearcher { impl SystemFontSearcher {
/// Resolve fonts from given options. /// Resolves fonts from given options.
pub fn resolve_opts(&mut self, opts: CompileFontOpts) -> Result<()> { pub fn resolve_opts(&mut self, opts: CompileFontOpts) -> Result<()> {
// Note: the order of adding fonts is important. // Note: the order of adding fonts is important.
// See: https://github.com/typst/typst/blob/9c7f31870b4e1bf37df79ebbe1df9a56df83d878/src/font/book.rs#L151-L154 // See: https://github.com/typst/typst/blob/9c7f31870b4e1bf37df79ebbe1df9a56df83d878/src/font/book.rs#L151-L154
@ -75,6 +77,7 @@ impl SystemFontSearcher {
Ok(()) Ok(())
} }
    /// Flushes the searcher's font database; required before adding in-memory fonts.
pub fn flush(&mut self) { pub fn flush(&mut self) {
use fontdb::Source; use fontdb::Source;
@ -103,7 +106,7 @@ impl SystemFontSearcher {
self.db = Database::new(); self.db = Database::new();
} }
/// Add an in-memory font. /// Adds an in-memory font to the searcher.
pub fn add_memory_font(&mut self, data: Bytes) { pub fn add_memory_font(&mut self, data: Bytes) {
if !self.db.is_empty() { if !self.db.is_empty() {
panic!("dirty font search state, please flush the searcher before adding memory fonts"); panic!("dirty font search state, please flush the searcher before adding memory fonts");
@ -112,7 +115,7 @@ impl SystemFontSearcher {
self.base.add_memory_font(data); self.base.add_memory_font(data);
} }
/// Adds in-memory fonts. /// Adds in-memory fonts to the searcher.
pub fn add_memory_fonts(&mut self, data: impl ParallelIterator<Item = Bytes>) { pub fn add_memory_fonts(&mut self, data: impl ParallelIterator<Item = Bytes>) {
if !self.db.is_empty() { if !self.db.is_empty() {
panic!("dirty font search state, please flush the searcher before adding memory fonts"); panic!("dirty font search state, please flush the searcher before adding memory fonts");
@ -121,10 +124,12 @@ impl SystemFontSearcher {
self.base.add_memory_fonts(data); self.base.add_memory_fonts(data);
} }
/// Searches for fonts in the system and adds them to the searcher.
pub fn search_system(&mut self) { pub fn search_system(&mut self) {
self.db.load_system_fonts(); self.db.load_system_fonts();
} }
    /// Records a font path in the searcher.
fn record_path(&mut self, path: &Path) { fn record_path(&mut self, path: &Path) {
self.font_paths.push(if !path.is_relative() { self.font_paths.push(if !path.is_relative() {
path.to_owned() path.to_owned()
@ -137,14 +142,14 @@ impl SystemFontSearcher {
}); });
} }
/// Search for all fonts in a directory recursively. /// Searches for all fonts in a directory recursively.
pub fn search_dir(&mut self, path: impl AsRef<Path>) { pub fn search_dir(&mut self, path: impl AsRef<Path>) {
self.record_path(path.as_ref()); self.record_path(path.as_ref());
self.db.load_fonts_dir(path); self.db.load_fonts_dir(path);
} }
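
A minimal sketch of building a resolver from system fonts plus one extra directory; the directory path here is purely illustrative:

fn system_resolver() -> FontResolverImpl {
    let mut searcher = SystemFontSearcher::new();
    // The order of the search calls matters; see the note in `resolve_opts`.
    searcher.search_system();
    searcher.search_dir("./assets/fonts");
    searcher.build()
}
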
/// Index the fonts in the file at the given path. /// Indexes the fonts in the file at the given path.
pub fn search_file(&mut self, path: impl AsRef<Path>) -> FileResult<()> { pub fn search_file(&mut self, path: impl AsRef<Path>) -> FileResult<()> {
self.record_path(path.as_ref()); self.record_path(path.as_ref());

View file

@ -1,3 +1,5 @@
//! Font searchers to run the compiler in the browser environment.
use std::borrow::Cow; use std::borrow::Cow;
use js_sys::ArrayBuffer; use js_sys::ArrayBuffer;
@ -318,15 +320,21 @@ impl FontBuilder {
} }
} }
/// A web font.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct WebFont { pub struct WebFont {
/// The font info.
pub info: FontInfo, pub info: FontInfo,
/// The context of the font.
pub context: JsValue, pub context: JsValue,
/// The blob loader.
pub blob: js_sys::Function, pub blob: js_sys::Function,
/// The index in a font file.
pub index: u32, pub index: u32,
} }
impl WebFont { impl WebFont {
/// Loads the font from the blob.
pub fn load(&self) -> Option<ArrayBuffer> { pub fn load(&self) -> Option<ArrayBuffer> {
self.blob self.blob
.call1(&self.context, &self.index.into()) .call1(&self.context, &self.index.into())
@ -340,13 +348,17 @@ impl WebFont {
/// cannot share data between workers. /// cannot share data between workers.
unsafe impl Send for WebFont {} unsafe impl Send for WebFont {}
/// A web font loader.
#[derive(Debug)] #[derive(Debug)]
pub struct WebFontLoader { pub struct WebFontLoader {
/// The font.
font: WebFont, font: WebFont,
/// The index in a font file.
index: u32, index: u32,
} }
impl WebFontLoader { impl WebFontLoader {
/// Creates a new web font loader.
pub fn new(font: WebFont, index: u32) -> Self { pub fn new(font: WebFont, index: u32) -> Self {
Self { font, index } Self { font, index }
} }
@ -368,7 +380,7 @@ impl FontLoader for WebFontLoader {
} }
} }
/// Searches for fonts. /// Searches for fonts in the browser.
pub struct BrowserFontSearcher { pub struct BrowserFontSearcher {
/// The base font searcher. /// The base font searcher.
base: MemoryFontSearcher, base: MemoryFontSearcher,
@ -382,20 +394,20 @@ impl BrowserFontSearcher {
} }
} }
/// Creates a new browser searcher with fonts in a FontResolverImpl. /// Creates a new searcher with fonts in a font resolver.
pub fn from_resolver(resolver: FontResolverImpl) -> Self { pub fn from_resolver(resolver: FontResolverImpl) -> Self {
let base = MemoryFontSearcher::from_resolver(resolver); let base = MemoryFontSearcher::from_resolver(resolver);
Self { base } Self { base }
} }
/// Builds a FontResolverImpl. /// Builds a font resolver.
pub fn build(self) -> FontResolverImpl { pub fn build(self) -> FontResolverImpl {
self.base.build() self.base.build()
} }
} }
impl BrowserFontSearcher { impl BrowserFontSearcher {
/// Resolves fonts from given options. /// Resolves fonts from given options and adds them to the searcher.
pub fn resolve_opts(&mut self, opts: CompileFontOpts) -> Result<()> { pub fn resolve_opts(&mut self, opts: CompileFontOpts) -> Result<()> {
// Source3: add the fonts in memory. // Source3: add the fonts in memory.
self.add_memory_fonts(opts.with_embedded_fonts.into_par_iter().map(|font_data| { self.add_memory_fonts(opts.with_embedded_fonts.into_par_iter().map(|font_data| {
@ -408,7 +420,7 @@ impl BrowserFontSearcher {
Ok(()) Ok(())
} }
/// Adds fonts that are embedded in the binary. /// Adds fonts that are embedded in the binary to the searcher.
#[cfg(feature = "fonts")] #[cfg(feature = "fonts")]
#[deprecated(note = "use `typst_assets::fonts` directly")] #[deprecated(note = "use `typst_assets::fonts` directly")]
pub fn add_embedded(&mut self) { pub fn add_embedded(&mut self) {
@ -422,11 +434,12 @@ impl BrowserFontSearcher {
} }
} }
/// Adds in-memory fonts. /// Adds in-memory fonts to the searcher.
pub fn add_memory_fonts(&mut self, data: impl ParallelIterator<Item = Bytes>) { pub fn add_memory_fonts(&mut self, data: impl ParallelIterator<Item = Bytes>) {
self.base.add_memory_fonts(data); self.base.add_memory_fonts(data);
} }
/// Adds web fonts to the searcher.
pub async fn add_web_fonts(&mut self, fonts: js_sys::Array) -> Result<()> { pub async fn add_web_fonts(&mut self, fonts: js_sys::Array) -> Result<()> {
let font_builder = FontBuilder {}; let font_builder = FontBuilder {};
@ -453,6 +466,7 @@ impl BrowserFontSearcher {
Ok(()) Ok(())
} }
/// Adds font data to the searcher.
pub fn add_font_data(&mut self, buffer: Bytes) { pub fn add_font_data(&mut self, buffer: Bytes) {
for (i, info) in FontInfo::iter(buffer.as_slice()).enumerate() { for (i, info) in FontInfo::iter(buffer.as_slice()).enumerate() {
let buffer = buffer.clone(); let buffer = buffer.clone();
@ -466,6 +480,7 @@ impl BrowserFontSearcher {
} }
} }
/// Mutates the fonts in the searcher.
pub fn with_fonts_mut(&mut self, func: impl FnOnce(&mut Vec<(FontInfo, FontSlot)>)) { pub fn with_fonts_mut(&mut self, func: impl FnOnce(&mut Vec<(FontInfo, FontSlot)>)) {
func(&mut self.base.fonts); func(&mut self.base.fonts);
} }

View file

@ -1,7 +1,5 @@
//! World implementation of typst for tinymist. //! World implementation of typst for tinymist.
#![allow(missing_docs)]
pub mod args; pub mod args;
pub mod config; pub mod config;
pub mod debug_loc; pub mod debug_loc;
@ -48,37 +46,39 @@ use package::PackageRegistry;
/// Latest version of the shadow api, which is in beta. /// Latest version of the shadow api, which is in beta.
pub trait ShadowApi { pub trait ShadowApi {
/// Get the shadow files. /// Gets the shadow files.
fn shadow_paths(&self) -> Vec<Arc<Path>>; fn shadow_paths(&self) -> Vec<Arc<Path>>;
/// Get the shadow files by file id. /// Gets the shadow files by file id.
fn shadow_ids(&self) -> Vec<FileId>; fn shadow_ids(&self) -> Vec<FileId>;
/// Reset the shadow files. /// Resets the shadow files.
fn reset_shadow(&mut self) { fn reset_shadow(&mut self) {
for path in self.shadow_paths() { for path in self.shadow_paths() {
self.unmap_shadow(&path).unwrap(); self.unmap_shadow(&path).unwrap();
} }
} }
/// Add a shadow file to the driver. /// Adds a shadow file to the driver.
fn map_shadow(&mut self, path: &Path, content: Bytes) -> FileResult<()>; fn map_shadow(&mut self, path: &Path, content: Bytes) -> FileResult<()>;
/// Add a shadow file to the driver. /// Removes a shadow file from the driver.
fn unmap_shadow(&mut self, path: &Path) -> FileResult<()>; fn unmap_shadow(&mut self, path: &Path) -> FileResult<()>;
/// Add a shadow file to the driver by file id. /// Adds a shadow file to the driver by file id.
/// Note: If a *path* is both shadowed by id and by path, the shadow by id /// Note: If a *path* is both shadowed by id and by path, the shadow by id
/// will be used. /// will be used.
fn map_shadow_by_id(&mut self, file_id: FileId, content: Bytes) -> FileResult<()>; fn map_shadow_by_id(&mut self, file_id: FileId, content: Bytes) -> FileResult<()>;
/// Add a shadow file to the driver by file id. /// Removes a shadow file from the driver by file id.
/// Note: If a *path* is both shadowed by id and by path, the shadow by id /// Note: If a *path* is both shadowed by id and by path, the shadow by id
/// will be used. /// will be used.
fn unmap_shadow_by_id(&mut self, file_id: FileId) -> FileResult<()>; fn unmap_shadow_by_id(&mut self, file_id: FileId) -> FileResult<()>;
} }
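
A minimal sketch of shadowing a file and removing the shadow again; `world` stands for any type implementing `ShadowApi`, and the content is assumed to be prepared by the caller:

fn with_temporary_override(
    world: &mut impl ShadowApi,
    path: &Path,
    content: Bytes,
) -> FileResult<()> {
    // Shadow the on-disk file with in-memory content...
    world.map_shadow(path, content)?;
    // ...query the world here while the shadow is active...
    // ...and restore the original file afterwards.
    world.unmap_shadow(path)
}
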
/// The extension trait for the shadow api.
pub trait ShadowApiExt { pub trait ShadowApiExt {
/// Wrap the driver with a given shadow file and run the inner function. /// Wraps the universe or world with a given shadow file and runs the inner
/// function.
fn with_shadow_file<T>( fn with_shadow_file<T>(
&mut self, &mut self,
file_path: &Path, file_path: &Path,
@ -86,8 +86,8 @@ pub trait ShadowApiExt {
f: impl FnOnce(&mut Self) -> SourceResult<T>, f: impl FnOnce(&mut Self) -> SourceResult<T>,
) -> SourceResult<T>; ) -> SourceResult<T>;
/// Wrap the driver with a given shadow file and run the inner function by /// Wraps the universe or world with a given shadow file and runs the inner
/// file id. /// function by file id.
/// Note: to enable this function, `ShadowApi` must implement /// Note: to enable this function, `ShadowApi` must implement
/// `_shadow_map_id`. /// `_shadow_map_id`.
fn with_shadow_file_by_id<T>( fn with_shadow_file_by_id<T>(
@ -99,7 +99,8 @@ pub trait ShadowApiExt {
} }
impl<C: ShadowApi> ShadowApiExt for C { impl<C: ShadowApi> ShadowApiExt for C {
/// Wrap the driver with a given shadow file and run the inner function. /// Wraps the universe or world with a given shadow file and runs the inner
/// function.
fn with_shadow_file<T>( fn with_shadow_file<T>(
&mut self, &mut self,
file_path: &Path, file_path: &Path,
@ -112,8 +113,8 @@ impl<C: ShadowApi> ShadowApiExt for C {
res res
} }
/// Wrap the driver with a given shadow file and run the inner function by /// Wraps the universe or world with a given shadow file and runs the inner
/// file id. /// function by file id.
/// Note: to enable this function, `ShadowApi` must implement /// Note: to enable this function, `ShadowApi` must implement
/// `_shadow_map_id`. /// `_shadow_map_id`.
fn with_shadow_file_by_id<T>( fn with_shadow_file_by_id<T>(
@ -132,28 +133,32 @@ impl<C: ShadowApi> ShadowApiExt for C {
/// Latest version of the world dependencies api, which is in beta. /// Latest version of the world dependencies api, which is in beta.
pub trait WorldDeps { pub trait WorldDeps {
/// Iterates over the dependencies of the world.
fn iter_dependencies(&self, f: &mut dyn FnMut(FileId)); fn iter_dependencies(&self, f: &mut dyn FnMut(FileId));
} }
/// type trait interface of [`CompilerWorld`]. /// The type trait interface of [`CompilerWorld`].
pub trait CompilerFeat: Send + Sync + 'static { pub trait CompilerFeat: Send + Sync + 'static {
/// Specify the font resolver for typst compiler. /// The font resolver for the typst compiler.
type FontResolver: FontResolver + Send + Sync + Sized; type FontResolver: FontResolver + Send + Sync + Sized;
/// Specify the access model for VFS. /// The access model for the VFS.
type AccessModel: VfsAccessModel + Clone + Send + Sync + Sized; type AccessModel: VfsAccessModel + Clone + Send + Sync + Sized;
/// Specify the package registry. /// The package registry for the typst compiler.
type Registry: PackageRegistry + Send + Sync + Sized; type Registry: PackageRegistry + Send + Sync + Sized;
} }
/// Which format to use for diagnostics. /// The format to use for diagnostics.
#[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Ord, PartialOrd)] #[derive(Debug, Copy, Clone, Default, Eq, PartialEq, Ord, PartialOrd)]
pub enum DiagnosticFormat { pub enum DiagnosticFormat {
/// The human-readable format.
#[default] #[default]
Human, Human,
/// The short (Unix-flavor) format.
Short, Short,
} }
/// The build information of the world crate.
pub mod build_info { pub mod build_info {
/// The version of the reflexo-world crate. /// The version of the world crate.
pub static VERSION: &str = env!("CARGO_PKG_VERSION"); pub static VERSION: &str = env!("CARGO_PKG_VERSION");
} }

View file

@ -1,3 +1,5 @@
//! The package registry of the world.
use std::sync::Arc; use std::sync::Arc;
use tinymist_std::ImmutPath; use tinymist_std::ImmutPath;
@ -5,11 +7,14 @@ use typst::diag::FileResult;
pub use tinymist_package::*; pub use tinymist_package::*;
/// A path mapper for the package registry.
pub struct RegistryPathMapper<T> { pub struct RegistryPathMapper<T> {
/// The package registry.
pub registry: Arc<T>, pub registry: Arc<T>,
} }
impl<T> RegistryPathMapper<T> { impl<T> RegistryPathMapper<T> {
/// Creates a new path mapper for the package registry.
pub fn new(registry: Arc<T>) -> Self { pub fn new(registry: Arc<T>) -> Self {
Self { registry } Self { registry }
} }

View file

@ -1,4 +1,4 @@
#![allow(missing_docs)] //! The legacy parser of the world.
mod modifier_set; mod modifier_set;
mod semantic_tokens; mod semantic_tokens;

View file

@ -6,14 +6,18 @@ use typst::syntax::{LinkedNode, Source, SyntaxKind, ast};
use super::modifier_set::ModifierSet; use super::modifier_set::ModifierSet;
use super::typst_tokens::{Modifier, TokenType}; use super::typst_tokens::{Modifier, TokenType};
/// The legend of the semantic tokens.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
pub struct SemanticTokensLegend { pub struct SemanticTokensLegend {
/// The token types.
#[serde(rename = "tokenTypes")] #[serde(rename = "tokenTypes")]
pub token_types: Vec<String>, pub token_types: Vec<String>,
/// The token modifiers.
#[serde(rename = "tokenModifiers")] #[serde(rename = "tokenModifiers")]
pub token_modifiers: Vec<String>, pub token_modifiers: Vec<String>,
} }
/// Gets the legend of the semantic tokens.
pub fn get_semantic_tokens_legend() -> SemanticTokensLegend { pub fn get_semantic_tokens_legend() -> SemanticTokensLegend {
SemanticTokensLegend { SemanticTokensLegend {
token_types: TokenType::iter() token_types: TokenType::iter()
@ -33,12 +37,16 @@ pub fn get_semantic_tokens_legend() -> SemanticTokensLegend {
} }
} }
/// The encoding of the offset.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum OffsetEncoding { pub enum OffsetEncoding {
/// The UTF-8 encoding.
Utf8, Utf8,
/// The UTF-16 encoding.
Utf16, Utf16,
} }
/// Gets the full semantic tokens.
pub fn get_semantic_tokens_full(source: &Source, encoding: OffsetEncoding) -> Vec<SemanticToken> { pub fn get_semantic_tokens_full(source: &Source, encoding: OffsetEncoding) -> Vec<SemanticToken> {
let root = LinkedNode::new(source.root()); let root = LinkedNode::new(source.root());
let mut full = tokenize_tree(&root, ModifierSet::empty()); let mut full = tokenize_tree(&root, ModifierSet::empty());
@ -65,6 +73,7 @@ pub fn get_semantic_tokens_full(source: &Source, encoding: OffsetEncoding) -> Ve
full full
} }
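
A minimal sketch of tokenizing a detached source with UTF-16 offsets, as a language server client would expect:

fn tokens_for(text: &str) -> Vec<SemanticToken> {
    let source = Source::detached(text);
    get_semantic_tokens_full(&source, OffsetEncoding::Utf16)
}
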
/// Tokenizes a single node.
fn tokenize_single_node(node: &LinkedNode, modifiers: ModifierSet) -> Option<SemanticToken> { fn tokenize_single_node(node: &LinkedNode, modifiers: ModifierSet) -> Option<SemanticToken> {
let is_leaf = node.children().next().is_none(); let is_leaf = node.children().next().is_none();
@ -73,7 +82,7 @@ fn tokenize_single_node(node: &LinkedNode, modifiers: ModifierSet) -> Option<Sem
.map(|token_type| SemanticToken::new(token_type, modifiers, node)) .map(|token_type| SemanticToken::new(token_type, modifiers, node))
} }
/// Tokenize a node and its children /// Tokenizes a node and its children.
fn tokenize_tree(root: &LinkedNode<'_>, parent_modifiers: ModifierSet) -> Vec<SemanticToken> { fn tokenize_tree(root: &LinkedNode<'_>, parent_modifiers: ModifierSet) -> Vec<SemanticToken> {
let modifiers = parent_modifiers | modifiers_from_node(root); let modifiers = parent_modifiers | modifiers_from_node(root);
@ -84,16 +93,23 @@ fn tokenize_tree(root: &LinkedNode<'_>, parent_modifiers: ModifierSet) -> Vec<Se
token.chain(children).collect() token.chain(children).collect()
} }
/// A semantic token.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct SemanticToken { pub struct SemanticToken {
/// The delta line.
pub delta_line: u32, pub delta_line: u32,
/// The delta start character.
pub delta_start_character: u32, pub delta_start_character: u32,
/// The length.
pub length: u32, pub length: u32,
/// The token type.
pub token_type: u32, pub token_type: u32,
/// The token modifiers.
pub token_modifiers: u32, pub token_modifiers: u32,
} }
impl SemanticToken { impl SemanticToken {
/// Creates a new semantic token.
fn new(token_type: TokenType, modifiers: ModifierSet, node: &LinkedNode) -> Self { fn new(token_type: TokenType, modifiers: ModifierSet, node: &LinkedNode) -> Self {
let source = node.get().clone().into_text(); let source = node.get().clone().into_text();
@ -112,6 +128,8 @@ impl SemanticToken {
/// Determines the [`Modifier`]s to be applied to a node and all its children. /// Determines the [`Modifier`]s to be applied to a node and all its children.
/// ///
/// Returns `ModifierSet::empty()` if the node is not a valid node.
///
/// Note that this does not recurse up, so calling it on a child node may not /// Note that this does not recurse up, so calling it on a child node may not
/// return a modifier that should be applied to it due to a parent. /// return a modifier that should be applied to it due to a parent.
fn modifiers_from_node(node: &LinkedNode) -> ModifierSet { fn modifiers_from_node(node: &LinkedNode) -> ModifierSet {
@ -170,7 +188,9 @@ fn token_from_node(node: &LinkedNode) -> Option<TokenType> {
} }
} }
// TODO: differentiate also using tokens in scope, not just context /// Checks if the identifier is a function.
///
/// TODO: differentiate also using tokens in scope, not just context
fn is_function_ident(ident: &LinkedNode) -> bool { fn is_function_ident(ident: &LinkedNode) -> bool {
let Some(next) = ident.next_leaf() else { let Some(next) = ident.next_leaf() else {
return false; return false;
@ -185,6 +205,7 @@ fn is_function_ident(ident: &LinkedNode) -> bool {
function_call || function_content function_call || function_content
} }
/// Gets the token type from an identifier.
fn token_from_ident(ident: &LinkedNode) -> TokenType { fn token_from_ident(ident: &LinkedNode) -> TokenType {
if is_function_ident(ident) { if is_function_ident(ident) {
TokenType::Function TokenType::Function
@ -193,6 +214,7 @@ fn token_from_ident(ident: &LinkedNode) -> TokenType {
} }
} }
/// Gets the expression following a hashtag.
fn get_expr_following_hashtag<'a>(hashtag: &LinkedNode<'a>) -> Option<LinkedNode<'a>> { fn get_expr_following_hashtag<'a>(hashtag: &LinkedNode<'a>) -> Option<LinkedNode<'a>> {
hashtag hashtag
.next_sibling() .next_sibling()
@ -200,12 +222,14 @@ fn get_expr_following_hashtag<'a>(hashtag: &LinkedNode<'a>) -> Option<LinkedNode
.and_then(|node| node.leftmost_leaf()) .and_then(|node| node.leftmost_leaf())
} }
/// Gets the token type from a hashtag.
fn token_from_hashtag(hashtag: &LinkedNode) -> Option<TokenType> { fn token_from_hashtag(hashtag: &LinkedNode) -> Option<TokenType> {
get_expr_following_hashtag(hashtag) get_expr_following_hashtag(hashtag)
.as_ref() .as_ref()
.and_then(token_from_node) .and_then(token_from_node)
} }
/// Converts an offset to a position in UTF-8.
fn offset_to_position_utf8(typst_offset: usize, typst_source: &Source) -> (u32, u32) { fn offset_to_position_utf8(typst_offset: usize, typst_source: &Source) -> (u32, u32) {
let line_index = typst_source.byte_to_line(typst_offset).unwrap(); let line_index = typst_source.byte_to_line(typst_offset).unwrap();
let column_index = typst_source.byte_to_column(typst_offset).unwrap(); let column_index = typst_source.byte_to_column(typst_offset).unwrap();
@ -213,6 +237,7 @@ fn offset_to_position_utf8(typst_offset: usize, typst_source: &Source) -> (u32,
(line_index as u32, column_index as u32) (line_index as u32, column_index as u32)
} }
/// Converts an offset to a position in UTF-16.
fn offset_to_position_utf16(typst_offset: usize, typst_source: &Source) -> (u32, u32) { fn offset_to_position_utf16(typst_offset: usize, typst_source: &Source) -> (u32, u32) {
let line_index = typst_source.byte_to_line(typst_offset).unwrap(); let line_index = typst_source.byte_to_line(typst_offset).unwrap();

View file

@ -23,10 +23,11 @@ impl ProjectInsId {
pub const PRIMARY: ProjectInsId = ProjectInsId(EcoString::inline("primary")); pub const PRIMARY: ProjectInsId = ProjectInsId(EcoString::inline("primary"));
} }
/// The export signal for the document.
#[deprecated(note = "Use `CompileSignal` directly.")] #[deprecated(note = "Use `CompileSignal` directly.")]
pub type ExportSignal = CompileSignal; pub type ExportSignal = CompileSignal;
/// A signal that possibly triggers an compile (export). /// A signal that possibly triggers a compile (export).
/// ///
/// Whether to compile (export) depends on the current state of the document and /// Whether to compile (export) depends on the current state of the document and
/// the user settings. /// the user settings.
@ -42,7 +43,7 @@ pub struct CompileSignal {
} }
impl CompileSignal { impl CompileSignal {
/// Merge two signals. /// Merges two signals.
pub fn merge(&mut self, other: CompileSignal) { pub fn merge(&mut self, other: CompileSignal) {
self.by_mem_events |= other.by_mem_events; self.by_mem_events |= other.by_mem_events;
self.by_fs_events |= other.by_fs_events; self.by_fs_events |= other.by_fs_events;
@ -50,11 +51,13 @@ impl CompileSignal {
} }
/// Whether there is any reason to compile (export). /// Whether there is any reason to compile (export).
///
/// This is used to determine if the document should be compiled.
pub fn any(&self) -> bool { pub fn any(&self) -> bool {
self.by_mem_events || self.by_fs_events || self.by_entry_update self.by_mem_events || self.by_fs_events || self.by_entry_update
} }
/// Exclude some signals. /// Excludes some signals.
pub fn exclude(&self, excluded: Self) -> Self { pub fn exclude(&self, excluded: Self) -> Self {
Self { Self {
by_mem_events: self.by_mem_events && !excluded.by_mem_events, by_mem_events: self.by_mem_events && !excluded.by_mem_events,
@ -63,6 +66,7 @@ impl CompileSignal {
} }
} }
/// Whether the task should run.
pub fn should_run_task_dyn( pub fn should_run_task_dyn(
&self, &self,
when: &TaskWhen, when: &TaskWhen,
@ -75,6 +79,7 @@ impl CompileSignal {
} }
} }
/// Whether the task should run.
pub fn should_run_task<D: typst::Document>( pub fn should_run_task<D: typst::Document>(
&self, &self,
when: &TaskWhen, when: &TaskWhen,
@ -95,12 +100,14 @@ impl CompileSignal {
} }
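
A minimal sketch of folding signals together before deciding to compile; both values are assumed to come from the project's event sources:

fn needs_compile(mut pending: CompileSignal, incoming: CompileSignal) -> bool {
    // Accumulate the new reasons into the pending ones, then check whether
    // anything at all justifies a compile (export).
    pending.merge(incoming);
    pending.any()
}
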
/// A snapshot of the project and compilation state. /// A snapshot of the project and compilation state.
///
/// This is used to store the state of the project and compilation.
pub struct CompileSnapshot<F: CompilerFeat> { pub struct CompileSnapshot<F: CompilerFeat> {
/// The project id. /// The project id.
pub id: ProjectInsId, pub id: ProjectInsId,
/// The export signal for the document. /// The export signal for the document.
pub signal: CompileSignal, pub signal: CompileSignal,
/// Using world /// The world.
pub world: CompilerWorld<F>, pub world: CompilerWorld<F>,
/// The last successfully compiled document. /// The last successfully compiled document.
pub success_doc: Option<TypstDocument>, pub success_doc: Option<TypstDocument>,

View file

@ -1,4 +1,4 @@
// use std::sync::Arc; //! The source database of the world.
use core::fmt; use core::fmt;
use std::sync::Arc; use std::sync::Arc;
@ -11,10 +11,15 @@ use typst::syntax::Source;
type FileQuery<T> = QueryRef<T, FileError>; type FileQuery<T> = QueryRef<T, FileError>;
/// A cache for a single file.
pub struct SourceCache { pub struct SourceCache {
/// Whether the file is touched by the compile.
touched_by_compile: bool, touched_by_compile: bool,
/// The file id.
fid: FileId, fid: FileId,
/// The source of the file.
source: FileQuery<Source>, source: FileQuery<Source>,
/// The buffer of the file.
buffer: FileQuery<Bytes>, buffer: FileQuery<Bytes>,
} }
@ -24,8 +29,10 @@ impl fmt::Debug for SourceCache {
} }
} }
/// The source database of the world.
#[derive(Clone)] #[derive(Clone)]
pub struct SourceDb { pub struct SourceDb {
/// Whether the database is currently compiling.
pub is_compiling: bool, pub is_compiling: bool,
/// The slots for all the files during a single lifecycle. /// The slots for all the files during a single lifecycle.
pub slots: Arc<Mutex<FxHashMap<FileId, SourceCache>>>, pub slots: Arc<Mutex<FxHashMap<FileId, SourceCache>>>,
@ -38,11 +45,12 @@ impl fmt::Debug for SourceDb {
} }
impl SourceDb { impl SourceDb {
/// Sets whether the database is currently compiling.
pub fn set_is_compiling(&mut self, is_compiling: bool) { pub fn set_is_compiling(&mut self, is_compiling: bool) {
self.is_compiling = is_compiling; self.is_compiling = is_compiling;
} }
/// Returns the overall memory usage for the stored files. /// Gets the overall memory usage for the stored files.
pub fn memory_usage(&self) -> usize { pub fn memory_usage(&self) -> usize {
let mut w = self.slots.lock().len() * core::mem::size_of::<SourceCache>(); let mut w = self.slots.lock().len() * core::mem::size_of::<SourceCache>();
w += self w += self
@ -65,7 +73,7 @@ impl SourceDb {
w w
} }
/// Get all the files that are currently in the VFS. /// Gets all the files that are currently in the VFS.
/// ///
    /// This typically corresponds to the file dependencies of a single     /// This typically corresponds to the file dependencies of a single
/// compilation. /// compilation.
@ -81,12 +89,12 @@ impl SourceDb {
} }
} }
/// Get file content by path. /// Gets the file content by path.
pub fn file(&self, fid: FileId, p: &impl FsProvider) -> FileResult<Bytes> { pub fn file(&self, fid: FileId, p: &impl FsProvider) -> FileResult<Bytes> {
self.slot(fid, |slot| slot.buffer.compute(|| p.read(fid)).cloned()) self.slot(fid, |slot| slot.buffer.compute(|| p.read(fid)).cloned())
} }
    /// Get source content by path and assign the source with a given typst     /// Gets the source content by path and assigns the source a given typst
/// global file id. /// global file id.
/// ///
/// See `Vfs::resolve_with_f` for more information. /// See `Vfs::resolve_with_f` for more information.
@ -96,7 +104,7 @@ impl SourceDb {
}) })
} }
/// Insert a new slot into the vfs. /// Inserts a new slot into the vfs.
fn slot<T>(&self, fid: FileId, f: impl FnOnce(&SourceCache) -> T) -> T { fn slot<T>(&self, fid: FileId, f: impl FnOnce(&SourceCache) -> T) -> T {
let mut slots = self.slots.lock(); let mut slots = self.slots.lock();
f({ f({
@ -115,6 +123,7 @@ impl SourceDb {
}) })
} }
    /// Takes the state of the source database.
pub(crate) fn take(&mut self) -> Self { pub(crate) fn take(&mut self) -> Self {
Self { Self {
is_compiling: self.is_compiling, is_compiling: self.is_compiling,

View file

@ -1,3 +1,17 @@
//! The world of the compiler.
//!
//! A world is a collection of resources that are used by the compiler.
//! A world is created by a universe.
//!
//! The universe is not shared between threads.
//! The world can be shared between threads.
//!
//! Both the universe and the world can be mutated. The difference is that the
//! universe is mutated to change the global state of the compiler, while the
//! world is mutated to run some intermediate computation.
//!
//! Note: If a world is mutated, the cache of the world is invalidated.
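
A minimal sketch of the universe/world split described above: global state is mutated through a revision, then an immutable snapshot is taken for one compilation (imports are assumed from this module):

fn snapshot_with_inputs<F: CompilerFeat>(
    verse: &mut CompilerUniverse<F>,
    inputs: Arc<LazyHash<Dict>>,
) -> CompilerWorld<F> {
    // Mutating the universe bumps its revision...
    verse.increment_revision(|verse| verse.set_inputs(inputs));
    // ...and the snapshot is a shareable view of that new state.
    verse.snapshot()
}
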
use std::{ use std::{
borrow::Cow, borrow::Cow,
num::NonZeroUsize, num::NonZeroUsize,
@ -39,7 +53,7 @@ use crate::{CompilerFeat, ShadowApi, WorldDeps, font::FontResolver};
type CodespanResult<T> = Result<T, CodespanError>; type CodespanResult<T> = Result<T, CodespanError>;
type CodespanError = codespan_reporting::files::Error; type CodespanError = codespan_reporting::files::Error;
/// A universe that provides access to the operating system. /// A universe that provides access to the operating system and the compiler.
/// ///
/// Use [`CompilerUniverse::new_raw`] to create a new universe. The concrete /// Use [`CompilerUniverse::new_raw`] to create a new universe. The concrete
/// implementation usually wraps this function with a more user-friendly `new` /// implementation usually wraps this function with a more user-friendly `new`
@ -47,30 +61,33 @@ type CodespanError = codespan_reporting::files::Error;
/// Use [`CompilerUniverse::snapshot`] to create a new world. /// Use [`CompilerUniverse::snapshot`] to create a new world.
#[derive(Debug)] #[derive(Debug)]
pub struct CompilerUniverse<F: CompilerFeat> { pub struct CompilerUniverse<F: CompilerFeat> {
/// State for the *root & entry* of compilation. /// The state for the *root & entry* of compilation.
/// The world forbids direct access to files outside this directory. /// The world forbids direct access to files outside this directory.
entry: EntryState, entry: EntryState,
/// Additional input arguments to compile the entry file. /// The additional input arguments to compile the entry file.
inputs: Arc<LazyHash<Dict>>, inputs: Arc<LazyHash<Dict>>,
/// Features enabled for the compiler. /// The features enabled for the compiler.
pub features: Features, pub features: Features,
/// Provides font management for typst compiler. /// The font resolver for the compiler.
pub font_resolver: Arc<F::FontResolver>, pub font_resolver: Arc<F::FontResolver>,
/// Provides package management for typst compiler. /// The package registry for the compiler.
pub registry: Arc<F::Registry>, pub registry: Arc<F::Registry>,
/// Provides path-based data access for typst compiler. /// The virtual file system for the compiler.
vfs: Vfs<F::AccessModel>, vfs: Vfs<F::AccessModel>,
/// The current revision of the universe. /// The current revision of the universe.
///
/// The revision is incremented when the universe is mutated.
pub revision: NonZeroUsize, pub revision: NonZeroUsize,
/// The creation timestamp for reproducible builds. /// The creation timestamp for reproducible builds.
pub creation_timestamp: Option<i64>, pub creation_timestamp: Option<i64>,
} }
/// Creates, snapshots, and manages the compiler universe. /// Creates, snapshots, and manages the compiler universe.
impl<F: CompilerFeat> CompilerUniverse<F> { impl<F: CompilerFeat> CompilerUniverse<F> {
/// Create a [`CompilerUniverse`] with feature implementation. /// Creates a [`CompilerUniverse`] with feature implementation.
/// ///
/// Although this function is public, it is always unstable and not intended /// Although this function is public, it is always unstable and not intended
/// to be used directly. /// to be used directly.
@ -99,44 +116,53 @@ impl<F: CompilerFeat> CompilerUniverse<F> {
} }
} }
/// Wrap driver with a given entry file. /// Wraps the universe with a given entry file.
pub fn with_entry_file(mut self, entry_file: PathBuf) -> Self { pub fn with_entry_file(mut self, entry_file: PathBuf) -> Self {
let _ = self.increment_revision(|this| this.set_entry_file_(entry_file.as_path().into())); let _ = self.increment_revision(|this| this.set_entry_file_(entry_file.as_path().into()));
self self
} }
/// Gets the entry file of the universe.
pub fn entry_file(&self) -> Option<PathResolution> { pub fn entry_file(&self) -> Option<PathResolution> {
self.path_for_id(self.main_id()?).ok() self.path_for_id(self.main_id()?).ok()
} }
/// Gets the inputs of the universe.
pub fn inputs(&self) -> Arc<LazyHash<Dict>> { pub fn inputs(&self) -> Arc<LazyHash<Dict>> {
self.inputs.clone() self.inputs.clone()
} }
/// Creates a new world from the universe.
pub fn snapshot(&self) -> CompilerWorld<F> { pub fn snapshot(&self) -> CompilerWorld<F> {
self.snapshot_with(None) self.snapshot_with(None)
} }
// todo: remove me. /// Creates a new computation graph from the universe.
///
/// This is a legacy method and will be removed in the future.
///
/// TODO: remove me.
pub fn computation(&self) -> Arc<WorldComputeGraph<F>> { pub fn computation(&self) -> Arc<WorldComputeGraph<F>> {
let world = self.snapshot(); let world = self.snapshot();
let snap = CompileSnapshot::from_world(world); let snap = CompileSnapshot::from_world(world);
WorldComputeGraph::new(snap) WorldComputeGraph::new(snap)
} }
/// Creates a new computation graph from the universe with a given mutant.
pub fn computation_with(&self, mutant: TaskInputs) -> Arc<WorldComputeGraph<F>> { pub fn computation_with(&self, mutant: TaskInputs) -> Arc<WorldComputeGraph<F>> {
let world = self.snapshot_with(Some(mutant)); let world = self.snapshot_with(Some(mutant));
let snap = CompileSnapshot::from_world(world); let snap = CompileSnapshot::from_world(world);
WorldComputeGraph::new(snap) WorldComputeGraph::new(snap)
} }
/// Creates a new computation graph from the universe with a given entry
/// content and inputs.
pub fn snapshot_with_entry_content( pub fn snapshot_with_entry_content(
&self, &self,
content: Bytes, content: Bytes,
inputs: Option<TaskInputs>, inputs: Option<TaskInputs>,
) -> Arc<WorldComputeGraph<F>> { ) -> Arc<WorldComputeGraph<F>> {
// checkout the entry file // Checks out the entry file.
let mut world = if self.main_id().is_some() { let mut world = if self.main_id().is_some() {
self.snapshot_with(inputs) self.snapshot_with(inputs)
} else { } else {
@ -155,6 +181,7 @@ impl<F: CompilerFeat> CompilerUniverse<F> {
WorldComputeGraph::new(snap) WorldComputeGraph::new(snap)
} }
/// Creates a new world from the universe with a given mutant.
pub fn snapshot_with(&self, mutant: Option<TaskInputs>) -> CompilerWorld<F> { pub fn snapshot_with(&self, mutant: Option<TaskInputs>) -> CompilerWorld<F> {
let w = CompilerWorld { let w = CompilerWorld {
entry: self.entry.clone(), entry: self.entry.clone(),
@ -176,7 +203,7 @@ impl<F: CompilerFeat> CompilerUniverse<F> {
mutant.map(|m| w.task(m)).unwrap_or(w) mutant.map(|m| w.task(m)).unwrap_or(w)
} }
/// Increment revision with actions. /// Increments the revision with actions.
pub fn increment_revision<T>(&mut self, f: impl FnOnce(&mut RevisingUniverse<F>) -> T) -> T { pub fn increment_revision<T>(&mut self, f: impl FnOnce(&mut RevisingUniverse<F>) -> T) -> T {
f(&mut RevisingUniverse { f(&mut RevisingUniverse {
vfs_revision: self.vfs.revision(), vfs_revision: self.vfs.revision(),
@ -190,13 +217,13 @@ impl<F: CompilerFeat> CompilerUniverse<F> {
}) })
} }
/// Mutate the entry state and return the old state. /// Mutates the entry state and returns the old state.
fn mutate_entry_(&mut self, mut state: EntryState) -> SourceResult<EntryState> { fn mutate_entry_(&mut self, mut state: EntryState) -> SourceResult<EntryState> {
std::mem::swap(&mut self.entry, &mut state); std::mem::swap(&mut self.entry, &mut state);
Ok(state) Ok(state)
} }
/// set an entry file. /// Sets an entry file.
fn set_entry_file_(&mut self, entry_file: Arc<Path>) -> SourceResult<()> { fn set_entry_file_(&mut self, entry_file: Arc<Path>) -> SourceResult<()> {
let state = self.entry_state(); let state = self.entry_state();
let state = state let state = state
@ -210,30 +237,33 @@ impl<F: CompilerFeat> CompilerUniverse<F> {
Ok(()) Ok(())
} }
/// Gets the virtual file system of the universe.
///
/// To mutate the vfs, use [`CompilerUniverse::increment_revision`].
pub fn vfs(&self) -> &Vfs<F::AccessModel> { pub fn vfs(&self) -> &Vfs<F::AccessModel> {
&self.vfs &self.vfs
} }
} }
impl<F: CompilerFeat> CompilerUniverse<F> { impl<F: CompilerFeat> CompilerUniverse<F> {
/// Reset the world for a new lifecycle (of garbage collection). /// Resets the world for a new lifecycle (of garbage collection).
pub fn reset(&mut self) { pub fn reset(&mut self) {
self.vfs.reset_all(); self.vfs.reset_all();
// todo: shared state // todo: shared state
} }
/// Clear the vfs cache that is not touched for a long time. /// Clears the vfs cache that is not touched for a long time.
pub fn evict(&mut self, vfs_threshold: usize) { pub fn evict(&mut self, vfs_threshold: usize) {
self.vfs.reset_access_model(); self.vfs.reset_access_model();
self.vfs.evict(vfs_threshold); self.vfs.evict(vfs_threshold);
} }
/// Resolve the real path for a file id. /// Resolves the real path for a file id.
pub fn path_for_id(&self, id: FileId) -> Result<PathResolution, FileError> { pub fn path_for_id(&self, id: FileId) -> Result<PathResolution, FileError> {
self.vfs.file_path(id) self.vfs.file_path(id)
} }
    /// Resolve the root of the workspace.     /// Resolves the workspace file id for a path.
pub fn id_for_path(&self, path: &Path) -> Option<FileId> { pub fn id_for_path(&self, path: &Path) -> Option<FileId> {
let root = self.entry.workspace_root()?; let root = self.entry.workspace_root()?;
Some(WorkspaceResolver::workspace_file( Some(WorkspaceResolver::workspace_file(
@ -242,10 +272,12 @@ impl<F: CompilerFeat> CompilerUniverse<F> {
)) ))
} }
/// Gets the semantic token legend.
pub fn get_semantic_token_legend(&self) -> Arc<SemanticTokensLegend> { pub fn get_semantic_token_legend(&self) -> Arc<SemanticTokensLegend> {
Arc::new(get_semantic_tokens_legend()) Arc::new(get_semantic_tokens_legend())
} }
/// Gets the semantic tokens.
pub fn get_semantic_tokens( pub fn get_semantic_tokens(
&self, &self,
file_path: Option<String>, file_path: Option<String>,
@ -324,14 +356,23 @@ impl<F: CompilerFeat> EntryManager for CompilerUniverse<F> {
} }
} }
/// The state of the universe during revision.
pub struct RevisingUniverse<'a, F: CompilerFeat> { pub struct RevisingUniverse<'a, F: CompilerFeat> {
/// Whether the view has changed.
view_changed: bool, view_changed: bool,
/// The revision of the vfs.
vfs_revision: NonZeroUsize, vfs_revision: NonZeroUsize,
/// Whether the font has changed.
font_changed: bool, font_changed: bool,
/// Whether the creation timestamp has changed.
creation_timestamp_changed: bool, creation_timestamp_changed: bool,
/// The revision of the font.
font_revision: Option<NonZeroUsize>, font_revision: Option<NonZeroUsize>,
/// Whether the registry has changed.
registry_changed: bool, registry_changed: bool,
/// The revision of the registry.
registry_revision: Option<NonZeroUsize>, registry_revision: Option<NonZeroUsize>,
/// The inner revising universe.
pub inner: &'a mut CompilerUniverse<F>, pub inner: &'a mut CompilerUniverse<F>,
} }
@ -377,37 +418,42 @@ impl<F: CompilerFeat> Drop for RevisingUniverse<'_, F> {
} }
impl<F: CompilerFeat> RevisingUniverse<'_, F> { impl<F: CompilerFeat> RevisingUniverse<'_, F> {
/// Gets the revising vfs.
pub fn vfs(&mut self) -> RevisingVfs<'_, F::AccessModel> { pub fn vfs(&mut self) -> RevisingVfs<'_, F::AccessModel> {
self.vfs.revise() self.vfs.revise()
} }
    /// Sets the font resolver.
pub fn set_fonts(&mut self, fonts: Arc<F::FontResolver>) { pub fn set_fonts(&mut self, fonts: Arc<F::FontResolver>) {
self.font_changed = true; self.font_changed = true;
self.inner.font_resolver = fonts; self.inner.font_resolver = fonts;
} }
    /// Sets the package registry.
pub fn set_package(&mut self, packages: Arc<F::Registry>) { pub fn set_package(&mut self, packages: Arc<F::Registry>) {
self.registry_changed = true; self.registry_changed = true;
self.inner.registry = packages; self.inner.registry = packages;
} }
/// Set the inputs for the compiler. /// Sets the inputs for the compiler.
pub fn set_inputs(&mut self, inputs: Arc<LazyHash<Dict>>) { pub fn set_inputs(&mut self, inputs: Arc<LazyHash<Dict>>) {
self.view_changed = true; self.view_changed = true;
self.inner.inputs = inputs; self.inner.inputs = inputs;
} }
/// Set the creation timestamp for reproducible builds. /// Sets the creation timestamp for reproducible builds.
pub fn set_creation_timestamp(&mut self, creation_timestamp: Option<i64>) { pub fn set_creation_timestamp(&mut self, creation_timestamp: Option<i64>) {
self.creation_timestamp_changed = creation_timestamp != self.inner.creation_timestamp; self.creation_timestamp_changed = creation_timestamp != self.inner.creation_timestamp;
self.inner.creation_timestamp = creation_timestamp; self.inner.creation_timestamp = creation_timestamp;
} }
/// Sets the entry file.
pub fn set_entry_file(&mut self, entry_file: Arc<Path>) -> SourceResult<()> { pub fn set_entry_file(&mut self, entry_file: Arc<Path>) -> SourceResult<()> {
self.view_changed = true; self.view_changed = true;
self.inner.set_entry_file_(entry_file) self.inner.set_entry_file_(entry_file)
} }
/// Mutates the entry state.
pub fn mutate_entry(&mut self, state: EntryState) -> SourceResult<EntryState> { pub fn mutate_entry(&mut self, state: EntryState) -> SourceResult<EntryState> {
self.view_changed = true; self.view_changed = true;
@ -421,28 +467,34 @@ impl<F: CompilerFeat> RevisingUniverse<'_, F> {
self.inner.mutate_entry_(state) self.inner.mutate_entry_(state)
} }
    /// Forces the revision to be incremented even if nothing else changed.
pub fn flush(&mut self) { pub fn flush(&mut self) {
self.view_changed = true; self.view_changed = true;
} }
/// Checks if the font has changed.
pub fn font_changed(&self) -> bool { pub fn font_changed(&self) -> bool {
self.font_changed && is_revision_changed(self.font_revision, self.font_resolver.revision()) self.font_changed && is_revision_changed(self.font_revision, self.font_resolver.revision())
} }
/// Checks if the creation timestamp has changed.
pub fn creation_timestamp_changed(&self) -> bool { pub fn creation_timestamp_changed(&self) -> bool {
self.creation_timestamp_changed self.creation_timestamp_changed
} }
/// Checks if the registry has changed.
pub fn registry_changed(&self) -> bool { pub fn registry_changed(&self) -> bool {
self.registry_changed self.registry_changed
&& is_revision_changed(self.registry_revision, self.registry.revision()) && is_revision_changed(self.registry_revision, self.registry.revision())
} }
/// Checks if the vfs has changed.
pub fn vfs_changed(&self) -> bool { pub fn vfs_changed(&self) -> bool {
self.vfs_revision != self.vfs.revision() self.vfs_revision != self.vfs.revision()
} }
} }
/// Checks if the revision has changed.
fn is_revision_changed(a: Option<NonZeroUsize>, b: Option<NonZeroUsize>) -> bool { fn is_revision_changed(a: Option<NonZeroUsize>, b: Option<NonZeroUsize>) -> bool {
a.is_none() || b.is_none() || a != b a.is_none() || b.is_none() || a != b
} }
@ -452,6 +504,7 @@ type NowStorage = chrono::DateTime<chrono::Local>;
#[cfg(not(any(feature = "web", feature = "system")))] #[cfg(not(any(feature = "web", feature = "system")))]
type NowStorage = tinymist_std::time::UtcDateTime; type NowStorage = tinymist_std::time::UtcDateTime;
/// The world of the compiler.
pub struct CompilerWorld<F: CompilerFeat> { pub struct CompilerWorld<F: CompilerFeat> {
/// State for the *root & entry* of compilation. /// State for the *root & entry* of compilation.
/// The world forbids direct access to files outside this directory. /// The world forbids direct access to files outside this directory.
@ -486,13 +539,17 @@ impl<F: CompilerFeat> Clone for CompilerWorld<F> {
} }
} }
/// The inputs for the compiler.
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub struct TaskInputs { pub struct TaskInputs {
/// The entry state.
pub entry: Option<EntryState>, pub entry: Option<EntryState>,
/// The inputs.
pub inputs: Option<Arc<LazyHash<Dict>>>, pub inputs: Option<Arc<LazyHash<Dict>>>,
} }
impl<F: CompilerFeat> CompilerWorld<F> { impl<F: CompilerFeat> CompilerWorld<F> {
/// Creates a new world from the current world with the given inputs.
pub fn task(&self, mutant: TaskInputs) -> CompilerWorld<F> { pub fn task(&self, mutant: TaskInputs) -> CompilerWorld<F> {
// Fetch to avoid inconsistent state. // Fetch to avoid inconsistent state.
let _ = self.today(None); let _ = self.today(None);
@ -549,10 +606,16 @@ impl<F: CompilerFeat> CompilerWorld<F> {
self.source_db.take() self.source_db.take()
} }
/// Gets the vfs.
pub fn vfs(&self) -> &Vfs<F::AccessModel> { pub fn vfs(&self) -> &Vfs<F::AccessModel> {
&self.vfs &self.vfs
} }
/// Gets the inputs.
pub fn inputs(&self) -> Arc<LazyHash<Dict>> {
self.inputs.clone()
}
/// Sets flag to indicate whether the compiler is currently compiling. /// Sets flag to indicate whether the compiler is currently compiling.
/// Note: Since `CompilerWorld` can be cloned, you can clone the world and /// Note: Since `CompilerWorld` can be cloned, you can clone the world and
/// set the flag then to avoid affecting the original world. /// set the flag then to avoid affecting the original world.
@ -560,18 +623,17 @@ impl<F: CompilerFeat> CompilerWorld<F> {
self.source_db.is_compiling = is_compiling; self.source_db.is_compiling = is_compiling;
} }
pub fn inputs(&self) -> Arc<LazyHash<Dict>> { /// Gets the revision.
self.inputs.clone()
}
pub fn revision(&self) -> NonZeroUsize { pub fn revision(&self) -> NonZeroUsize {
self.revision self.revision
} }
/// Evicts the vfs.
pub fn evict_vfs(&mut self, threshold: usize) { pub fn evict_vfs(&mut self, threshold: usize) {
self.vfs.evict(threshold); self.vfs.evict(threshold);
} }
/// Evicts the source cache.
pub fn evict_source_cache(&mut self, threshold: usize) { pub fn evict_source_cache(&mut self, threshold: usize) {
self.vfs self.vfs
.clone_source_cache() .clone_source_cache()
@ -592,6 +654,7 @@ impl<F: CompilerFeat> CompilerWorld<F> {
)) ))
} }
/// Resolves the file id by path.
pub fn file_id_by_path(&self, path: &Path) -> FileResult<FileId> { pub fn file_id_by_path(&self, path: &Path) -> FileResult<FileId> {
// todo: source in packages // todo: source in packages
match self.id_for_path(path) { match self.id_for_path(path) {
@ -603,10 +666,12 @@ impl<F: CompilerFeat> CompilerWorld<F> {
} }
} }
/// Resolves the source by path.
pub fn source_by_path(&self, path: &Path) -> FileResult<Source> { pub fn source_by_path(&self, path: &Path) -> FileResult<Source> {
self.source(self.file_id_by_path(path)?) self.source(self.file_id_by_path(path)?)
} }
/// Gets the depended files.
pub fn depended_files(&self) -> EcoVec<FileId> { pub fn depended_files(&self) -> EcoVec<FileId> {
let mut deps = EcoVec::new(); let mut deps = EcoVec::new();
self.iter_dependencies(&mut |file_id| { self.iter_dependencies(&mut |file_id| {
@ -615,6 +680,7 @@ impl<F: CompilerFeat> CompilerWorld<F> {
deps deps
} }
/// Gets the depended fs paths.
pub fn depended_fs_paths(&self) -> EcoVec<ImmutPath> { pub fn depended_fs_paths(&self) -> EcoVec<ImmutPath> {
let mut deps = EcoVec::new(); let mut deps = EcoVec::new();
self.iter_dependencies(&mut |file_id| { self.iter_dependencies(&mut |file_id| {
@ -635,6 +701,7 @@ impl<F: CompilerFeat> CompilerWorld<F> {
self.registry.packages() self.registry.packages()
} }
/// Creates a task target for paged documents.
pub fn paged_task(&self) -> Cow<'_, CompilerWorld<F>> { pub fn paged_task(&self) -> Cow<'_, CompilerWorld<F>> {
let force_html = self.features.is_enabled(typst::Feature::Html); let force_html = self.features.is_enabled(typst::Feature::Html);
let enabled_paged = !self.library.features.is_enabled(typst::Feature::Html) || force_html; let enabled_paged = !self.library.features.is_enabled(typst::Feature::Html) || force_html;
@ -649,6 +716,7 @@ impl<F: CompilerFeat> CompilerWorld<F> {
Cow::Owned(world) Cow::Owned(world)
} }
/// Creates a task target for html documents.
pub fn html_task(&self) -> Cow<'_, CompilerWorld<F>> { pub fn html_task(&self) -> Cow<'_, CompilerWorld<F>> {
let enabled_html = self.library.features.is_enabled(typst::Feature::Html); let enabled_html = self.library.features.is_enabled(typst::Feature::Html);
@ -848,6 +916,7 @@ pub fn with_main(world: &dyn World, id: FileId) -> WorldWithMain<'_> {
WorldWithMain { world, main: id } WorldWithMain { world, main: id }
} }
/// A world with a main file.
pub struct WorldWithMain<'a> { pub struct WorldWithMain<'a> {
world: &'a dyn World, world: &'a dyn World,
main: FileId, main: FileId,
@ -883,15 +952,21 @@ impl typst::World for WorldWithMain<'_> {
} }
} }
/// A world that can be used for source code reporting.
pub trait SourceWorld: World { pub trait SourceWorld: World {
/// Gets this world as a `dyn World` trait object.
fn as_world(&self) -> &dyn World; fn as_world(&self) -> &dyn World;
/// Gets the path for a file id.
fn path_for_id(&self, id: FileId) -> Result<PathResolution, FileError>; fn path_for_id(&self, id: FileId) -> Result<PathResolution, FileError>;
/// Gets the source by file id.
fn lookup(&self, id: FileId) -> Source { fn lookup(&self, id: FileId) -> Source {
self.source(id) self.source(id)
.expect("file id does not point to any source file") .expect("file id does not point to any source file")
} }
/// Gets the source range by span.
fn source_range(&self, span: Span) -> Option<std::ops::Range<usize>> { fn source_range(&self, span: Span) -> Option<std::ops::Range<usize>> {
self.range(span) self.range(span)
} }
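For illustration only (not part of this commit), a hedged sketch of how a reporter might combine these default methods; `world: &dyn SourceWorld` and `span: Span` are hypothetical bindings.

// Resolve a span back to the text it covers, if it points into a loaded file.
if let Some(range) = world.source_range(span) {
    if let Some(file_id) = span.id() {
        let source = world.lookup(file_id);
        let snippet = &source.text()[range];
        eprintln!("span resolves to: {snippet}");
    }
}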
@ -902,17 +977,20 @@ impl<F: CompilerFeat> SourceWorld for CompilerWorld<F> {
self self
} }
/// Resolve the real path for a file id. /// Resolves the real path for a file id.
fn path_for_id(&self, id: FileId) -> Result<PathResolution, FileError> { fn path_for_id(&self, id: FileId) -> Result<PathResolution, FileError> {
self.path_for_id(id) self.path_for_id(id)
} }
} }
/// A world that can be used for source code reporting.
pub struct CodeSpanReportWorld<'a> { pub struct CodeSpanReportWorld<'a> {
/// The world used for reporting.
pub world: &'a dyn SourceWorld, pub world: &'a dyn SourceWorld,
} }
impl<'a> CodeSpanReportWorld<'a> { impl<'a> CodeSpanReportWorld<'a> {
/// Creates a new code span report world.
pub fn new(world: &'a dyn SourceWorld) -> Self { pub fn new(world: &'a dyn SourceWorld) -> Self {
Self { world } Self { world }
} }

View file

@ -17,8 +17,6 @@
//! //!
//! The [`CompileHandlerImpl`] will push information to other actors. //! The [`CompileHandlerImpl`] will push information to other actors.
#![allow(missing_docs)]
use reflexo_typst::TypstDocument; use reflexo_typst::TypstDocument;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
pub use tinymist_project::*; pub use tinymist_project::*;
@ -252,12 +250,18 @@ impl ServerState {
} }
} }
/// The extra state of a project instance.
#[derive(Default)] #[derive(Default)]
pub struct ProjectInsStateExt { pub struct ProjectInsStateExt {
/// The revision notified to the compile handler.
pub notified_revision: usize, pub notified_revision: usize,
/// The pending reasons that are not emitted yet during compilation.
pub pending_reasons: CompileSignal, pub pending_reasons: CompileSignal,
/// The reasons that have been emitted since the last compilation.
pub emitted_reasons: CompileSignal, pub emitted_reasons: CompileSignal,
/// The time at which the current compilation started, if one is running.
pub compiling_since: Option<tinymist_std::time::Time>, pub compiling_since: Option<tinymist_std::time::Time>,
/// The artifact of the last compilation.
pub last_compilation: Option<LspCompiledArtifact>, pub last_compilation: Option<LspCompiledArtifact>,
} }
@ -313,33 +317,40 @@ impl ProjectInsStateExt {
} }
} }
/// A project state.
pub struct ProjectState { pub struct ProjectState {
/// The compiler instance.
pub compiler: LspProjectCompiler, pub compiler: LspProjectCompiler,
/// The analysis data.
pub analysis: Arc<Analysis>, pub analysis: Arc<Analysis>,
/// The query statistics.
pub stats: CompilerQueryStats, pub stats: CompilerQueryStats,
/// The preview state.
#[cfg(feature = "preview")] #[cfg(feature = "preview")]
pub preview: ProjectPreviewState, pub preview: ProjectPreviewState,
/// The export task.
#[cfg(feature = "export")] #[cfg(feature = "export")]
pub export: crate::task::ExportTask, pub export: crate::task::ExportTask,
} }
impl ProjectState { impl ProjectState {
/// The primary instance id /// The primary instance id.
pub fn primary_id(&self) -> &ProjectInsId { pub fn primary_id(&self) -> &ProjectInsId {
&self.compiler.primary.id &self.compiler.primary.id
} }
/// Snapshot the compiler thread for tasks /// Snapshots the compiler thread for tasks.
pub fn snapshot(&mut self) -> Result<LspComputeGraph> { pub fn snapshot(&mut self) -> Result<LspComputeGraph> {
Ok(self.compiler.snapshot()) Ok(self.compiler.snapshot())
} }
/// Snapshot the compiler thread for language queries /// Snapshots the compiler thread for language queries.
pub fn query_snapshot(&mut self, q: Option<&CompilerQueryRequest>) -> Result<LspQuerySnapshot> { pub fn query_snapshot(&mut self, q: Option<&CompilerQueryRequest>) -> Result<LspQuerySnapshot> {
let snap = self.snapshot()?; let snap = self.snapshot()?;
Ok(self.analysis.clone().query_snapshot(snap, q)) Ok(self.analysis.clone().query_snapshot(snap, q))
} }
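For illustration only (not part of this commit), a hedged sketch of taking both kinds of snapshots; `project: &mut ProjectState` is a hypothetical binding and the calls run inside a function returning `Result<_>`.

let task_snap = project.snapshot()?;            // for background tasks
let query_snap = project.query_snapshot(None)?; // for language queries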
/// Handles an interrupt.
pub fn do_interrupt(compiler: &mut LspProjectCompiler, intr: Interrupt<LspCompilerFeat>) { pub fn do_interrupt(compiler: &mut LspProjectCompiler, intr: Interrupt<LspCompilerFeat>) {
if let Interrupt::Compiled(compiled) = &intr { if let Interrupt::Compiled(compiled) = &intr {
let proj = compiler.projects().find(|p| &p.id == compiled.id()); let proj = compiler.projects().find(|p| &p.id == compiled.id());
@ -357,14 +368,17 @@ impl ProjectState {
compiler.process(intr); compiler.process(intr);
} }
/// Interrupts the compiler.
pub fn interrupt(&mut self, intr: Interrupt<LspCompilerFeat>) { pub fn interrupt(&mut self, intr: Interrupt<LspCompilerFeat>) {
Self::do_interrupt(&mut self.compiler, intr); Self::do_interrupt(&mut self.compiler, intr);
} }
/// Stops the project.
pub(crate) fn stop(&mut self) { pub(crate) fn stop(&mut self) {
// todo: stop all compilations // todo: stop all compilations
} }
/// Restarts a dedicated project.
pub(crate) fn restart_dedicate( pub(crate) fn restart_dedicate(
&mut self, &mut self,
group: &str, group: &str,
@ -374,10 +388,11 @@ impl ProjectState {
} }
} }
/// The implementation of the periscope provider.
struct TypstPeriscopeProvider(PeriscopeRenderer); struct TypstPeriscopeProvider(PeriscopeRenderer);
impl PeriscopeProvider for TypstPeriscopeProvider { impl PeriscopeProvider for TypstPeriscopeProvider {
/// Resolve periscope image at the given position. /// Resolves the periscope image at the given position.
fn periscope_at( fn periscope_at(
&self, &self,
ctx: &mut LocalContext, ctx: &mut LocalContext,
@ -388,14 +403,17 @@ impl PeriscopeProvider for TypstPeriscopeProvider {
} }
} }
/// The preview state of a project.
#[derive(Default, Clone)] #[derive(Default, Clone)]
pub struct ProjectPreviewState { pub struct ProjectPreviewState {
/// The inner state.
#[cfg(feature = "preview")] #[cfg(feature = "preview")]
pub(crate) inner: Arc<Mutex<FxHashMap<ProjectInsId, Arc<tinymist_preview::CompileWatcher>>>>, pub(crate) inner: Arc<Mutex<FxHashMap<ProjectInsId, Arc<tinymist_preview::CompileWatcher>>>>,
} }
#[cfg(feature = "preview")] #[cfg(feature = "preview")]
impl ProjectPreviewState { impl ProjectPreviewState {
/// Registers a compile watcher.
#[must_use] #[must_use]
pub fn register( pub fn register(
&self, &self,
@ -413,18 +431,22 @@ impl ProjectPreviewState {
true true
} }
/// Unregisters a compile watcher.
#[must_use] #[must_use]
pub fn unregister(&self, task_id: &ProjectInsId) -> bool { pub fn unregister(&self, task_id: &ProjectInsId) -> bool {
self.inner.lock().remove(task_id).is_some() self.inner.lock().remove(task_id).is_some()
} }
/// Gets a compile watcher.
#[must_use] #[must_use]
pub fn get(&self, task_id: &ProjectInsId) -> Option<Arc<tinymist_preview::CompileWatcher>> { pub fn get(&self, task_id: &ProjectInsId) -> Option<Arc<tinymist_preview::CompileWatcher>> {
self.inner.lock().get(task_id).cloned() self.inner.lock().get(task_id).cloned()
} }
} }
/// The implementation of the compile handler.
pub struct CompileHandlerImpl { pub struct CompileHandlerImpl {
/// The analysis data.
pub(crate) analysis: Arc<Analysis>, pub(crate) analysis: Arc<Analysis>,
#[cfg(feature = "preview")] #[cfg(feature = "preview")]
@ -432,20 +454,28 @@ pub struct CompileHandlerImpl {
/// Whether the compile server is running in standalone CLI (not as a /// Whether the compile server is running in standalone CLI (not as a
/// language server). /// language server).
pub is_standalone: bool, pub is_standalone: bool,
/// The export task.
#[cfg(feature = "export")] #[cfg(feature = "export")]
pub(crate) export: crate::task::ExportTask, pub(crate) export: crate::task::ExportTask,
/// The editor sender, used to send editor requests to the editor.
pub(crate) editor_tx: EditorSender, pub(crate) editor_tx: EditorSender,
/// The client used to send events back to the server itself or the clients.
pub(crate) client: Arc<dyn ProjectClient>, pub(crate) client: Arc<dyn ProjectClient>,
/// The status revision map, used to track the status of the projects.
pub(crate) status_revision: Mutex<FxHashMap<ProjectInsId, usize>>, pub(crate) status_revision: Mutex<FxHashMap<ProjectInsId, usize>>,
/// The notified revision map, used to track the notified revisions of the
/// projects.
pub(crate) notified_revision: Mutex<FxHashMap<ProjectInsId, (usize, CompileSignal)>>, pub(crate) notified_revision: Mutex<FxHashMap<ProjectInsId, (usize, CompileSignal)>>,
} }
/// The client of the project.
pub trait ProjectClient: Send + Sync + 'static { pub trait ProjectClient: Send + Sync + 'static {
/// Sends an interrupt event back to the server.
fn interrupt(&self, event: LspInterrupt); fn interrupt(&self, event: LspInterrupt);
/// Sends a server event back to the server.
#[cfg(feature = "preview")] #[cfg(feature = "preview")]
fn server_event(&self, event: ServerEvent); fn server_event(&self, event: ServerEvent);
/// Sends a dev event to the client, used for neovim's E2E testing.
#[cfg(feature = "export")] #[cfg(feature = "export")]
fn dev_event(&self, event: DevEvent); fn dev_event(&self, event: DevEvent);
} }
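For illustration only (not part of this commit), a minimal no-op implementation sketch of this trait, e.g. for tests; the type name `NoopClient` is hypothetical.

struct NoopClient;

impl ProjectClient for NoopClient {
    fn interrupt(&self, _event: LspInterrupt) {
        // A real client would forward the interrupt back into the server loop.
    }

    #[cfg(feature = "preview")]
    fn server_event(&self, _event: ServerEvent) {}

    #[cfg(feature = "export")]
    fn dev_event(&self, _event: DevEvent) {}
}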
@ -485,12 +515,14 @@ impl ProjectClient for mpsc::UnboundedSender<LspInterrupt> {
} }
impl CompileHandlerImpl { impl CompileHandlerImpl {
/// Pushes diagnostics to the editor.
fn push_diagnostics(&self, dv: ProjVersion, diagnostics: Option<DiagnosticsMap>) { fn push_diagnostics(&self, dv: ProjVersion, diagnostics: Option<DiagnosticsMap>) {
self.editor_tx self.editor_tx
.send(EditorRequest::Diag(dv, diagnostics)) .send(EditorRequest::Diag(dv, diagnostics))
.log_error("failed to send diagnostics"); .log_error("failed to send diagnostics");
} }
/// Notifies the editor of the diagnostics.
fn notify_diagnostics(&self, art: &LspCompiledArtifact) { fn notify_diagnostics(&self, art: &LspCompiledArtifact) {
let dv = ProjVersion { let dv = ProjVersion {
id: art.id().clone(), id: art.id().clone(),
@ -747,21 +779,34 @@ impl CompileHandler<LspCompilerFeat, ProjectInsStateExt> for CompileHandlerImpl
} }
} }
/// A query snapshot with statistics.
pub type QuerySnapWithStat = (LspQuerySnapshot, QueryStatGuard); pub type QuerySnapWithStat = (LspQuerySnapshot, QueryStatGuard);
/// A notification event that an export was checked.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct DevExportEvent { pub struct DevExportEvent {
/// The project id.
pub id: String, pub id: String,
/// The configured timing to execute the export.
pub when: TaskWhen, pub when: TaskWhen,
/// Whether the export is needed.
pub need_export: bool, pub need_export: bool,
/// The signal to check if the export is needed.
pub signal: CompileSignal, pub signal: CompileSignal,
/// The path to write the exported artifact.
///
/// If `None`, the artifact will be written to the default path according
/// to the input path.
pub path: Option<String>, pub path: Option<String>,
} }
/// A notification event that a dev event was triggered, used for neovim's E2E
/// testing.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", tag = "type")] #[serde(rename_all = "camelCase", tag = "type")]
pub enum DevEvent { pub enum DevEvent {
/// A notification event that an export was triggered.
Export(DevExportEvent), Export(DevExportEvent),
} }
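For illustration only (not part of this commit), a hedged sketch of how a test harness might consume this event; the handler name and logging are assumptions, while the field names come from the struct and serde attributes above (on the wire the variant arrives tagged as "type": "export" with camelCase fields).

fn on_dev_event(event: DevEvent) {
    match event {
        DevEvent::Export(export) => {
            // `id`, `need_export`, and `path` are the fields documented above.
            eprintln!(
                "export checked for {}: needed = {}, path = {:?}",
                export.id, export.need_export, export.path
            );
        }
    }
}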

View file

@ -1,4 +1,5 @@
#![allow(missing_docs)] //! Next generation of the export task. Not yet used because it is still
//! too complicated.
use std::sync::Arc; use std::sync::Arc;
@ -18,10 +19,12 @@ use crate::world::base::{
OptionDocumentTask, PagedCompilationTask, WorldComputable, WorldComputeGraph, OptionDocumentTask, PagedCompilationTask, WorldComputable, WorldComputeGraph,
}; };
/// A task that checks if the project needs to be compiled.
#[derive(Clone, Copy, Default)] #[derive(Clone, Copy, Default)]
pub struct ProjectCompilation; pub struct ProjectCompilation;
impl ProjectCompilation { impl ProjectCompilation {
/// Preconfigures the timings for the project compilation.
pub fn preconfig_timings<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<bool> { pub fn preconfig_timings<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<bool> {
// todo: configure run_diagnostics! // todo: configure run_diagnostics!
let paged_diag = Some(TaskWhen::OnType); let paged_diag = Some(TaskWhen::OnType);
@ -80,9 +83,11 @@ impl<F: CompilerFeat> WorldComputable<F> for ProjectCompilation {
} }
} }
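For illustration only (not part of this commit), a hedged usage sketch of the preconfiguration step above; `graph: Arc<WorldComputeGraph<LspCompilerFeat>>` is a hypothetical binding, the call runs inside a function returning `Result<_>`, and reading the returned flag as "compilation should run" is an assumption consistent with the doc comment.

let should_compile = ProjectCompilation::preconfig_timings(&graph)?;
if should_compile {
    // ... drive the compilation and export tasks on this graph ...
}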
/// A task that runs the export.
pub struct ProjectExport; pub struct ProjectExport;
impl ProjectExport { impl ProjectExport {
/// Exports the document to a bytes artifact.
fn export_bytes< fn export_bytes<
D: typst::Document + Send + Sync + 'static, D: typst::Document + Send + Sync + 'static,
T: ExportComputation<LspCompilerFeat, D, Output = Bytes>, T: ExportComputation<LspCompilerFeat, D, Output = Bytes>,
@ -102,6 +107,7 @@ impl ProjectExport {
res.transpose() res.transpose()
} }
/// Exports the document to a string artifact.
fn export_string< fn export_string<
D: typst::Document + Send + Sync + 'static, D: typst::Document + Send + Sync + 'static,
T: ExportComputation<LspCompilerFeat, D, Output = String>, T: ExportComputation<LspCompilerFeat, D, Output = String>,
@ -189,6 +195,7 @@ impl WorldComputable<LspCompilerFeat> for ProjectExport {
} }
} }
/// A task that exports the document to a specific format by typlite.
pub struct TypliteExport<const FORMAT: char>; pub struct TypliteExport<const FORMAT: char>;
const fn typlite_format(f: char) -> Format { const fn typlite_format(f: char) -> Format {
@ -226,5 +233,7 @@ impl<const F: char> WorldComputable<LspCompilerFeat> for TypliteExport<F> {
} }
} }
/// A task that exports the document to markdown.
pub type TypliteMdExport = TypliteExport<'m'>; pub type TypliteMdExport = TypliteExport<'m'>;
/// A task that exports the document to LaTeX.
pub type TypliteTeXExport = TypliteExport<'x'>; pub type TypliteTeXExport = TypliteExport<'x'>;

View file

@ -66,5 +66,5 @@ web = ["tinymist-project/web"]
# into the binary. # into the binary.
fonts = ["tinymist-project/fonts"] fonts = ["tinymist-project/fonts"]
# [lints] [lints]
# workspace = true workspace = true

View file

@ -1,5 +1,8 @@
//! # Typlite //! # Typlite
// todo: remove me
#![allow(missing_docs)]
pub mod attributes; pub mod attributes;
pub mod common; pub mod common;
mod error; mod error;

View file

@ -1,4 +1,6 @@
#![doc = include_str!("../README.md")] #![doc = include_str!("../README.md")]
// todo: remove me
#![allow(missing_docs)]
use std::{ use std::{
io::Write, io::Write,

View file

@ -39,3 +39,6 @@ default = ["no-content-hint"]
# Disable the default content hint. # Disable the default content hint.
# This requires modifying typst. # This requires modifying typst.
no-content-hint = ["reflexo-typst/no-content-hint"] no-content-hint = ["reflexo-typst/no-content-hint"]
[lints]
workspace = true

View file

@ -1,3 +1,6 @@
// todo: remove me
#![allow(missing_docs)]
mod actor; mod actor;
mod debug_loc; mod debug_loc;
mod outline; mod outline;