diff --git a/Cargo.toml b/Cargo.toml index c09c2b47..217a9616 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -211,6 +211,9 @@ debug = true inherits = "release" lto = "thin" +[workspace.lints.rustdoc] +broken_intra_doc_links = "warn" + [workspace.lints.rust] missing_docs = "warn" # missing_crate_level_docs = "warn" diff --git a/crates/tinymist-derive/src/lib.rs b/crates/tinymist-derive/src/lib.rs index af23b6db..91928ea7 100644 --- a/crates/tinymist-derive/src/lib.rs +++ b/crates/tinymist-derive/src/lib.rs @@ -4,20 +4,6 @@ use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, DeriveInput}; -#[proc_macro_attribute] -pub fn toml_model( - _metadata: proc_macro::TokenStream, - input: proc_macro::TokenStream, -) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - let output = quote! { - #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] - #[serde(rename_all = "kebab-case")] - #input - }; - output.into() -} - #[proc_macro_derive(BindTyCtx, attributes(bind))] pub fn bind_ty_ctx(input: TokenStream) -> TokenStream { // Parse the input tokens into a syntax tree diff --git a/crates/tinymist-project/src/args.rs b/crates/tinymist-project/src/args.rs index 95da7424..04f63792 100644 --- a/crates/tinymist-project/src/args.rs +++ b/crates/tinymist-project/src/args.rs @@ -1,171 +1,14 @@ -use crate::DocIdArgs; -use core::fmt; -use serde::{Deserialize, Serialize}; -use std::{num::NonZeroUsize, ops::RangeInclusive, path::Path, str::FromStr, sync::OnceLock}; +use std::{path::Path, sync::OnceLock}; + +use clap::ValueHint; use tinymist_std::{bail, error::prelude::Result}; + pub use tinymist_world::args::{CompileFontArgs, CompilePackageArgs}; pub use typst_preview::{PreviewArgs, PreviewMode}; -use clap::{ValueEnum, ValueHint}; - use crate::model::*; use crate::PROJECT_ROUTE_USER_ACTION_PRIORITY; -macro_rules! display_possible_values { - ($ty:ty) => { - impl fmt::Display for $ty { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.to_possible_value() - .expect("no values are skipped") - .get_name() - .fmt(f) - } - } - }; -} - -/// When to export an output file. -#[derive( - Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, ValueEnum, Serialize, Deserialize, -)] -#[serde(rename_all = "camelCase")] -#[clap(rename_all = "camelCase")] -pub enum TaskWhen { - /// Never watch to run task. - Never, - /// Run task on save. - OnSave, - /// Run task on type. - OnType, - /// *DEPRECATED* Run task when a document has a title and on saved, which is - /// useful to filter out template files. - /// - /// Note: this is deprecating. - OnDocumentHasTitle, -} - -impl TaskWhen { - /// Returns `true` if the task should never be run automatically. - pub fn is_never(&self) -> bool { - matches!(self, TaskWhen::Never) - } -} - -display_possible_values!(TaskWhen); - -/// Which format to use for the generated output file. -#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)] -pub enum OutputFormat { - /// Export to PDF. - Pdf, - /// Export to PNG. - Png, - /// Export to SVG. - Svg, - /// Export to HTML. - Html, -} - -display_possible_values!(OutputFormat); - -/// A PDF standard that Typst can enforce conformance with. -#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)] -#[allow(non_camel_case_types)] -pub enum PdfStandard { - /// PDF 1.7. - #[value(name = "1.7")] - #[serde(rename = "1.7")] - V_1_7, - /// PDF/A-2b. 
- #[value(name = "a-2b")] - #[serde(rename = "a-2b")] - A_2b, -} - -display_possible_values!(PdfStandard); - -/// Implements parsing of page ranges (`1-3`, `4`, `5-`, `-2`), used by the -/// `CompileCommand.pages` argument, through the `FromStr` trait instead of a -/// value parser, in order to generate better errors. -/// -/// See also: -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Pages(pub RangeInclusive>); - -impl FromStr for Pages { - type Err = &'static str; - - fn from_str(value: &str) -> Result { - match value - .split('-') - .map(str::trim) - .collect::>() - .as_slice() - { - [] | [""] => Err("page export range must not be empty"), - [single_page] => { - let page_number = parse_page_number(single_page)?; - Ok(Pages(Some(page_number)..=Some(page_number))) - } - ["", ""] => Err("page export range must have start or end"), - [start, ""] => Ok(Pages(Some(parse_page_number(start)?)..=None)), - ["", end] => Ok(Pages(None..=Some(parse_page_number(end)?))), - [start, end] => { - let start = parse_page_number(start)?; - let end = parse_page_number(end)?; - if start > end { - Err("page export range must end at a page after the start") - } else { - Ok(Pages(Some(start)..=Some(end))) - } - } - [_, _, _, ..] => Err("page export range must have a single hyphen"), - } - } -} - -impl fmt::Display for Pages { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let start = match self.0.start() { - Some(start) => start.to_string(), - None => String::from(""), - }; - let end = match self.0.end() { - Some(end) => end.to_string(), - None => String::from(""), - }; - write!(f, "{start}-{end}") - } -} - -impl serde::Serialize for Pages { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - serializer.serialize_str(&self.to_string()) - } -} - -impl<'de> serde::Deserialize<'de> for Pages { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let value = String::deserialize(deserializer)?; - value.parse().map_err(serde::de::Error::custom) - } -} - -/// Parses a single page number. -fn parse_page_number(value: &str) -> Result { - if value == "0" { - Err("page numbers start at one") - } else { - NonZeroUsize::from_str(value).map_err(|_| "not a valid page number") - } -} - /// Project document commands. #[derive(Debug, Clone, clap::Subcommand)] #[clap(rename_all = "kebab-case")] @@ -205,6 +48,35 @@ pub struct DocNewArgs { pub package: CompilePackageArgs, } +/// The id of a document. +/// +/// If an identifier is not provided, the document's path is used as the id. +#[derive(Debug, Clone, clap::Parser)] +pub struct DocIdArgs { + /// Give a name to the document. + #[clap(long = "name")] + pub name: Option, + /// Path to input Typst file. + #[clap(value_hint = ValueHint::FilePath)] + pub input: String, +} + +impl From<&ResourcePath> for Id { + fn from(value: &ResourcePath) -> Self { + Id::new(value.to_string()) + } +} + +impl From<&DocIdArgs> for Id { + fn from(args: &DocIdArgs) -> Self { + if let Some(id) = &args.name { + Id::new(id.clone()) + } else { + (&ResourcePath::from_user_sys(Path::new(&args.input))).into() + } + } +} + /// Configure project's priorities. #[derive(Debug, Clone, clap::Parser)] pub struct DocConfigureArgs { @@ -269,11 +141,13 @@ pub struct TaskCompileArgs { #[arg(long = "ppi", default_value_t = 144.0)] pub ppi: f32, + /// The output format. #[clap(skip)] pub output_format: OnceLock>, } impl TaskCompileArgs { + /// Convert the arguments to a project task. 
pub fn to_task(self, doc_id: Id) -> Result { let new_task_id = self.task_name.map(Id::new); let task_id = new_task_id.unwrap_or(doc_id.clone()); diff --git a/crates/tinymist-project/src/compiler.rs b/crates/tinymist-project/src/compiler.rs index 2b3d0226..91e10c3b 100644 --- a/crates/tinymist-project/src/compiler.rs +++ b/crates/tinymist-project/src/compiler.rs @@ -1,39 +1,35 @@ -//! Project Model for tinymist -//! -//! The [`ProjectCompiler`] implementation borrowed from typst.ts. -//! -//! Please check `tinymist::actor::typ_client` for architecture details. - -#![allow(missing_docs)] +//! Project compiler for tinymist. use core::fmt; -use std::{ - collections::HashSet, - path::Path, - sync::{Arc, OnceLock}, -}; +use std::collections::HashSet; +use std::path::Path; +use std::sync::{Arc, OnceLock}; use ecow::{EcoString, EcoVec}; -use reflexo_typst::{ - features::{CompileFeature, FeatureSet, WITH_COMPILING_STATUS_FEATURE}, - CompileEnv, CompileReport, Compiler, TypstDocument, -}; +use reflexo_typst::features::{CompileFeature, FeatureSet, WITH_COMPILING_STATUS_FEATURE}; +use reflexo_typst::{CompileEnv, CompileReport, Compiler, TypstDocument}; use tinymist_std::error::prelude::Result; +use tinymist_world::vfs::notify::{ + FilesystemEvent, MemoryEvent, NotifyMessage, UpstreamUpdateEvent, +}; +use tinymist_world::vfs::{FileId, FsProvider, RevisingVfs}; +use tinymist_world::{ + CompilerFeat, CompilerUniverse, CompilerWorld, EntryReader, EntryState, TaskInputs, WorldDeps, +}; use tokio::sync::mpsc; use typst::diag::{SourceDiagnostic, SourceResult}; use crate::LspCompilerFeat; -use tinymist_world::{ - vfs::{ - notify::{FilesystemEvent, MemoryEvent, NotifyMessage, UpstreamUpdateEvent}, - FileId, FsProvider, RevisingVfs, - }, - CompilerFeat, CompilerUniverse, CompilerWorld, EntryReader, EntryState, TaskInputs, WorldDeps, -}; +/// LSP compile snapshot. +pub type LspCompileSnapshot = CompileSnapshot; +/// LSP compiled artifact. +pub type LspCompiledArtifact = CompiledArtifact; /// LSP interrupt. pub type LspInterrupt = Interrupt; +/// Project instance id. This is slightly different from the project ids that +/// persist in disk. #[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] pub struct ProjectInsId(EcoString); @@ -51,6 +47,7 @@ pub struct ExportSignal { pub by_entry_update: bool, } +/// A snapshot of the project and compilation state. pub struct CompileSnapshot { /// The project id. pub id: ProjectInsId, @@ -65,6 +62,11 @@ pub struct CompileSnapshot { } impl CompileSnapshot { + /// Forks a new snapshot that compiles a different document. + /// + /// Note: the resulting document should not be shared in system, because we + /// generally believe that the document is revisioned, but temporary + /// tasks break this assumption. pub fn task(mut self, inputs: TaskInputs) -> Self { 'check_changed: { if let Some(entry) = &inputs.entry { @@ -86,6 +88,7 @@ impl CompileSnapshot { self } + /// Runs the compiler and returns the compiled document. pub fn compile(self) -> CompiledArtifact { let mut snap = self; snap.world.set_is_compiling(true); @@ -116,6 +119,7 @@ impl Clone for CompileSnapshot { } } +/// A compiled artifact. pub struct CompiledArtifact { /// The used snapshot. pub snap: CompileSnapshot, @@ -147,6 +151,7 @@ impl Clone for CompiledArtifact { } impl CompiledArtifact { + /// Returns the last successfully compiled document. 
pub fn success_doc(&self) -> Option> { self.doc .as_ref() @@ -155,6 +160,7 @@ impl CompiledArtifact { .or_else(|| self.snap.success_doc.clone()) } + /// Returns the depended files. pub fn depended_files(&self) -> &EcoVec { self.deps.get_or_init(|| { let mut deps = EcoVec::default(); @@ -167,10 +173,15 @@ impl CompiledArtifact { } } +/// A project compiler handler. pub trait CompileHandler: Send + Sync + 'static { + /// Called when there is any reason to compile. This doesn't mean that the + /// project should be compiled. fn on_any_compile_reason(&self, state: &mut ProjectCompiler); // todo: notify project specific compile + /// Called when a compilation is done. fn notify_compile(&self, res: &CompiledArtifact, rep: CompileReport); + /// Called when the compilation status is changed. fn status(&self, revision: usize, id: &ProjectInsId, rep: CompileReport); } @@ -185,6 +196,7 @@ impl CompileHandler { /// Compile anyway. Compile(ProjectInsId), @@ -218,6 +230,7 @@ impl fmt::Debug for Interrupt { } } +/// An accumulated compile reason stored in the project state. #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] pub struct CompileReasons { /// The snapshot is taken by the memory editing events. @@ -241,6 +254,7 @@ impl CompileReasons { self.by_memory_events || self.by_fs_events || self.by_entry_update } + /// Exclude some reasons. pub fn exclude(&self, excluded: Self) -> Self { Self { by_memory_events: self.by_memory_events && !excluded.by_memory_events, @@ -283,9 +297,13 @@ struct TaggedMemoryEvent { event: MemoryEvent, } +/// The compiler server options. pub struct CompileServerOpts { + /// The compilation handler. pub handler: Arc>, + /// The feature set. pub feature_set: FeatureSet, + /// Whether to enable file system watching. pub enable_watch: bool, } @@ -322,7 +340,7 @@ pub struct ProjectCompiler { } impl ProjectCompiler { - /// Create a compiler with options + /// Creates a compiler with options pub fn new( verse: CompilerUniverse, dep_tx: mpsc::UnboundedSender, @@ -354,6 +372,22 @@ impl ProjectCom } } + /// Creates a snapshot of the primary project. + pub fn snapshot(&mut self) -> CompileSnapshot { + self.primary.snapshot() + } + + /// Compiles the document once. + pub fn compile_once(&mut self) -> CompiledArtifact { + let snap = self.primary.make_snapshot(true); + ProjectState::run_compile(self.handler.clone(), snap)() + } + + /// Gets the iterator of all projects. + pub fn projects(&mut self) -> impl Iterator> { + std::iter::once(&mut self.primary).chain(self.dedicates.iter_mut()) + } + fn create_project( id: ProjectInsId, verse: CompilerUniverse, @@ -382,65 +416,8 @@ impl ProjectCom } } - pub fn process(&mut self, intr: Interrupt) { - // todo: evcit cache - self.process_inner(intr); - // Customized Project Compilation Handler - self.handler.clone().on_any_compile_reason(self); - } - - pub fn snapshot(&mut self) -> CompileSnapshot { - self.primary.snapshot() - } - - /// Compile the document once. - pub fn compile_once(&mut self) -> CompiledArtifact { - let snap = self.primary.make_snapshot(true); - ProjectState::run_compile(self.handler.clone(), snap)() - } - - /// Apply delayed memory changes to underlying compiler. 
- fn apply_delayed_memory_changes(
- verse: &mut RevisingVfs<'_, F::AccessModel>,
- dirty_shadow_logical_tick: &mut usize,
- event: &Option,
- ) -> Option<()> {
- // Handle delayed upstream update event before applying file system changes
- if let Some(event) = event {
- let TaggedMemoryEvent {
- logical_tick,
- event,
- } = event.opaque.as_ref().downcast_ref()?;
-
- // Recovery from dirty shadow state.
- if logical_tick == dirty_shadow_logical_tick {
- *dirty_shadow_logical_tick = 0;
- }
-
- Self::apply_memory_changes(verse, event.clone());
- }
-
- Some(())
- }
-
- /// Apply memory changes to underlying compiler.
- fn apply_memory_changes(vfs: &mut RevisingVfs<'_, F::AccessModel>, event: MemoryEvent) {
- if matches!(event, MemoryEvent::Sync(..)) {
- vfs.reset_shadow();
- }
- match event {
- MemoryEvent::Update(event) | MemoryEvent::Sync(event) => {
- for path in event.removes {
- let _ = vfs.unmap_shadow(&path);
- }
- for (path, snap) in event.inserts {
- let _ = vfs.map_shadow(&path, snap);
- }
- }
- }
- }
-
- fn find_project<'a>(
+ /// Finds a project by id, with fewer borrow checker restrictions.
+ pub fn find_project<'a>(
 primary: &'a mut ProjectState,
 dedicates: &'a mut [ProjectState],
 id: &ProjectInsId,
@@ -452,8 +429,48 @@ impl ProjectCom
 dedicates.iter_mut().find(|e| e.id == *id).unwrap()
 }
- pub fn projects(&mut self) -> impl Iterator> {
- std::iter::once(&mut self.primary).chain(self.dedicates.iter_mut())
+ /// Restarts a dedicated project.
+ pub fn restart_dedicate(&mut self, group: &str, entry: EntryState) -> Result {
+ let id = ProjectInsId(group.into());
+
+ let verse = CompilerUniverse::::new_raw(
+ entry,
+ Some(self.primary.verse.inputs().clone()),
+ self.primary.verse.vfs().fork(),
+ self.primary.verse.registry.clone(),
+ self.primary.verse.font_resolver.clone(),
+ );
+
+ let proj = Self::create_project(
+ id.clone(),
+ verse,
+ self.handler.clone(),
+ self.dep_tx.clone(),
+ self.primary.once_feature_set.as_ref().to_owned(),
+ );
+
+ self.remove_dedicates(&id);
+ self.dedicates.push(proj);
+
+ Ok(id)
+ }
+
+ fn remove_dedicates(&mut self, id: &ProjectInsId) {
+ let proj = self.dedicates.iter().position(|e| e.id == *id);
+ if let Some(idx) = proj {
+ let _proj = self.dedicates.remove(idx);
+ // todo: kill compilations
+ } else {
+ log::warn!("ProjectCompiler: settle project not found {id:?}");
+ }
+ }
+
+ /// Processes an interrupt.
+ pub fn process(&mut self, intr: Interrupt) {
+ // todo: evict cache
+ self.process_inner(intr);
+ // Customized Project Compilation Handler
+ self.handler.clone().on_any_compile_reason(self);
 }
 fn process_inner(&mut self, intr: Interrupt) {
@@ -606,43 +623,51 @@ impl ProjectCom
 }
 }
- pub fn restart_dedicate(&mut self, group: &str, entry: EntryState) -> Result {
- let id = ProjectInsId(group.into());
+ /// Applies delayed memory changes to the underlying compiler.
+ fn apply_delayed_memory_changes(
+ verse: &mut RevisingVfs<'_, F::AccessModel>,
+ dirty_shadow_logical_tick: &mut usize,
+ event: &Option,
+ ) -> Option<()> {
+ // Handle delayed upstream update event before applying file system changes
+ if let Some(event) = event {
+ let TaggedMemoryEvent {
+ logical_tick,
+ event,
+ } = event.opaque.as_ref().downcast_ref()?;
- let verse = CompilerUniverse::::new_raw(
- entry,
- Some(self.primary.verse.inputs().clone()),
- self.primary.verse.vfs().fork(),
- self.primary.verse.registry.clone(),
- self.primary.verse.font_resolver.clone(),
- );
+ // Recovery from dirty shadow state. 
+ if logical_tick == dirty_shadow_logical_tick { + *dirty_shadow_logical_tick = 0; + } - let proj = Self::create_project( - id.clone(), - verse, - self.handler.clone(), - self.dep_tx.clone(), - self.primary.once_feature_set.as_ref().to_owned(), - ); + Self::apply_memory_changes(verse, event.clone()); + } - self.remove_dedicates(&id); - self.dedicates.push(proj); - - Ok(id) + Some(()) } - fn remove_dedicates(&mut self, id: &ProjectInsId) { - let proj = self.dedicates.iter().position(|e| e.id == *id); - if let Some(idx) = proj { - let _proj = self.dedicates.remove(idx); - // todo: kill compilations - } else { - log::warn!("ProjectCompiler: settle project not found {id:?}"); + /// Apply memory changes to underlying compiler. + fn apply_memory_changes(vfs: &mut RevisingVfs<'_, F::AccessModel>, event: MemoryEvent) { + if matches!(event, MemoryEvent::Sync(..)) { + vfs.reset_shadow(); + } + match event { + MemoryEvent::Update(event) | MemoryEvent::Sync(event) => { + for path in event.removes { + let _ = vfs.unmap_shadow(&path); + } + for (path, snap) in event.inserts { + let _ = vfs.map_shadow(&path, snap); + } + } } } } +/// A project state. pub struct ProjectState { + /// The project instance id. pub id: ProjectInsId, /// The extension pub ext: Ext, @@ -672,10 +697,12 @@ pub struct ProjectState { } impl ProjectState { + /// Creates a new compile environment. pub fn make_env(&self, feature_set: Arc) -> CompileEnv { CompileEnv::default().configure_shared(feature_set) } + /// Creates a snapshot of the project. pub fn snapshot(&mut self) -> CompileSnapshot { match self.snapshot.as_ref() { Some(snap) if snap.world.revision() == self.verse.revision => snap.clone(), @@ -707,52 +734,8 @@ impl ProjectState { } } - fn process_compile(&mut self, artifact: CompiledArtifact) { - let world = &artifact.snap.world; - let compiled_revision = world.revision().get(); - if self.committed_revision >= compiled_revision { - return; - } - - // Update state. - let doc = artifact.doc.ok(); - self.committed_revision = compiled_revision; - self.latest_doc.clone_from(&doc); - if doc.is_some() { - self.latest_success_doc.clone_from(&self.latest_doc); - } - - // Notify the new file dependencies. - let mut deps = vec![]; - world.iter_dependencies(&mut |dep| { - if let Ok(x) = world.file_path(dep).and_then(|e| e.to_err()) { - deps.push(x.into()) - } - }); - let event = NotifyMessage::SyncDependency(deps); - let err = self.dep_tx.send(event); - log_send_error("dep_tx", err); - - let mut world = artifact.snap.world; - - let is_primary = self.id == ProjectInsId("primary".into()); - - // Trigger an evict task. - rayon::spawn(move || { - let evict_start = std::time::Instant::now(); - if is_primary { - comemo::evict(10); - - // Since all the projects share the same cache, we need to evict the cache - // on the primary instance for all the projects. - world.evict_source_cache(30); - } - world.evict_vfs(60); - let elapsed = evict_start.elapsed(); - log::info!("ProjectCompiler: evict cache in {elapsed:?}"); - }); - } - + /// Compile the document once if there is any reason and the entry is + /// active. #[must_use] pub fn may_compile( &mut self, @@ -801,6 +784,52 @@ impl ProjectState { compiled } } + + fn process_compile(&mut self, artifact: CompiledArtifact) { + let world = &artifact.snap.world; + let compiled_revision = world.revision().get(); + if self.committed_revision >= compiled_revision { + return; + } + + // Update state. 
+ let doc = artifact.doc.ok(); + self.committed_revision = compiled_revision; + self.latest_doc.clone_from(&doc); + if doc.is_some() { + self.latest_success_doc.clone_from(&self.latest_doc); + } + + // Notify the new file dependencies. + let mut deps = vec![]; + world.iter_dependencies(&mut |dep| { + if let Ok(x) = world.file_path(dep).and_then(|e| e.to_err()) { + deps.push(x.into()) + } + }); + let event = NotifyMessage::SyncDependency(deps); + let err = self.dep_tx.send(event); + log_send_error("dep_tx", err); + + let mut world = artifact.snap.world; + + let is_primary = self.id == ProjectInsId("primary".into()); + + // Trigger an evict task. + rayon::spawn(move || { + let evict_start = std::time::Instant::now(); + if is_primary { + comemo::evict(10); + + // Since all the projects share the same cache, we need to evict the cache + // on the primary instance for all the projects. + world.evict_source_cache(30); + } + world.evict_vfs(60); + let elapsed = evict_start.elapsed(); + log::info!("ProjectCompiler: evict cache in {elapsed:?}"); + }); + } } fn log_compile_report(env: &CompileEnv, rep: &CompileReport) { diff --git a/crates/tinymist-project/src/entry.rs b/crates/tinymist-project/src/entry.rs index 72730463..47201906 100644 --- a/crates/tinymist-project/src/entry.rs +++ b/crates/tinymist-project/src/entry.rs @@ -108,6 +108,7 @@ impl EntryResolver { }) } + /// Resolves the directory to store the lock file. pub fn resolve_lock(&self, entry: &EntryState) -> Option { match self.project_resolution { ProjectResolutionKind::LockDatabase if entry.is_in_package() => { @@ -124,7 +125,7 @@ impl EntryResolver { } } - /// Determines the default entry path. + /// Resolves the default entry path. pub fn resolve_default(&self) -> Option { let entry = self.entry.as_ref(); // todo: pre-compute this when updating config diff --git a/crates/tinymist-project/src/font.rs b/crates/tinymist-project/src/font.rs index 2129b95c..053231f7 100644 --- a/crates/tinymist-project/src/font.rs +++ b/crates/tinymist-project/src/font.rs @@ -1,20 +1,18 @@ //! Font resolver implementation. -use core::fmt; -use std::{ - collections::HashMap, - path::PathBuf, - sync::{Arc, Mutex}, -}; +pub use crate::world::base::font::*; +use core::fmt; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::{Arc, Mutex}; + +use tinymist_std::debug_loc::DataSource; use tinymist_world::font::system::SystemFontSearcher; use typst::text::{Font, FontBook, FontInfo}; use typst::utils::LazyHash; use crate::world::vfs::Bytes; -use tinymist_std::debug_loc::DataSource; - -pub use crate::world::base::font::*; #[derive(Debug)] /// The default FontResolver implementation. diff --git a/crates/tinymist-project/src/lib.rs b/crates/tinymist-project/src/lib.rs index 0095ba03..633dba95 100644 --- a/crates/tinymist-project/src/lib.rs +++ b/crates/tinymist-project/src/lib.rs @@ -1,7 +1,5 @@ //! 
Project Model for tinymist -#![allow(missing_docs)] - mod args; mod compiler; mod entry; diff --git a/crates/tinymist-project/src/lock.rs b/crates/tinymist-project/src/lock.rs index 935b06d5..d746634c 100644 --- a/crates/tinymist-project/src/lock.rs +++ b/crates/tinymist-project/src/lock.rs @@ -1,17 +1,223 @@ +#![allow(missing_docs)] + +use std::cmp::Ordering; +use std::io::{Read, Seek, SeekFrom, Write}; use std::{path::Path, sync::Arc}; -use ecow::EcoVec; -use reflexo_typst::ImmutPath; +use ecow::{eco_vec, EcoVec}; +use tinymist_std::error::prelude::*; use tinymist_std::path::unix_slash; +use tinymist_std::{bail, ImmutPath}; use typst::diag::EcoString; use typst::World; use crate::model::{Id, ProjectInput, ProjectRoute, ProjectTask, ResourcePath}; -use crate::{LspWorld, ProjectPathMaterial}; +use crate::{LockFile, LockFileCompat, LspWorld, ProjectPathMaterial, LOCK_VERSION}; + +pub const LOCK_FILENAME: &str = "tinymist.lock"; + +pub const PROJECT_ROUTE_USER_ACTION_PRIORITY: u32 = 256; + +impl LockFile { + pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> { + self.document.iter().find(|i| &i.id == id) + } + + pub fn replace_document(&mut self, input: ProjectInput) { + let id = input.id.clone(); + let index = self.document.iter().position(|i| i.id == id); + if let Some(index) = index { + self.document[index] = input; + } else { + self.document.push(input); + } + } + + pub fn replace_task(&mut self, task: ProjectTask) { + let id = task.id().clone(); + let index = self.task.iter().position(|i| *i.id() == id); + if let Some(index) = index { + self.task[index] = task; + } else { + self.task.push(task); + } + } + + pub fn replace_route(&mut self, route: ProjectRoute) { + let id = route.id.clone(); + + self.route.retain(|i| i.id != id); + self.route.push(route); + } + + pub fn sort(&mut self) { + self.document.sort_by(|a, b| a.id.cmp(&b.id)); + self.task + .sort_by(|a, b| a.doc_id().cmp(b.doc_id()).then_with(|| a.id().cmp(b.id()))); + // the route's order is important, so we don't sort them. + } + + pub fn serialize_resolve(&self) -> String { + let content = toml::Table::try_from(self).unwrap(); + + let mut out = String::new(); + + // At the start of the file we notify the reader that the file is generated. + // Specifically Phabricator ignores files containing "@generated", so we use + // that. 
+ let marker_line = "# This file is automatically @generated by tinymist."; + let extra_line = "# It is not intended for manual editing."; + + out.push_str(marker_line); + out.push('\n'); + out.push_str(extra_line); + out.push('\n'); + + out.push_str(&format!("version = {LOCK_VERSION:?}\n")); + + let document = content.get("document"); + if let Some(document) = document { + for document in document.as_array().unwrap() { + out.push('\n'); + out.push_str("[[document]]\n"); + emit_document(document, &mut out); + } + } + + let route = content.get("route"); + if let Some(route) = route { + for route in route.as_array().unwrap() { + out.push('\n'); + out.push_str("[[route]]\n"); + emit_route(route, &mut out); + } + } + + let task = content.get("task"); + if let Some(task) = task { + for task in task.as_array().unwrap() { + out.push('\n'); + out.push_str("[[task]]\n"); + emit_output(task, &mut out); + } + } + + return out; + + fn emit_document(input: &toml::Value, out: &mut String) { + let table = input.as_table().unwrap(); + out.push_str(&table.to_string()); + } + + fn emit_output(output: &toml::Value, out: &mut String) { + let mut table = output.clone(); + let table = table.as_table_mut().unwrap(); + // replace transform with task.transforms + if let Some(transform) = table.remove("transform") { + let mut task_table = toml::Table::new(); + task_table.insert("transform".to_string(), transform); + + table.insert("task".to_string(), task_table.into()); + } + + out.push_str(&table.to_string()); + } + + fn emit_route(route: &toml::Value, out: &mut String) { + let table = route.as_table().unwrap(); + out.push_str(&table.to_string()); + } + } + + pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> { + let fs = tinymist_std::fs::flock::Filesystem::new(cwd.to_owned()); + + let mut lock_file = fs + .open_rw_exclusive_create(LOCK_FILENAME, "project commands") + .context("tinymist.lock")?; + + let mut data = vec![]; + lock_file.read_to_end(&mut data).context("read lock")?; + + let old_data = + std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?; + + let mut state = if old_data.trim().is_empty() { + LockFile { + document: vec![], + task: vec![], + route: eco_vec![], + } + } else { + let old_state = toml::from_str::(old_data) + .context_ut("tinymist.lock file is not a valid TOML file")?; + + let version = old_state.version()?; + match Version(version).partial_cmp(&Version(LOCK_VERSION)) { + Some(Ordering::Equal | Ordering::Less) => {} + Some(Ordering::Greater) => { + bail!( + "trying to update lock file having a future version, current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}", + ); + } + None => { + bail!( + "cannot compare version, are version strings in right format? current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}", + ); + } + } + + old_state.migrate()? + }; + + f(&mut state)?; + + // todo: for read only operations, we don't have to compare it. + state.sort(); + let new_data = state.serialize_resolve(); + + // If the lock file contents haven't changed so don't rewrite it. This is + // helpful on read-only filesystems. + if old_data == new_data { + return Ok(()); + } + + // todo: even if cargo, they don't update the lock file atomically. This + // indicates that we may get data corruption if the process is killed + // while writing the lock file. This is sensible because `Cargo.lock` is + // only a "resolved result" of the `Cargo.toml`. 
Thus, we should inform + // users that don't only persist configuration in the lock file. + lock_file.file().set_len(0).context(LOCK_FILENAME)?; + lock_file.seek(SeekFrom::Start(0)).context(LOCK_FILENAME)?; + lock_file + .write_all(new_data.as_bytes()) + .context(LOCK_FILENAME)?; + + Ok(()) + } + + pub fn read(dir: &Path) -> Result { + let fs = tinymist_std::fs::flock::Filesystem::new(dir.to_owned()); + + let mut lock_file = fs + .open_ro_shared(LOCK_FILENAME, "project commands") + .context(LOCK_FILENAME)?; + + let mut data = vec![]; + lock_file.read_to_end(&mut data).context(LOCK_FILENAME)?; + + let data = std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?; + + let state = toml::from_str::(data) + .context_ut("tinymist.lock file is not a valid TOML file")?; + + state.migrate() + } +} /// Make a new project lock updater. -pub fn update_lock(root: ImmutPath) -> ProjectLockUpdater { - ProjectLockUpdater { +pub fn update_lock(root: ImmutPath) -> LockFileUpdate { + LockFileUpdate { root, updates: vec![], } @@ -24,12 +230,12 @@ enum LockUpdate { Route(ProjectRoute), } -pub struct ProjectLockUpdater { +pub struct LockFileUpdate { root: Arc, updates: Vec, } -impl ProjectLockUpdater { +impl LockFileUpdate { pub fn compiled(&mut self, world: &LspWorld) -> Option { let id = Id::from_world(world)?; @@ -143,3 +349,22 @@ impl ProjectLockUpdater { } } } + +struct Version<'a>(&'a str); + +impl PartialEq for Version<'_> { + fn eq(&self, other: &Self) -> bool { + semver::Version::parse(self.0) + .ok() + .and_then(|a| semver::Version::parse(other.0).ok().map(|b| a == b)) + .unwrap_or(false) + } +} + +impl PartialOrd for Version<'_> { + fn partial_cmp(&self, other: &Self) -> Option { + let lhs = semver::Version::parse(self.0).ok()?; + let rhs = semver::Version::parse(other.0).ok()?; + Some(lhs.cmp(&rhs)) + } +} diff --git a/crates/tinymist-project/src/model.rs b/crates/tinymist-project/src/model.rs index dc8868c4..3231de5b 100644 --- a/crates/tinymist-project/src/model.rs +++ b/crates/tinymist-project/src/model.rs @@ -1,11 +1,12 @@ use core::fmt; use std::hash::{Hash, Hasher}; -use std::io::{Read, Seek, SeekFrom, Write}; +use std::num::NonZeroUsize; +use std::ops::RangeInclusive; use std::path::PathBuf; -use std::{cmp::Ordering, path::Path, str::FromStr}; +use std::{path::Path, str::FromStr}; -use clap::ValueHint; -use ecow::{eco_vec, EcoVec}; +use clap::ValueEnum; +use ecow::EcoVec; use serde::{Deserialize, Serialize}; use tinymist_std::error::prelude::*; use tinymist_std::path::unix_slash; @@ -19,258 +20,8 @@ pub use task::*; use crate::LspWorld; -use super::{Pages, PdfStandard, TaskWhen}; - -pub const LOCK_FILENAME: &str = "tinymist.lock"; - -const LOCK_VERSION: &str = "0.1.0-beta0"; - -pub const PROJECT_ROUTE_USER_ACTION_PRIORITY: u32 = 256; - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "kebab-case", tag = "version")] -pub enum LockFileCompat { - #[serde(rename = "0.1.0-beta0")] - Version010Beta0(LockFile), - #[serde(untagged)] - Other(serde_json::Value), -} - -impl LockFileCompat { - pub fn version(&self) -> Result<&str> { - match self { - LockFileCompat::Version010Beta0(..) => Ok(LOCK_VERSION), - LockFileCompat::Other(v) => v - .get("version") - .and_then(|v| v.as_str()) - .context("missing version field"), - } - } - - pub fn migrate(self) -> Result { - match self { - LockFileCompat::Version010Beta0(v) => Ok(v), - this @ LockFileCompat::Other(..) 
=> { - bail!( - "cannot migrate from version: {}", - this.version().unwrap_or("unknown version") - ) - } - } - } -} - -#[derive(Debug, Default, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] -pub struct LockFile { - // The lock file version. - // version: String, - /// The project's document (input). - #[serde(skip_serializing_if = "Vec::is_empty", default)] - pub document: Vec, - /// The project's task (output). - #[serde(skip_serializing_if = "Vec::is_empty", default)] - pub task: Vec, - /// The project's task route. - #[serde(skip_serializing_if = "EcoVec::is_empty", default)] - pub route: EcoVec, -} - -impl LockFile { - pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> { - self.document.iter().find(|i| &i.id == id) - } - - pub fn replace_document(&mut self, input: ProjectInput) { - let id = input.id.clone(); - let index = self.document.iter().position(|i| i.id == id); - if let Some(index) = index { - self.document[index] = input; - } else { - self.document.push(input); - } - } - - pub fn replace_task(&mut self, task: ProjectTask) { - let id = task.id().clone(); - let index = self.task.iter().position(|i| *i.id() == id); - if let Some(index) = index { - self.task[index] = task; - } else { - self.task.push(task); - } - } - - pub fn replace_route(&mut self, route: ProjectRoute) { - let id = route.id.clone(); - - self.route.retain(|i| i.id != id); - self.route.push(route); - } - - pub fn sort(&mut self) { - self.document.sort_by(|a, b| a.id.cmp(&b.id)); - self.task - .sort_by(|a, b| a.doc_id().cmp(b.doc_id()).then_with(|| a.id().cmp(b.id()))); - // the route's order is important, so we don't sort them. - } - - pub fn serialize_resolve(&self) -> String { - let content = toml::Table::try_from(self).unwrap(); - - let mut out = String::new(); - - // At the start of the file we notify the reader that the file is generated. - // Specifically Phabricator ignores files containing "@generated", so we use - // that. 
- let marker_line = "# This file is automatically @generated by tinymist."; - let extra_line = "# It is not intended for manual editing."; - - out.push_str(marker_line); - out.push('\n'); - out.push_str(extra_line); - out.push('\n'); - - out.push_str(&format!("version = {LOCK_VERSION:?}\n")); - - let document = content.get("document"); - if let Some(document) = document { - for document in document.as_array().unwrap() { - out.push('\n'); - out.push_str("[[document]]\n"); - emit_document(document, &mut out); - } - } - - let route = content.get("route"); - if let Some(route) = route { - for route in route.as_array().unwrap() { - out.push('\n'); - out.push_str("[[route]]\n"); - emit_route(route, &mut out); - } - } - - let task = content.get("task"); - if let Some(task) = task { - for task in task.as_array().unwrap() { - out.push('\n'); - out.push_str("[[task]]\n"); - emit_output(task, &mut out); - } - } - - return out; - - fn emit_document(input: &toml::Value, out: &mut String) { - let table = input.as_table().unwrap(); - out.push_str(&table.to_string()); - } - - fn emit_output(output: &toml::Value, out: &mut String) { - let mut table = output.clone(); - let table = table.as_table_mut().unwrap(); - // replace transform with task.transforms - if let Some(transform) = table.remove("transform") { - let mut task_table = toml::Table::new(); - task_table.insert("transform".to_string(), transform); - - table.insert("task".to_string(), task_table.into()); - } - - out.push_str(&table.to_string()); - } - - fn emit_route(route: &toml::Value, out: &mut String) { - let table = route.as_table().unwrap(); - out.push_str(&table.to_string()); - } - } - - pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> { - let fs = tinymist_std::fs::flock::Filesystem::new(cwd.to_owned()); - - let mut lock_file = fs - .open_rw_exclusive_create(LOCK_FILENAME, "project commands") - .context("tinymist.lock")?; - - let mut data = vec![]; - lock_file.read_to_end(&mut data).context("read lock")?; - - let old_data = - std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?; - - let mut state = if old_data.trim().is_empty() { - LockFile { - document: vec![], - task: vec![], - route: eco_vec![], - } - } else { - let old_state = toml::from_str::(old_data) - .context_ut("tinymist.lock file is not a valid TOML file")?; - - let version = old_state.version()?; - match Version(version).partial_cmp(&Version(LOCK_VERSION)) { - Some(Ordering::Equal | Ordering::Less) => {} - Some(Ordering::Greater) => { - bail!( - "trying to update lock file having a future version, current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}", - ); - } - None => { - bail!( - "cannot compare version, are version strings in right format? current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}", - ); - } - } - - old_state.migrate()? - }; - - f(&mut state)?; - - // todo: for read only operations, we don't have to compare it. - state.sort(); - let new_data = state.serialize_resolve(); - - // If the lock file contents haven't changed so don't rewrite it. This is - // helpful on read-only filesystems. - if old_data == new_data { - return Ok(()); - } - - // todo: even if cargo, they don't update the lock file atomically. This - // indicates that we may get data corruption if the process is killed - // while writing the lock file. This is sensible because `Cargo.lock` is - // only a "resolved result" of the `Cargo.toml`. 
Thus, we should inform
- users that don't only persist configuration in the lock file.
- lock_file.file().set_len(0).context(LOCK_FILENAME)?;
- lock_file.seek(SeekFrom::Start(0)).context(LOCK_FILENAME)?;
- lock_file
- .write_all(new_data.as_bytes())
- .context(LOCK_FILENAME)?;
-
- Ok(())
- }
-
- pub fn read(dir: &Path) -> Result {
- let fs = tinymist_std::fs::flock::Filesystem::new(dir.to_owned());
-
- let mut lock_file = fs
- .open_ro_shared(LOCK_FILENAME, "project commands")
- .context(LOCK_FILENAME)?;
-
- let mut data = vec![];
- lock_file.read_to_end(&mut data).context(LOCK_FILENAME)?;
-
- let data = std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;
-
- let state = toml::from_str::(data)
- .context_ut("tinymist.lock file is not a valid TOML file")?;
-
- state.migrate()
- }
-}
+/// The lock file version currently in use.
+pub const LOCK_VERSION: &str = "0.1.0-beta0";
 /// A scalar that is not NaN.
 #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
@@ -320,10 +71,12 @@ impl Ord for Scalar {
 pub struct Id(String);
 impl Id {
+ /// Creates a new project Id.
 pub fn new(s: String) -> Self {
 Id(s)
 }
+ /// Creates a new project Id from a world.
 pub fn from_world(world: &LspWorld) -> Option {
 let entry = world.entry_state();
 let id = unix_slash(entry.main()?.vpath().as_rootless_path());
@@ -339,35 +92,173 @@ impl fmt::Display for Id {
 }
 }
-/// The id of a document.
-///
-/// If an identifier is not provided, the document's path is used as the id.
-#[derive(Debug, Clone, clap::Parser)]
-pub struct DocIdArgs {
- /// Give a name to the document.
- #[clap(long = "name")]
- pub name: Option,
- /// Path to input Typst file.
- #[clap(value_hint = ValueHint::FilePath)]
- pub input: String,
+macro_rules! display_possible_values {
+ ($ty:ty) => {
+ impl fmt::Display for $ty {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.to_possible_value()
+ .expect("no values are skipped")
+ .get_name()
+ .fmt(f)
+ }
+ }
+ };
 }
-impl From<&ResourcePath> for Id {
- fn from(value: &ResourcePath) -> Self {
- Id(value.to_string())
+/// When to export an output file.
+///
+/// By default, a `tinymist compile` only provides input information and
+/// doesn't change the `when` field. However, you can still specify a `when`
+/// argument to override the default behavior for specific tasks.
+///
+/// ## Examples
+///
+/// ```bash
+/// tinymist compile --when onSave main.typ
+/// alias typst="tinymist compile --when=onSave"
+/// typst compile main.typ
+/// ```
+#[derive(
+ Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, ValueEnum, Serialize, Deserialize,
 )]
+#[serde(rename_all = "camelCase")]
+#[clap(rename_all = "camelCase")]
+pub enum TaskWhen {
+ /// Never run the task automatically.
+ Never,
+ /// Run task on save.
+ OnSave,
+ /// Run task on type.
+ OnType,
+ /// *DEPRECATED* Run task when a document has a title and on save, which is
+ /// useful to filter out template files.
+ ///
+ /// Note: this is deprecated.
+ OnDocumentHasTitle,
+}
+
+impl TaskWhen {
+ /// Returns `true` if the task should never be run automatically.
+ pub fn is_never(&self) -> bool {
+ matches!(self, TaskWhen::Never)
 }
 }
-impl From<&DocIdArgs> for Id {
- fn from(args: &DocIdArgs) -> Self {
- if let Some(id) = &args.name {
- Id(id.clone())
- } else {
- (&ResourcePath::from_user_sys(Path::new(&args.input))).into()
+display_possible_values!(TaskWhen);
+
+/// Which format to use for the generated output file. 
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)] +pub enum OutputFormat { + /// Export to PDF. + Pdf, + /// Export to PNG. + Png, + /// Export to SVG. + Svg, + /// Export to HTML. + Html, +} + +display_possible_values!(OutputFormat); + +/// A PDF standard that Typst can enforce conformance with. +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)] +#[allow(non_camel_case_types)] +pub enum PdfStandard { + /// PDF 1.7. + #[value(name = "1.7")] + #[serde(rename = "1.7")] + V_1_7, + /// PDF/A-2b. + #[value(name = "a-2b")] + #[serde(rename = "a-2b")] + A_2b, +} + +display_possible_values!(PdfStandard); + +/// Implements parsing of page ranges (`1-3`, `4`, `5-`, `-2`), used by the +/// `CompileCommand.pages` argument, through the `FromStr` trait instead of a +/// value parser, in order to generate better errors. +/// +/// See also: +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Pages(pub RangeInclusive>); + +impl FromStr for Pages { + type Err = &'static str; + + fn from_str(value: &str) -> Result { + match value + .split('-') + .map(str::trim) + .collect::>() + .as_slice() + { + [] | [""] => Err("page export range must not be empty"), + [single_page] => { + let page_number = parse_page_number(single_page)?; + Ok(Pages(Some(page_number)..=Some(page_number))) + } + ["", ""] => Err("page export range must have start or end"), + [start, ""] => Ok(Pages(Some(parse_page_number(start)?)..=None)), + ["", end] => Ok(Pages(None..=Some(parse_page_number(end)?))), + [start, end] => { + let start = parse_page_number(start)?; + let end = parse_page_number(end)?; + if start > end { + Err("page export range must end at a page after the start") + } else { + Ok(Pages(Some(start)..=Some(end))) + } + } + [_, _, _, ..] => Err("page export range must have a single hyphen"), } } } +impl fmt::Display for Pages { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let start = match self.0.start() { + Some(start) => start.to_string(), + None => String::from(""), + }; + let end = match self.0.end() { + Some(end) => end.to_string(), + None => String::from(""), + }; + write!(f, "{start}-{end}") + } +} + +impl serde::Serialize for Pages { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl<'de> serde::Deserialize<'de> for Pages { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = String::deserialize(deserializer)?; + value.parse().map_err(serde::de::Error::custom) + } +} + +/// Parses a single page number. +fn parse_page_number(value: &str) -> Result { + if value == "0" { + Err("page numbers start at one") + } else { + NonZeroUsize::from_str(value).map_err(|_| "not a valid page number") + } +} + /// A resource path. #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct ResourcePath(EcoString, String); @@ -413,6 +304,7 @@ impl<'de> serde::Deserialize<'de> for ResourcePath { } impl ResourcePath { + /// Creates a new resource path from a user passing system path. pub fn from_user_sys(inp: &Path) -> Self { let rel = if inp.is_relative() { inp.to_path_buf() @@ -423,7 +315,7 @@ impl ResourcePath { let rel = unix_slash(&rel); ResourcePath("file".into(), rel.to_string()) } - + /// Creates a new resource path from a file id. 
pub fn from_file_id(id: FileId) -> Self { let package = id.package(); match package { @@ -437,7 +329,7 @@ impl ResourcePath { ), } } - + /// Converts the resource path to an absolute file system path. pub fn to_abs_path(&self, rel: &Path) -> Option { if self.0 == "file" { let path = Path::new(&self.1); @@ -452,6 +344,60 @@ impl ResourcePath { } } +/// A lock file compatibility wrapper. +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case", tag = "version")] +pub enum LockFileCompat { + /// The lock file schema with version 0.1.0-beta0. + #[serde(rename = "0.1.0-beta0")] + Version010Beta0(LockFile), + /// Other lock file schema. + #[serde(untagged)] + Other(serde_json::Value), +} + +impl LockFileCompat { + /// Returns the lock file version. + pub fn version(&self) -> Result<&str> { + match self { + LockFileCompat::Version010Beta0(..) => Ok(LOCK_VERSION), + LockFileCompat::Other(v) => v + .get("version") + .and_then(|v| v.as_str()) + .context("missing version field"), + } + } + + /// Migrates the lock file to the current version. + pub fn migrate(self) -> Result { + match self { + LockFileCompat::Version010Beta0(v) => Ok(v), + this @ LockFileCompat::Other(..) => { + bail!( + "cannot migrate from version: {}", + this.version().unwrap_or("unknown version") + ) + } + } + } +} + +/// A lock file storing project information. +#[derive(Debug, Default, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] +pub struct LockFile { + // The lock file version. + // version: String, + /// The project's document (input). + #[serde(skip_serializing_if = "Vec::is_empty", default)] + pub document: Vec, + /// The project's task (output). + #[serde(skip_serializing_if = "Vec::is_empty", default)] + pub task: Vec, + /// The project's task route. + #[serde(skip_serializing_if = "EcoVec::is_empty", default)] + pub route: EcoVec, +} + /// A project input specifier. #[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] #[serde(rename_all = "kebab-case")] @@ -503,6 +449,8 @@ pub struct ProjectPathMaterial { } impl ProjectPathMaterial { + /// Creates a new project path material from a document ID and a list of + /// files. pub fn from_deps(doc_id: Id, files: EcoVec) -> Self { let mut files: Vec<_> = files.into_iter().map(|p| p.as_ref().to_owned()).collect(); files.sort(); @@ -524,22 +472,3 @@ pub struct ProjectRoute { /// The priority of the project. (lower numbers are higher priority). pub priority: u32, } - -struct Version<'a>(&'a str); - -impl PartialEq for Version<'_> { - fn eq(&self, other: &Self) -> bool { - semver::Version::parse(self.0) - .ok() - .and_then(|a| semver::Version::parse(other.0).ok().map(|b| a == b)) - .unwrap_or(false) - } -} - -impl PartialOrd for Version<'_> { - fn partial_cmp(&self, other: &Self) -> Option { - let lhs = semver::Version::parse(self.0).ok()?; - let rhs = semver::Version::parse(other.0).ok()?; - Some(lhs.cmp(&rhs)) - } -} diff --git a/crates/tinymist-project/src/model/task.rs b/crates/tinymist-project/src/model/task.rs index 8c0fb32a..49ace568 100644 --- a/crates/tinymist-project/src/model/task.rs +++ b/crates/tinymist-project/src/model/task.rs @@ -1,13 +1,37 @@ +//! Project task models. + use std::hash::Hash; use serde::{Deserialize, Serialize}; -use tinymist_derive::toml_model; use super::{Id, Pages, PdfStandard, Scalar, TaskWhen}; -/// A project task specifier. -#[toml_model] -#[serde(tag = "type")] +/// A project task specifier. This is used for specifying tasks in a project. 
+/// When the language service notifies an update event of the project, it will +/// check whether any associated tasks need to be run. +/// +/// Each task can have different timing and conditions for running. See +/// [`TaskWhen`] for more information. +/// +/// The available task types listed in the [`ProjectTask`] only represent the +/// direct formats supported by the typst compiler. More task types can be +/// customized by the [`ExportTransform`]. +/// +/// ## Examples +/// +/// Export a JSON file with the pdfpc notes of the document: +/// +/// ```bash +/// tinymist project query main.typ --format json --selector "" --field value --one +/// ``` +/// +/// Export a PDF file and then runs a ghostscript command to compress it: +/// +/// ```bash +/// tinymist project compile main.typ --pipe 'import "@local/postprocess:0.0.1": ghostscript; ghostscript(output.path)' +/// ``` +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case", tag = "type")] pub enum ProjectTask { /// A preview task. Preview(PreviewTask), @@ -46,7 +70,7 @@ impl ProjectTask { } } - /// Returns the task's ID. + /// Returns the document's ID. pub fn id(&self) -> &Id { match self { ProjectTask::Preview(task) => &task.id, @@ -62,23 +86,26 @@ impl ProjectTask { } } -/// An lsp task specifier. -#[toml_model] +/// A preview task specifier. +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct PreviewTask { /// The task's ID. pub id: Id, - /// The doc's ID. + /// The document's ID. pub document: Id, - /// When to run the task + /// When to run the task. See [`TaskWhen`] for more + /// information. pub when: TaskWhen, } /// An export task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct ExportTask { /// The task's ID. pub id: Id, - /// The doc's ID. + /// The document's ID. pub document: Id, /// When to run the task pub when: TaskWhen, @@ -88,7 +115,8 @@ pub struct ExportTask { } /// A project export transform specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum ExportTransform { /// Only pick a subset of pages. Pages { @@ -100,27 +128,35 @@ pub enum ExportTransform { /// The gap between pages (in pt). gap: Scalar, }, + /// Execute a transform script. + Script { + /// The postprocess script (typst script) to run. + #[serde(skip_serializing_if = "Option::is_none", default)] + script: Option, + }, /// Uses a pretty printer to format the output. Pretty { - /// The pretty printer id provided by editor. + /// The pretty command (typst script) to run. + /// /// If not provided, the default pretty printer will be used. /// Note: the builtin one may be only effective for json outputs. #[serde(skip_serializing_if = "Option::is_none", default)] - id: Option, + script: Option, }, } /// An export pdf task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct ExportPdfTask { - /// The shared export arguments + /// The shared export arguments. #[serde(flatten)] pub export: ExportTask, - /// The pdf standards. + /// One (or multiple comma-separated) PDF standards that Typst will enforce + /// conformance with. 
#[serde(skip_serializing_if = "Vec::is_empty", default)] pub pdf_standards: Vec, - /// The document's creation date formatted as a UNIX timestamp (in second - /// unit). + /// The document's creation date formatted as a UNIX timestamp (in seconds). /// /// For more information, see . #[serde(skip_serializing_if = "Option::is_none", default)] @@ -128,14 +164,15 @@ pub struct ExportPdfTask { } /// An export png task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct ExportPngTask { - /// The shared export arguments + /// The shared export arguments. #[serde(flatten)] pub export: ExportTask, /// The PPI (pixels per inch) to use for PNG export. pub ppi: Scalar, - /// The background fill color (in typst script). + /// The expression constructing background fill color (in typst script). /// e.g. `#ffffff`, `#000000`, `rgba(255, 255, 255, 0.5)`. /// /// If not provided, the default background color specified in the document @@ -145,46 +182,52 @@ pub struct ExportPngTask { } /// An export svg task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct ExportSvgTask { - /// The shared export arguments + /// The shared export arguments. #[serde(flatten)] pub export: ExportTask, } /// An export html task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct ExportHtmlTask { - /// The shared export arguments + /// The shared export arguments. #[serde(flatten)] pub export: ExportTask, } /// An export markdown task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct ExportMarkdownTask { - /// The shared export arguments + /// The shared export arguments. #[serde(flatten)] pub export: ExportTask, } /// An export text task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct ExportTextTask { - /// The shared export arguments + /// The shared export arguments. #[serde(flatten)] pub export: ExportTask, } /// An export query task specifier. -#[toml_model] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub struct QueryTask { - /// The shared export arguments + /// The shared export arguments. #[serde(flatten)] pub export: ExportTask, /// The format to serialize in. Can be `json`, `yaml`, or `txt`, pub format: String, - /// Specify a different output extension than the format. + /// Uses a different output extension from the one inferring from the + /// [`Self::format`]. pub output_extension: String, /// Defines which elements to retrieve. pub selector: String, diff --git a/crates/tinymist-project/src/watch.rs b/crates/tinymist-project/src/watch.rs index b513c7bb..733b7cec 100644 --- a/crates/tinymist-project/src/watch.rs +++ b/crates/tinymist-project/src/watch.rs @@ -513,6 +513,7 @@ fn log_send_error(chan: &'static str, res: Result<(), mpsc::error::SendError< .is_ok() } +/// Watches on a set of *files*. 
pub async fn watch_deps( inbox: mpsc::UnboundedReceiver, interrupted_by_events: impl FnMut(FilesystemEvent) + Send + Sync + 'static, diff --git a/crates/tinymist-project/src/world.rs b/crates/tinymist-project/src/world.rs index c6999687..a1aec06a 100644 --- a/crates/tinymist-project/src/world.rs +++ b/crates/tinymist-project/src/world.rs @@ -1,36 +1,34 @@ //! World implementation of typst for tinymist. -pub use tinymist_std::error::prelude; pub use tinymist_world as base; pub use tinymist_world::args::*; pub use tinymist_world::config::CompileFontOpts; -use tinymist_world::package::RegistryPathMapper; -pub use tinymist_world::vfs; -pub use tinymist_world::{entry::*, EntryOpts, EntryState}; +pub use tinymist_world::entry::*; +pub use tinymist_world::{font, package, vfs}; pub use tinymist_world::{ - font, package, CompilerUniverse, CompilerWorld, RevisingUniverse, TaskInputs, + CompilerUniverse, CompilerWorld, EntryOpts, EntryState, RevisingUniverse, TaskInputs, }; use std::path::Path; use std::{borrow::Cow, sync::Arc}; -use ::typst::utils::LazyHash; use tinymist_std::error::prelude::*; use tinymist_std::ImmutPath; use tinymist_world::font::system::SystemFontSearcher; -use tinymist_world::package::http::HttpRegistry; +use tinymist_world::package::{http::HttpRegistry, RegistryPathMapper}; use tinymist_world::vfs::{system::SystemAccessModel, Vfs}; use tinymist_world::CompilerFeat; use typst::foundations::{Dict, Str, Value}; +use typst::utils::LazyHash; use crate::font::TinymistFontResolver; /// Compiler feature for LSP universe and worlds without typst.ts to implement /// more for tinymist. type trait of [`CompilerUniverse`]. #[derive(Debug, Clone, Copy)] -pub struct SystemCompilerFeatExtend; +pub struct LspCompilerFeat; -impl CompilerFeat for SystemCompilerFeatExtend { +impl CompilerFeat for LspCompilerFeat { /// Uses [`TinymistFontResolver`] directly. type FontResolver = TinymistFontResolver; /// It accesses a physical file system. @@ -39,11 +37,14 @@ impl CompilerFeat for SystemCompilerFeatExtend { type Registry = HttpRegistry; } -/// The compiler universe in system environment. -pub type TypstSystemUniverseExtend = CompilerUniverse; -/// The compiler world in system environment. -pub type TypstSystemWorldExtend = CompilerWorld; +/// LSP universe that spawns LSP worlds. +pub type LspUniverse = CompilerUniverse; +/// LSP world that holds compilation resources +pub type LspWorld = CompilerWorld; +/// Immutable prehashed reference to dictionary. +pub type ImmutDict = Arc>; +/// World provider for LSP universe and worlds. pub trait WorldProvider { /// Get the entry options from the arguments. fn entry(&self) -> Result; @@ -113,15 +114,6 @@ impl WorldProvider for CompileOnceArgs { } } -/// Compiler feature for LSP universe and worlds. -pub type LspCompilerFeat = SystemCompilerFeatExtend; -/// LSP universe that spawns LSP worlds. -pub type LspUniverse = TypstSystemUniverseExtend; -/// LSP world. -pub type LspWorld = TypstSystemWorldExtend; -/// Immutable prehashed reference to dictionary. -pub type ImmutDict = Arc>; - /// Builder for LSP universe. 
pub struct LspUniverseBuilder; diff --git a/crates/tinymist-std/src/concepts/marker.rs b/crates/tinymist-std/src/concepts/marker.rs index d7c2b56a..16f78d03 100644 --- a/crates/tinymist-std/src/concepts/marker.rs +++ b/crates/tinymist-std/src/concepts/marker.rs @@ -1,10 +1,8 @@ use std::borrow::{Borrow, Cow}; use serde::{Deserializer, Serializer}; -use serde_with::{ - base64::{Base64, Standard}, - formats::Padded, -}; +use serde_with::base64::{Base64, Standard}; +use serde_with::formats::Padded; use serde_with::{DeserializeAs, SerializeAs}; /// A marker type for serializing and deserializing `Cow<[u8]>` as base64. diff --git a/crates/tinymist-std/src/fs/flock.rs b/crates/tinymist-std/src/fs/flock.rs index 3d296244..3f266aad 100644 --- a/crates/tinymist-std/src/fs/flock.rs +++ b/crates/tinymist-std/src/fs/flock.rs @@ -13,11 +13,12 @@ use std::io; use std::io::{Read, Seek, SeekFrom, Write}; use std::path::{Display, Path, PathBuf}; -use self::sys::*; -use super::paths; use anyhow::Context as _; use anyhow::Result; +use self::sys::*; +use super::paths; + /// A locked file. /// /// This provides access to file while holding a lock on the file. This type diff --git a/crates/tinymist-std/src/time.rs b/crates/tinymist-std/src/time.rs index 69272878..476993a6 100644 --- a/crates/tinymist-std/src/time.rs +++ b/crates/tinymist-std/src/time.rs @@ -1,8 +1,7 @@ //! Cross platform time utilities. pub use std::time::SystemTime as Time; -pub use web_time::Duration; -pub use web_time::Instant; +pub use web_time::{Duration, Instant}; /// Returns the current system time (UTC+0). #[cfg(any(feature = "system", feature = "web"))] diff --git a/crates/tinymist-vfs/src/lib.rs b/crates/tinymist-vfs/src/lib.rs index 9ee5f2f9..0005a84a 100644 --- a/crates/tinymist-vfs/src/lib.rs +++ b/crates/tinymist-vfs/src/lib.rs @@ -471,8 +471,6 @@ impl RevisingVfs<'_, M> { } /// Reset the shadowing files in [`OverlayAccessModel`]. - /// - /// Note: This function is independent from [`Vfs::reset`]. pub fn reset_shadow(&mut self) { for path in self.am().inner.inner.file_paths() { self.invalidate_path(&path); diff --git a/crates/tinymist-vfs/src/system.rs b/crates/tinymist-vfs/src/system.rs index 792fabde..ac5b9450 100644 --- a/crates/tinymist-vfs/src/system.rs +++ b/crates/tinymist-vfs/src/system.rs @@ -1,9 +1,9 @@ use std::{fs::File, io::Read, path::Path}; +use tinymist_std::ReadAllOnce; use typst::diag::{FileError, FileResult}; use crate::{Bytes, PathAccessModel}; -use tinymist_std::ReadAllOnce; /// Provides SystemAccessModel that makes access to the local file system for /// system compilation. diff --git a/crates/tinymist/src/project.rs b/crates/tinymist/src/project.rs index 262c1534..20f28440 100644 --- a/crates/tinymist/src/project.rs +++ b/crates/tinymist/src/project.rs @@ -11,11 +11,11 @@ //! ``` //! //! We use typst by creating a [`ProjectCompiler`] and -//! running compiler with callbacking [`LspProjectHandler`] incrementally. An +//! running the compiler with callbacks into [`CompileHandlerImpl`] incrementally. An //! additional [`LocalCompileHandler`] is also created to control the //! [`ProjectCompiler`]. //! -//! The [`LspProjectHandler`] will push information to other actors. +//! The [`CompileHandlerImpl`] will push information to other actors. #![allow(missing_docs)] @@ -82,7 +82,7 @@ impl LspPreviewState { #[derive(Default)] pub struct ProjectStateExt { pub is_compiling: bool, - pub last_compilation: Option>, + pub last_compilation: Option, } /// LSP project compiler.
@@ -309,7 +309,7 @@ impl CompileHandler for CompileHandlerImpl { } } - fn notify_compile(&self, snap: &CompiledArtifact, rep: CompileReport) { + fn notify_compile(&self, snap: &LspCompiledArtifact, rep: CompileReport) { // todo: we need to manage the revision for fn status() as well { let mut n_rev = self.notified_revision.lock(); @@ -361,12 +361,12 @@ pub struct QuerySnapWithStat { } pub struct WorldSnapFut { - rx: oneshot::Receiver>, + rx: oneshot::Receiver, } impl WorldSnapFut { /// wait for the snapshot to be ready - pub async fn receive(self) -> Result> { + pub async fn receive(self) -> Result { self.rx .await .map_err(map_string_err("failed to get snapshot")) @@ -392,13 +392,13 @@ impl QuerySnapFut { } pub struct QuerySnap { - pub snap: CompileSnapshot, + pub snap: LspCompileSnapshot, analysis: Arc, rev_lock: AnalysisRevLock, } impl std::ops::Deref for QuerySnap { - type Target = CompileSnapshot; + type Target = LspCompileSnapshot; fn deref(&self) -> &Self::Target { &self.snap diff --git a/crates/tinymist/src/route.rs b/crates/tinymist/src/route.rs index 919f9a2e..45e1163a 100644 --- a/crates/tinymist/src/route.rs +++ b/crates/tinymist/src/route.rs @@ -2,9 +2,7 @@ use std::{path::Path, sync::Arc}; use reflexo_typst::{path::unix_slash, typst::prelude::EcoVec, LazyHash}; use rpds::RedBlackTreeMapSync; -use tinymist_project::{ - CompileSnapshot, Id, LockFile, LspCompilerFeat, ProjectPathMaterial, ProjectRoute, -}; +use tinymist_project::{Id, LockFile, LspCompileSnapshot, ProjectPathMaterial, ProjectRoute}; use tinymist_query::LspWorldExt; use tinymist_std::{hash::FxHashMap, ImmutPath}; use typst::diag::EcoString; @@ -94,7 +92,7 @@ impl ProjectRouteState { pub fn update_existing_material( &mut self, lock_dir: ImmutPath, - snap: &CompileSnapshot, + snap: &LspCompileSnapshot, ) -> Option<()> { let path_route = self.path_routes.get_mut(&lock_dir)?; diff --git a/crates/tinymist/src/server.rs b/crates/tinymist/src/server.rs index a29f9898..aa4e0453 100644 --- a/crates/tinymist/src/server.rs +++ b/crates/tinymist/src/server.rs @@ -11,7 +11,6 @@ use lsp_server::RequestId; use lsp_types::request::{GotoDeclarationParams, WorkspaceConfiguration}; use lsp_types::*; use once_cell::sync::OnceCell; -use prelude::*; use project::world::EntryState; use project::{watch_deps, LspPreviewState}; use project::{CompileHandlerImpl, Project, QuerySnapFut, QuerySnapWithStat, WorldSnapFut}; @@ -24,7 +23,7 @@ use sync_lsp::*; use task::{ ExportConfig, ExportTask, ExportUserConfig, FormatTask, FormatterConfig, UserActionTask, }; -use tinymist_project::{CompileSnapshot, EntryResolver, ProjectInsId, ProjectResolutionKind}; +use tinymist_project::{EntryResolver, LspCompileSnapshot, ProjectInsId, ProjectResolutionKind}; use tinymist_query::analysis::{Analysis, PeriscopeProvider}; use tinymist_query::{ to_typst_range, CompilerQueryRequest, CompilerQueryResponse, ExportKind, FoldRequestFeature, @@ -32,7 +31,8 @@ use tinymist_query::{ ServerInfoResponse, SyntaxRequest, VersionedDocument, }; use tinymist_render::PeriscopeRenderer; -use tinymist_std::{Error, ImmutPath}; +use tinymist_std::error::prelude::*; +use tinymist_std::ImmutPath; use tokio::sync::mpsc; use typst::layout::Position as TypstPosition; use typst::{diag::FileResult, syntax::Source}; @@ -844,7 +844,7 @@ impl LanguageState { impl LanguageState { /// Focus main file to some path. 
- pub fn change_entry(&mut self, path: Option) -> Result { + pub fn change_entry(&mut self, path: Option) -> Result { if path .as_deref() .is_some_and(|p| !p.is_absolute() && !p.starts_with("/untitled")) @@ -865,7 +865,7 @@ impl LanguageState { } /// Pin the entry to the given path - pub fn pin_entry(&mut self, new_entry: Option) -> Result<(), Error> { + pub fn pin_entry(&mut self, new_entry: Option) -> Result<()> { self.pinning = new_entry.is_some(); let entry = new_entry .or_else(|| self.entry_resolver().resolve_default()) @@ -874,7 +874,7 @@ impl LanguageState { } /// Updates the primary (focusing) entry - pub fn focus_entry(&mut self, new_entry: Option) -> Result { + pub fn focus_entry(&mut self, new_entry: Option) -> Result { if self.pinning || self.config.compile.has_default_entry_path { self.focusing = new_entry; return Ok(false); @@ -1063,7 +1063,7 @@ impl LanguageState { let lock_dir = self.compile_config().entry_resolver.resolve_lock(&entry); let update_dep = lock_dir.clone().map(|lock_dir| { - |snap: CompileSnapshot| async move { + |snap: LspCompileSnapshot| async move { let mut updater = update_lock(lock_dir); let world = snap.world.clone(); let doc_id = updater.compiled(&world)?; @@ -1287,14 +1287,14 @@ impl PeriscopeProvider for TypstPeriscopeProvider { } impl LanguageState { - fn update_source(&mut self, files: FileChangeSet) -> Result<(), Error> { + fn update_source(&mut self, files: FileChangeSet) -> Result<()> { self.add_memory_changes(MemoryEvent::Update(files.clone())); Ok(()) } /// Create a new source file. - pub fn create_source(&mut self, path: PathBuf, content: String) -> Result<(), Error> { + pub fn create_source(&mut self, path: PathBuf, content: String) -> Result<()> { let path: ImmutPath = path.into(); log::info!("create source: {path:?}"); @@ -1314,7 +1314,7 @@ impl LanguageState { } /// Remove a source file. 
- pub fn remove_source(&mut self, path: PathBuf) -> Result<(), Error> { + pub fn remove_source(&mut self, path: PathBuf) -> Result<()> { let path: ImmutPath = path.into(); self.memory_changes.remove(&path); @@ -1332,7 +1332,7 @@ impl LanguageState { path: PathBuf, content: Vec, position_encoding: PositionEncoding, - ) -> Result<(), Error> { + ) -> Result<()> { let path: ImmutPath = path.into(); let meta = self diff --git a/crates/tinymist/src/task/export.rs b/crates/tinymist/src/task/export.rs index 1118885f..90e1b7b4 100644 --- a/crates/tinymist/src/task/export.rs +++ b/crates/tinymist/src/task/export.rs @@ -10,7 +10,7 @@ use crate::project::{ use anyhow::{bail, Context}; use reflexo::ImmutPath; use reflexo_typst::TypstDatetime; -use tinymist_project::{CompileSnapshot, EntryReader}; +use tinymist_project::{EntryReader, LspCompileSnapshot, LspCompiledArtifact}; use tinymist_query::{ExportKind, PageSelection}; use tokio::sync::mpsc; use typlite::Typlite; @@ -23,9 +23,7 @@ use typst::{ use typst_pdf::PdfOptions; use crate::tool::text::FullTextDigest; -use crate::{ - actor::editor::EditorRequest, tool::word_count, world::LspCompilerFeat, ExportMode, PathPattern, -}; +use crate::{actor::editor::EditorRequest, tool::word_count, ExportMode, PathPattern}; use super::*; @@ -60,7 +58,7 @@ impl ExportTask { self.factory.mutate(|data| data.config = config); } - pub fn signal(&self, snap: &CompiledArtifact, s: ExportSignal) { + pub fn signal(&self, snap: &LspCompiledArtifact, s: ExportSignal) { let task = self.factory.task(); task.signal(snap, s, self); } @@ -68,7 +66,7 @@ impl ExportTask { pub struct ExportOnceTask<'a> { pub kind: &'a ExportKind, - pub artifact: CompiledArtifact, + pub artifact: LspCompiledArtifact, pub lock_path: Option, } @@ -82,19 +80,14 @@ pub struct ExportConfig { } impl ExportConfig { - fn signal( - self: Arc, - snap: &CompiledArtifact, - s: ExportSignal, - t: &ExportTask, - ) { + fn signal(self: Arc, snap: &LspCompiledArtifact, s: ExportSignal, t: &ExportTask) { self.signal_export(snap, s, t); self.signal_count_word(snap, t); } fn signal_export( self: &Arc, - artifact: &CompiledArtifact, + artifact: &LspCompiledArtifact, s: ExportSignal, t: &ExportTask, ) -> Option<()> { @@ -134,11 +127,7 @@ impl ExportConfig { Some(()) } - fn signal_count_word( - &self, - artifact: &CompiledArtifact, - t: &ExportTask, - ) -> Option<()> { + fn signal_count_word(&self, artifact: &LspCompiledArtifact, t: &ExportTask) -> Option<()> { if !self.count_words { return None; } @@ -393,7 +382,7 @@ impl ExportConfig { pub async fn oneshot( &self, - snap: CompileSnapshot, + snap: LspCompileSnapshot, kind: ExportKind, lock_path: Option, ) -> anyhow::Result> { diff --git a/crates/tinymist/src/tool/preview.rs b/crates/tinymist/src/tool/preview.rs index 6003f9c6..9841e873 100644 --- a/crates/tinymist/src/tool/preview.rs +++ b/crates/tinymist/src/tool/preview.rs @@ -32,10 +32,9 @@ use typst_preview::{ use typst_shim::syntax::LinkedNodeExt; use crate::project::{ - CompileHandlerImpl, CompileServerOpts, CompiledArtifact, LspInterrupt, ProjectClient, + CompileHandlerImpl, CompileServerOpts, LspCompiledArtifact, LspInterrupt, ProjectClient, ProjectCompiler, }; -use crate::world::LspCompilerFeat; use crate::*; use actor::preview::{PreviewActor, PreviewRequest, PreviewTab}; use project::world::vfs::{notify::MemoryEvent, FileChangeSet}; @@ -44,7 +43,7 @@ use project::{watch_deps, LspPreviewState}; /// The preview's view of the compiled artifact. pub struct PreviewCompileView { /// The artifact and snap. 
- pub snap: CompiledArtifact, + pub snap: LspCompiledArtifact, } impl typst_preview::CompileView for PreviewCompileView { diff --git a/crates/tinymist/src/tool/project.rs b/crates/tinymist/src/tool/project.rs index f43a7280..67e3f897 100644 --- a/crates/tinymist/src/tool/project.rs +++ b/crates/tinymist/src/tool/project.rs @@ -2,8 +2,9 @@ use std::path::Path; +use tinymist_std::error::prelude::*; + use crate::project::*; -use prelude::Result; trait LockFileExt { fn declare(&mut self, args: &DocNewArgs) -> Id;