refactor: rearrangement and document about tinymist-project (#1204)

* refactor: rearrangement and document about tinymist-project

* refactor: more rearrangement and document about tinymist-project
This commit is contained in:
Myriad-Dreamin 2025-01-21 00:56:30 +08:00 committed by GitHub
parent 8ca6c8118c
commit ac25cc1365
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 823 additions and 761 deletions

View file

@ -211,6 +211,9 @@ debug = true
inherits = "release" inherits = "release"
lto = "thin" lto = "thin"
[workspace.lints.rustdoc]
broken_intra_doc_links = "warn"
[workspace.lints.rust] [workspace.lints.rust]
missing_docs = "warn" missing_docs = "warn"
# missing_crate_level_docs = "warn" # missing_crate_level_docs = "warn"

View file

@ -4,20 +4,6 @@ use proc_macro::TokenStream;
use quote::quote; use quote::quote;
use syn::{parse_macro_input, DeriveInput}; use syn::{parse_macro_input, DeriveInput};
/// Attribute macro that marks a type as a TOML data model.
///
/// Expands to the annotated item with the shared serde derives attached and
/// kebab-case field renaming applied, so all project-file models serialize
/// consistently. The attribute's `_metadata` arguments are currently ignored.
///
/// Note: the expansion references `Serialize`/`Deserialize` unqualified, so
/// they must be in scope at the use site.
#[proc_macro_attribute]
pub fn toml_model(
    _metadata: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> TokenStream {
    // Parse first so malformed input yields a proper compile error.
    let input = parse_macro_input!(input as DeriveInput);
    // Re-emit the item prefixed with the shared derive/serde attributes.
    let output = quote! {
        #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
        #[serde(rename_all = "kebab-case")]
        #input
    };
    output.into()
}
#[proc_macro_derive(BindTyCtx, attributes(bind))] #[proc_macro_derive(BindTyCtx, attributes(bind))]
pub fn bind_ty_ctx(input: TokenStream) -> TokenStream { pub fn bind_ty_ctx(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree // Parse the input tokens into a syntax tree

View file

@ -1,171 +1,14 @@
use crate::DocIdArgs; use std::{path::Path, sync::OnceLock};
use core::fmt;
use serde::{Deserialize, Serialize}; use clap::ValueHint;
use std::{num::NonZeroUsize, ops::RangeInclusive, path::Path, str::FromStr, sync::OnceLock};
use tinymist_std::{bail, error::prelude::Result}; use tinymist_std::{bail, error::prelude::Result};
pub use tinymist_world::args::{CompileFontArgs, CompilePackageArgs}; pub use tinymist_world::args::{CompileFontArgs, CompilePackageArgs};
pub use typst_preview::{PreviewArgs, PreviewMode}; pub use typst_preview::{PreviewArgs, PreviewMode};
use clap::{ValueEnum, ValueHint};
use crate::model::*; use crate::model::*;
use crate::PROJECT_ROUTE_USER_ACTION_PRIORITY; use crate::PROJECT_ROUTE_USER_ACTION_PRIORITY;
/// Implements [`fmt::Display`] for a clap `ValueEnum` type by delegating to
/// the value's clap-assigned name (its command-line spelling).
///
/// Panics via `expect` if invoked on a variant that clap skips; none of the
/// enums in this file skip variants.
macro_rules! display_possible_values {
    ($ty:ty) => {
        impl fmt::Display for $ty {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                self.to_possible_value()
                    .expect("no values are skipped")
                    .get_name()
                    .fmt(f)
            }
        }
    };
}
/// When to export an output file.
///
/// Serialized and parsed in camelCase on both the CLI (clap) and in
/// configuration (serde).
#[derive(
    Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, ValueEnum, Serialize, Deserialize,
)]
#[serde(rename_all = "camelCase")]
#[clap(rename_all = "camelCase")]
pub enum TaskWhen {
    /// Never run the task automatically.
    Never,
    /// Run the task on save.
    OnSave,
    /// Run the task on type.
    OnType,
    /// *DEPRECATED* Run the task when a document has a title and is saved,
    /// which is useful to filter out template files.
    ///
    /// Note: this variant is deprecated and kept for compatibility.
    OnDocumentHasTitle,
}
impl TaskWhen {
/// Returns `true` if the task should never be run automatically.
pub fn is_never(&self) -> bool {
matches!(self, TaskWhen::Never)
}
}
display_possible_values!(TaskWhen);
/// Which format to use for the generated output file.
///
/// Parsed on the command line via the clap `ValueEnum` derive; displayed
/// through `display_possible_values!` below.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
pub enum OutputFormat {
    /// Export to PDF.
    Pdf,
    /// Export to PNG.
    Png,
    /// Export to SVG.
    Svg,
    /// Export to HTML.
    Html,
}
display_possible_values!(OutputFormat);
/// A PDF standard that Typst can enforce conformance with.
///
/// The `value`/`serde` renames keep the literal standard names (`1.7`,
/// `a-2b`) on the CLI and in configuration, hence the non-camel-case
/// variant identifiers.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
pub enum PdfStandard {
    /// PDF 1.7.
    #[value(name = "1.7")]
    #[serde(rename = "1.7")]
    V_1_7,
    /// PDF/A-2b.
    #[value(name = "a-2b")]
    #[serde(rename = "a-2b")]
    A_2b,
}
display_possible_values!(PdfStandard);
/// Implements parsing of page ranges (`1-3`, `4`, `5-`, `-2`), used by the
/// `CompileCommand.pages` argument, through the `FromStr` trait instead of a
/// value parser, in order to generate better errors.
///
/// A `None` on either endpoint denotes an open bound on that side.
///
/// See also: <https://github.com/clap-rs/clap/issues/5065>
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);
impl FromStr for Pages {
    type Err = &'static str;

    /// Parses the forms `4`, `1-3`, `5-`, and `-2`; see [`Pages`].
    fn from_str(value: &str) -> Result<Self, Self::Err> {
        let parts: Vec<&str> = value.split('-').map(str::trim).collect();
        match parts.as_slice() {
            [] | [""] => Err("page export range must not be empty"),
            [only] => {
                // A single number is the degenerate range `n..=n`.
                let page = parse_page_number(only)?;
                Ok(Pages(Some(page)..=Some(page)))
            }
            ["", ""] => Err("page export range must have start or end"),
            [first, ""] => Ok(Pages(Some(parse_page_number(first)?)..=None)),
            ["", last] => Ok(Pages(None..=Some(parse_page_number(last)?))),
            [first, last] => {
                let first = parse_page_number(first)?;
                let last = parse_page_number(last)?;
                if first > last {
                    Err("page export range must end at a page after the start")
                } else {
                    Ok(Pages(Some(first)..=Some(last)))
                }
            }
            // Three or more segments means more than one hyphen was given.
            _ => Err("page export range must have a single hyphen"),
        }
    }
}
impl fmt::Display for Pages {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let start = match self.0.start() {
Some(start) => start.to_string(),
None => String::from(""),
};
let end = match self.0.end() {
Some(end) => end.to_string(),
None => String::from(""),
};
write!(f, "{start}-{end}")
}
}
impl serde::Serialize for Pages {
    /// Serializes as the `Display` string (e.g. `"1-3"`), matching the
    /// syntax accepted on the command line.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
impl<'de> serde::Deserialize<'de> for Pages {
    /// Deserializes from the string form accepted by `Pages::from_str`,
    /// mapping parse failures to a serde error.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;
        value.parse().map_err(serde::de::Error::custom)
    }
}
/// Parses a single page number (pages are 1-based).
fn parse_page_number(value: &str) -> Result<NonZeroUsize, &'static str> {
    if value == "0" {
        // Special-cased to give a friendlier message than the generic
        // parse error below.
        return Err("page numbers start at one");
    }
    NonZeroUsize::from_str(value).map_err(|_| "not a valid page number")
}
/// Project document commands. /// Project document commands.
#[derive(Debug, Clone, clap::Subcommand)] #[derive(Debug, Clone, clap::Subcommand)]
#[clap(rename_all = "kebab-case")] #[clap(rename_all = "kebab-case")]
@ -205,6 +48,35 @@ pub struct DocNewArgs {
pub package: CompilePackageArgs, pub package: CompilePackageArgs,
} }
/// The id of a document.
///
/// If an identifier is not provided, the document's path is used as the id.
#[derive(Debug, Clone, clap::Parser)]
pub struct DocIdArgs {
    /// Give a name to the document.
    #[clap(long = "name")]
    pub name: Option<String>,
    /// Path to input Typst file.
    ///
    /// When `--name` is absent, this path is converted into the document id
    /// (see the `From<&DocIdArgs> for Id` impl).
    #[clap(value_hint = ValueHint::FilePath)]
    pub input: String,
}
impl From<&ResourcePath> for Id {
    /// Uses the serialized resource-path text as the id string.
    fn from(value: &ResourcePath) -> Self {
        let repr = value.to_string();
        Id::new(repr)
    }
}
impl From<&DocIdArgs> for Id {
    /// Prefers an explicit `--name`; otherwise derives the id from the
    /// input path's resource-path form.
    fn from(args: &DocIdArgs) -> Self {
        match &args.name {
            Some(name) => Id::new(name.clone()),
            None => (&ResourcePath::from_user_sys(Path::new(&args.input))).into(),
        }
    }
}
/// Configure project's priorities. /// Configure project's priorities.
#[derive(Debug, Clone, clap::Parser)] #[derive(Debug, Clone, clap::Parser)]
pub struct DocConfigureArgs { pub struct DocConfigureArgs {
@ -269,11 +141,13 @@ pub struct TaskCompileArgs {
#[arg(long = "ppi", default_value_t = 144.0)] #[arg(long = "ppi", default_value_t = 144.0)]
pub ppi: f32, pub ppi: f32,
/// The output format.
#[clap(skip)] #[clap(skip)]
pub output_format: OnceLock<Result<OutputFormat>>, pub output_format: OnceLock<Result<OutputFormat>>,
} }
impl TaskCompileArgs { impl TaskCompileArgs {
/// Convert the arguments to a project task.
pub fn to_task(self, doc_id: Id) -> Result<ProjectTask> { pub fn to_task(self, doc_id: Id) -> Result<ProjectTask> {
let new_task_id = self.task_name.map(Id::new); let new_task_id = self.task_name.map(Id::new);
let task_id = new_task_id.unwrap_or(doc_id.clone()); let task_id = new_task_id.unwrap_or(doc_id.clone());

View file

@ -1,39 +1,35 @@
//! Project Model for tinymist //! Project compiler for tinymist.
//!
//! The [`ProjectCompiler`] implementation borrowed from typst.ts.
//!
//! Please check `tinymist::actor::typ_client` for architecture details.
#![allow(missing_docs)]
use core::fmt; use core::fmt;
use std::{ use std::collections::HashSet;
collections::HashSet, use std::path::Path;
path::Path, use std::sync::{Arc, OnceLock};
sync::{Arc, OnceLock},
};
use ecow::{EcoString, EcoVec}; use ecow::{EcoString, EcoVec};
use reflexo_typst::{ use reflexo_typst::features::{CompileFeature, FeatureSet, WITH_COMPILING_STATUS_FEATURE};
features::{CompileFeature, FeatureSet, WITH_COMPILING_STATUS_FEATURE}, use reflexo_typst::{CompileEnv, CompileReport, Compiler, TypstDocument};
CompileEnv, CompileReport, Compiler, TypstDocument,
};
use tinymist_std::error::prelude::Result; use tinymist_std::error::prelude::Result;
use tinymist_world::vfs::notify::{
FilesystemEvent, MemoryEvent, NotifyMessage, UpstreamUpdateEvent,
};
use tinymist_world::vfs::{FileId, FsProvider, RevisingVfs};
use tinymist_world::{
CompilerFeat, CompilerUniverse, CompilerWorld, EntryReader, EntryState, TaskInputs, WorldDeps,
};
use tokio::sync::mpsc; use tokio::sync::mpsc;
use typst::diag::{SourceDiagnostic, SourceResult}; use typst::diag::{SourceDiagnostic, SourceResult};
use crate::LspCompilerFeat; use crate::LspCompilerFeat;
use tinymist_world::{
vfs::{
notify::{FilesystemEvent, MemoryEvent, NotifyMessage, UpstreamUpdateEvent},
FileId, FsProvider, RevisingVfs,
},
CompilerFeat, CompilerUniverse, CompilerWorld, EntryReader, EntryState, TaskInputs, WorldDeps,
};
/// LSP compile snapshot: a [`CompileSnapshot`] specialized to the LSP
/// compiler feature set.
pub type LspCompileSnapshot = CompileSnapshot<LspCompilerFeat>;
/// LSP compiled artifact: a [`CompiledArtifact`] specialized to the LSP
/// compiler feature set.
pub type LspCompiledArtifact = CompiledArtifact<LspCompilerFeat>;
/// LSP interrupt. /// LSP interrupt.
pub type LspInterrupt = Interrupt<LspCompilerFeat>; pub type LspInterrupt = Interrupt<LspCompilerFeat>;
/// Project instance id. This is slightly different from the project ids that
/// persist in disk.
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Default, PartialEq, Eq, Hash)]
pub struct ProjectInsId(EcoString); pub struct ProjectInsId(EcoString);
@ -51,6 +47,7 @@ pub struct ExportSignal {
pub by_entry_update: bool, pub by_entry_update: bool,
} }
/// A snapshot of the project and compilation state.
pub struct CompileSnapshot<F: CompilerFeat> { pub struct CompileSnapshot<F: CompilerFeat> {
/// The project id. /// The project id.
pub id: ProjectInsId, pub id: ProjectInsId,
@ -65,6 +62,11 @@ pub struct CompileSnapshot<F: CompilerFeat> {
} }
impl<F: CompilerFeat + 'static> CompileSnapshot<F> { impl<F: CompilerFeat + 'static> CompileSnapshot<F> {
/// Forks a new snapshot that compiles a different document.
///
/// Note: the resulting document should not be shared in system, because we
/// generally believe that the document is revisioned, but temporary
/// tasks break this assumption.
pub fn task(mut self, inputs: TaskInputs) -> Self { pub fn task(mut self, inputs: TaskInputs) -> Self {
'check_changed: { 'check_changed: {
if let Some(entry) = &inputs.entry { if let Some(entry) = &inputs.entry {
@ -86,6 +88,7 @@ impl<F: CompilerFeat + 'static> CompileSnapshot<F> {
self self
} }
/// Runs the compiler and returns the compiled document.
pub fn compile(self) -> CompiledArtifact<F> { pub fn compile(self) -> CompiledArtifact<F> {
let mut snap = self; let mut snap = self;
snap.world.set_is_compiling(true); snap.world.set_is_compiling(true);
@ -116,6 +119,7 @@ impl<F: CompilerFeat> Clone for CompileSnapshot<F> {
} }
} }
/// A compiled artifact.
pub struct CompiledArtifact<F: CompilerFeat> { pub struct CompiledArtifact<F: CompilerFeat> {
/// The used snapshot. /// The used snapshot.
pub snap: CompileSnapshot<F>, pub snap: CompileSnapshot<F>,
@ -147,6 +151,7 @@ impl<F: CompilerFeat> Clone for CompiledArtifact<F> {
} }
impl<F: CompilerFeat> CompiledArtifact<F> { impl<F: CompilerFeat> CompiledArtifact<F> {
/// Returns the last successfully compiled document.
pub fn success_doc(&self) -> Option<Arc<TypstDocument>> { pub fn success_doc(&self) -> Option<Arc<TypstDocument>> {
self.doc self.doc
.as_ref() .as_ref()
@ -155,6 +160,7 @@ impl<F: CompilerFeat> CompiledArtifact<F> {
.or_else(|| self.snap.success_doc.clone()) .or_else(|| self.snap.success_doc.clone())
} }
/// Returns the depended files.
pub fn depended_files(&self) -> &EcoVec<FileId> { pub fn depended_files(&self) -> &EcoVec<FileId> {
self.deps.get_or_init(|| { self.deps.get_or_init(|| {
let mut deps = EcoVec::default(); let mut deps = EcoVec::default();
@ -167,10 +173,15 @@ impl<F: CompilerFeat> CompiledArtifact<F> {
} }
} }
/// A project compiler handler.
pub trait CompileHandler<F: CompilerFeat, Ext>: Send + Sync + 'static { pub trait CompileHandler<F: CompilerFeat, Ext>: Send + Sync + 'static {
/// Called when there is any reason to compile. This doesn't mean that the
/// project should be compiled.
fn on_any_compile_reason(&self, state: &mut ProjectCompiler<F, Ext>); fn on_any_compile_reason(&self, state: &mut ProjectCompiler<F, Ext>);
// todo: notify project specific compile // todo: notify project specific compile
/// Called when a compilation is done.
fn notify_compile(&self, res: &CompiledArtifact<F>, rep: CompileReport); fn notify_compile(&self, res: &CompiledArtifact<F>, rep: CompileReport);
/// Called when the compilation status is changed.
fn status(&self, revision: usize, id: &ProjectInsId, rep: CompileReport); fn status(&self, revision: usize, id: &ProjectInsId, rep: CompileReport);
} }
@ -185,6 +196,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: 'static> CompileHandler<F, Ex
fn status(&self, _revision: usize, _id: &ProjectInsId, _rep: CompileReport) {} fn status(&self, _revision: usize, _id: &ProjectInsId, _rep: CompileReport) {}
} }
/// An interrupt to the compiler.
pub enum Interrupt<F: CompilerFeat> { pub enum Interrupt<F: CompilerFeat> {
/// Compile anyway. /// Compile anyway.
Compile(ProjectInsId), Compile(ProjectInsId),
@ -218,6 +230,7 @@ impl fmt::Debug for Interrupt<LspCompilerFeat> {
} }
} }
/// An accumulated compile reason stored in the project state.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct CompileReasons { pub struct CompileReasons {
/// The snapshot is taken by the memory editing events. /// The snapshot is taken by the memory editing events.
@ -241,6 +254,7 @@ impl CompileReasons {
self.by_memory_events || self.by_fs_events || self.by_entry_update self.by_memory_events || self.by_fs_events || self.by_entry_update
} }
/// Exclude some reasons.
pub fn exclude(&self, excluded: Self) -> Self { pub fn exclude(&self, excluded: Self) -> Self {
Self { Self {
by_memory_events: self.by_memory_events && !excluded.by_memory_events, by_memory_events: self.by_memory_events && !excluded.by_memory_events,
@ -283,9 +297,13 @@ struct TaggedMemoryEvent {
event: MemoryEvent, event: MemoryEvent,
} }
/// The compiler server options.
pub struct CompileServerOpts<F: CompilerFeat, Ext> { pub struct CompileServerOpts<F: CompilerFeat, Ext> {
/// The compilation handler.
pub handler: Arc<dyn CompileHandler<F, Ext>>, pub handler: Arc<dyn CompileHandler<F, Ext>>,
/// The feature set.
pub feature_set: FeatureSet, pub feature_set: FeatureSet,
/// Whether to enable file system watching.
pub enable_watch: bool, pub enable_watch: bool,
} }
@ -322,7 +340,7 @@ pub struct ProjectCompiler<F: CompilerFeat, Ext> {
} }
impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCompiler<F, Ext> { impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCompiler<F, Ext> {
/// Create a compiler with options /// Creates a compiler with options
pub fn new( pub fn new(
verse: CompilerUniverse<F>, verse: CompilerUniverse<F>,
dep_tx: mpsc::UnboundedSender<NotifyMessage>, dep_tx: mpsc::UnboundedSender<NotifyMessage>,
@ -354,6 +372,22 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
} }
} }
/// Creates a snapshot of the primary project.
pub fn snapshot(&mut self) -> CompileSnapshot<F> {
    self.primary.snapshot()
}

/// Compiles the primary document once, synchronously, and returns the
/// resulting artifact.
///
/// Bypasses the interrupt loop: takes a fresh snapshot and immediately
/// invokes the compile closure produced by `ProjectState::run_compile`.
pub fn compile_once(&mut self) -> CompiledArtifact<F> {
    let snap = self.primary.make_snapshot(true);
    ProjectState::run_compile(self.handler.clone(), snap)()
}

/// Gets the iterator of all projects: the primary project first, then the
/// dedicated ones.
pub fn projects(&mut self) -> impl Iterator<Item = &mut ProjectState<F, Ext>> {
    std::iter::once(&mut self.primary).chain(self.dedicates.iter_mut())
}
fn create_project( fn create_project(
id: ProjectInsId, id: ProjectInsId,
verse: CompilerUniverse<F>, verse: CompilerUniverse<F>,
@ -382,65 +416,8 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
} }
} }
pub fn process(&mut self, intr: Interrupt<F>) { /// Find a project by id, but with less borrow checker restriction.
// todo: evcit cache pub fn find_project<'a>(
self.process_inner(intr);
// Customized Project Compilation Handler
self.handler.clone().on_any_compile_reason(self);
}
pub fn snapshot(&mut self) -> CompileSnapshot<F> {
self.primary.snapshot()
}
/// Compile the document once.
pub fn compile_once(&mut self) -> CompiledArtifact<F> {
let snap = self.primary.make_snapshot(true);
ProjectState::run_compile(self.handler.clone(), snap)()
}
/// Apply delayed memory changes to underlying compiler.
fn apply_delayed_memory_changes(
verse: &mut RevisingVfs<'_, F::AccessModel>,
dirty_shadow_logical_tick: &mut usize,
event: &Option<UpstreamUpdateEvent>,
) -> Option<()> {
// Handle delayed upstream update event before applying file system changes
if let Some(event) = event {
let TaggedMemoryEvent {
logical_tick,
event,
} = event.opaque.as_ref().downcast_ref()?;
// Recovery from dirty shadow state.
if logical_tick == dirty_shadow_logical_tick {
*dirty_shadow_logical_tick = 0;
}
Self::apply_memory_changes(verse, event.clone());
}
Some(())
}
/// Apply memory changes to underlying compiler.
fn apply_memory_changes(vfs: &mut RevisingVfs<'_, F::AccessModel>, event: MemoryEvent) {
if matches!(event, MemoryEvent::Sync(..)) {
vfs.reset_shadow();
}
match event {
MemoryEvent::Update(event) | MemoryEvent::Sync(event) => {
for path in event.removes {
let _ = vfs.unmap_shadow(&path);
}
for (path, snap) in event.inserts {
let _ = vfs.map_shadow(&path, snap);
}
}
}
}
fn find_project<'a>(
primary: &'a mut ProjectState<F, Ext>, primary: &'a mut ProjectState<F, Ext>,
dedicates: &'a mut [ProjectState<F, Ext>], dedicates: &'a mut [ProjectState<F, Ext>],
id: &ProjectInsId, id: &ProjectInsId,
@ -452,8 +429,48 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
dedicates.iter_mut().find(|e| e.id == *id).unwrap() dedicates.iter_mut().find(|e| e.id == *id).unwrap()
} }
pub fn projects(&mut self) -> impl Iterator<Item = &mut ProjectState<F, Ext>> { /// Restart a dedicate project.
std::iter::once(&mut self.primary).chain(self.dedicates.iter_mut()) pub fn restart_dedicate(&mut self, group: &str, entry: EntryState) -> Result<ProjectInsId> {
let id = ProjectInsId(group.into());
let verse = CompilerUniverse::<F>::new_raw(
entry,
Some(self.primary.verse.inputs().clone()),
self.primary.verse.vfs().fork(),
self.primary.verse.registry.clone(),
self.primary.verse.font_resolver.clone(),
);
let proj = Self::create_project(
id.clone(),
verse,
self.handler.clone(),
self.dep_tx.clone(),
self.primary.once_feature_set.as_ref().to_owned(),
);
self.remove_dedicates(&id);
self.dedicates.push(proj);
Ok(id)
}
/// Removes the dedicated project with the given id, logging a warning if
/// no such project exists.
fn remove_dedicates(&mut self, id: &ProjectInsId) {
    match self.dedicates.iter().position(|proj| proj.id == *id) {
        Some(idx) => {
            let _removed = self.dedicates.remove(idx);
            // todo: kill compilations
        }
        None => log::warn!("ProjectCompiler: settle project not found {id:?}"),
    }
}
/// Process an interrupt.
pub fn process(&mut self, intr: Interrupt<F>) {
// todo: evcit cache
self.process_inner(intr);
// Customized Project Compilation Handler
self.handler.clone().on_any_compile_reason(self);
} }
fn process_inner(&mut self, intr: Interrupt<F>) { fn process_inner(&mut self, intr: Interrupt<F>) {
@ -606,43 +623,51 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
} }
} }
pub fn restart_dedicate(&mut self, group: &str, entry: EntryState) -> Result<ProjectInsId> { /// Apply delayed memory changes to underlying compiler.
let id = ProjectInsId(group.into()); fn apply_delayed_memory_changes(
verse: &mut RevisingVfs<'_, F::AccessModel>,
dirty_shadow_logical_tick: &mut usize,
event: &Option<UpstreamUpdateEvent>,
) -> Option<()> {
// Handle delayed upstream update event before applying file system changes
if let Some(event) = event {
let TaggedMemoryEvent {
logical_tick,
event,
} = event.opaque.as_ref().downcast_ref()?;
let verse = CompilerUniverse::<F>::new_raw( // Recovery from dirty shadow state.
entry, if logical_tick == dirty_shadow_logical_tick {
Some(self.primary.verse.inputs().clone()), *dirty_shadow_logical_tick = 0;
self.primary.verse.vfs().fork(), }
self.primary.verse.registry.clone(),
self.primary.verse.font_resolver.clone(),
);
let proj = Self::create_project( Self::apply_memory_changes(verse, event.clone());
id.clone(), }
verse,
self.handler.clone(),
self.dep_tx.clone(),
self.primary.once_feature_set.as_ref().to_owned(),
);
self.remove_dedicates(&id); Some(())
self.dedicates.push(proj);
Ok(id)
} }
fn remove_dedicates(&mut self, id: &ProjectInsId) { /// Apply memory changes to underlying compiler.
let proj = self.dedicates.iter().position(|e| e.id == *id); fn apply_memory_changes(vfs: &mut RevisingVfs<'_, F::AccessModel>, event: MemoryEvent) {
if let Some(idx) = proj { if matches!(event, MemoryEvent::Sync(..)) {
let _proj = self.dedicates.remove(idx); vfs.reset_shadow();
// todo: kill compilations }
} else { match event {
log::warn!("ProjectCompiler: settle project not found {id:?}"); MemoryEvent::Update(event) | MemoryEvent::Sync(event) => {
for path in event.removes {
let _ = vfs.unmap_shadow(&path);
}
for (path, snap) in event.inserts {
let _ = vfs.map_shadow(&path, snap);
}
}
} }
} }
} }
/// A project state.
pub struct ProjectState<F: CompilerFeat, Ext> { pub struct ProjectState<F: CompilerFeat, Ext> {
/// The project instance id.
pub id: ProjectInsId, pub id: ProjectInsId,
/// The extension /// The extension
pub ext: Ext, pub ext: Ext,
@ -672,10 +697,12 @@ pub struct ProjectState<F: CompilerFeat, Ext> {
} }
impl<F: CompilerFeat, Ext: 'static> ProjectState<F, Ext> { impl<F: CompilerFeat, Ext: 'static> ProjectState<F, Ext> {
/// Creates a new compile environment.
pub fn make_env(&self, feature_set: Arc<FeatureSet>) -> CompileEnv { pub fn make_env(&self, feature_set: Arc<FeatureSet>) -> CompileEnv {
CompileEnv::default().configure_shared(feature_set) CompileEnv::default().configure_shared(feature_set)
} }
/// Creates a snapshot of the project.
pub fn snapshot(&mut self) -> CompileSnapshot<F> { pub fn snapshot(&mut self) -> CompileSnapshot<F> {
match self.snapshot.as_ref() { match self.snapshot.as_ref() {
Some(snap) if snap.world.revision() == self.verse.revision => snap.clone(), Some(snap) if snap.world.revision() == self.verse.revision => snap.clone(),
@ -707,52 +734,8 @@ impl<F: CompilerFeat, Ext: 'static> ProjectState<F, Ext> {
} }
} }
fn process_compile(&mut self, artifact: CompiledArtifact<F>) { /// Compile the document once if there is any reason and the entry is
let world = &artifact.snap.world; /// active.
let compiled_revision = world.revision().get();
if self.committed_revision >= compiled_revision {
return;
}
// Update state.
let doc = artifact.doc.ok();
self.committed_revision = compiled_revision;
self.latest_doc.clone_from(&doc);
if doc.is_some() {
self.latest_success_doc.clone_from(&self.latest_doc);
}
// Notify the new file dependencies.
let mut deps = vec![];
world.iter_dependencies(&mut |dep| {
if let Ok(x) = world.file_path(dep).and_then(|e| e.to_err()) {
deps.push(x.into())
}
});
let event = NotifyMessage::SyncDependency(deps);
let err = self.dep_tx.send(event);
log_send_error("dep_tx", err);
let mut world = artifact.snap.world;
let is_primary = self.id == ProjectInsId("primary".into());
// Trigger an evict task.
rayon::spawn(move || {
let evict_start = std::time::Instant::now();
if is_primary {
comemo::evict(10);
// Since all the projects share the same cache, we need to evict the cache
// on the primary instance for all the projects.
world.evict_source_cache(30);
}
world.evict_vfs(60);
let elapsed = evict_start.elapsed();
log::info!("ProjectCompiler: evict cache in {elapsed:?}");
});
}
#[must_use] #[must_use]
pub fn may_compile( pub fn may_compile(
&mut self, &mut self,
@ -801,6 +784,52 @@ impl<F: CompilerFeat, Ext: 'static> ProjectState<F, Ext> {
compiled compiled
} }
} }
/// Commits a finished compilation into this project's state and notifies
/// downstream consumers of the new dependency set.
fn process_compile(&mut self, artifact: CompiledArtifact<F>) {
    let world = &artifact.snap.world;
    let compiled_revision = world.revision().get();
    // Ignore stale artifacts belonging to an already-committed revision.
    if self.committed_revision >= compiled_revision {
        return;
    }

    // Update state.
    let doc = artifact.doc.ok();
    self.committed_revision = compiled_revision;
    self.latest_doc.clone_from(&doc);
    if doc.is_some() {
        self.latest_success_doc.clone_from(&self.latest_doc);
    }

    // Notify the new file dependencies.
    let mut deps = vec![];
    world.iter_dependencies(&mut |dep| {
        if let Ok(x) = world.file_path(dep).and_then(|e| e.to_err()) {
            deps.push(x.into())
        }
    });
    let event = NotifyMessage::SyncDependency(deps);
    let err = self.dep_tx.send(event);
    log_send_error("dep_tx", err);

    // Take ownership of the world so the background task below can use it.
    let mut world = artifact.snap.world;
    let is_primary = self.id == ProjectInsId("primary".into());

    // Trigger an evict task.
    rayon::spawn(move || {
        let evict_start = std::time::Instant::now();
        if is_primary {
            comemo::evict(10);
            // Since all the projects share the same cache, we need to evict the cache
            // on the primary instance for all the projects.
            world.evict_source_cache(30);
        }
        world.evict_vfs(60);
        let elapsed = evict_start.elapsed();
        log::info!("ProjectCompiler: evict cache in {elapsed:?}");
    });
}
} }
fn log_compile_report(env: &CompileEnv, rep: &CompileReport) { fn log_compile_report(env: &CompileEnv, rep: &CompileReport) {

View file

@ -108,6 +108,7 @@ impl EntryResolver {
}) })
} }
/// Resolves the directory to store the lock file.
pub fn resolve_lock(&self, entry: &EntryState) -> Option<ImmutPath> { pub fn resolve_lock(&self, entry: &EntryState) -> Option<ImmutPath> {
match self.project_resolution { match self.project_resolution {
ProjectResolutionKind::LockDatabase if entry.is_in_package() => { ProjectResolutionKind::LockDatabase if entry.is_in_package() => {
@ -124,7 +125,7 @@ impl EntryResolver {
} }
} }
/// Determines the default entry path. /// Resolves the default entry path.
pub fn resolve_default(&self) -> Option<ImmutPath> { pub fn resolve_default(&self) -> Option<ImmutPath> {
let entry = self.entry.as_ref(); let entry = self.entry.as_ref();
// todo: pre-compute this when updating config // todo: pre-compute this when updating config

View file

@ -1,20 +1,18 @@
//! Font resolver implementation. //! Font resolver implementation.
use core::fmt; pub use crate::world::base::font::*;
use std::{
collections::HashMap,
path::PathBuf,
sync::{Arc, Mutex},
};
use core::fmt;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use tinymist_std::debug_loc::DataSource;
use tinymist_world::font::system::SystemFontSearcher; use tinymist_world::font::system::SystemFontSearcher;
use typst::text::{Font, FontBook, FontInfo}; use typst::text::{Font, FontBook, FontInfo};
use typst::utils::LazyHash; use typst::utils::LazyHash;
use crate::world::vfs::Bytes; use crate::world::vfs::Bytes;
use tinymist_std::debug_loc::DataSource;
pub use crate::world::base::font::*;
#[derive(Debug)] #[derive(Debug)]
/// The default FontResolver implementation. /// The default FontResolver implementation.

View file

@ -1,7 +1,5 @@
//! Project Model for tinymist //! Project Model for tinymist
#![allow(missing_docs)]
mod args; mod args;
mod compiler; mod compiler;
mod entry; mod entry;

View file

@ -1,17 +1,223 @@
#![allow(missing_docs)]
use std::cmp::Ordering;
use std::io::{Read, Seek, SeekFrom, Write};
use std::{path::Path, sync::Arc}; use std::{path::Path, sync::Arc};
use ecow::EcoVec; use ecow::{eco_vec, EcoVec};
use reflexo_typst::ImmutPath; use tinymist_std::error::prelude::*;
use tinymist_std::path::unix_slash; use tinymist_std::path::unix_slash;
use tinymist_std::{bail, ImmutPath};
use typst::diag::EcoString; use typst::diag::EcoString;
use typst::World; use typst::World;
use crate::model::{Id, ProjectInput, ProjectRoute, ProjectTask, ResourcePath}; use crate::model::{Id, ProjectInput, ProjectRoute, ProjectTask, ResourcePath};
use crate::{LspWorld, ProjectPathMaterial}; use crate::{LockFile, LockFileCompat, LspWorld, ProjectPathMaterial, LOCK_VERSION};
/// The file name of the tinymist lock file, created in the directory passed
/// to `LockFile::update`/`LockFile::read`.
pub const LOCK_FILENAME: &str = "tinymist.lock";

/// The route priority assigned to project routes created by direct user
/// actions.
pub const PROJECT_ROUTE_USER_ACTION_PRIORITY: u32 = 256;
impl LockFile {
/// Returns the document input registered under `id`, if any.
pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> {
    self.document.iter().find(|input| input.id == *id)
}
/// Replaces the document input with the same id, or appends it if absent.
pub fn replace_document(&mut self, input: ProjectInput) {
    match self.document.iter_mut().find(|doc| doc.id == input.id) {
        Some(slot) => *slot = input,
        None => self.document.push(input),
    }
}
/// Replaces the task with the same id, or appends it if absent.
pub fn replace_task(&mut self, task: ProjectTask) {
    let id = task.id().clone();
    match self.task.iter_mut().find(|existing| *existing.id() == id) {
        Some(slot) => *slot = task,
        None => self.task.push(task),
    }
}
/// Replaces (or inserts) the route entry for `route.id`.
///
/// All existing entries with the same id are removed before the new route
/// is appended, so the latest route for an id always wins.
pub fn replace_route(&mut self, route: ProjectRoute) {
    // Compare against `route.id` directly; cloning the id first (as the
    // previous version did) is unnecessary since `retain` only borrows it.
    self.route.retain(|existing| existing.id != route.id);
    self.route.push(route);
}
/// Sorts documents and tasks into a stable on-disk order.
pub fn sort(&mut self) {
    self.document.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
    // Tasks order lexicographically by (document id, task id).
    self.task
        .sort_by(|lhs, rhs| (lhs.doc_id(), lhs.id()).cmp(&(rhs.doc_id(), rhs.id())));
    // The route's order is significant, so it is intentionally left unsorted.
}
/// Serializes the lock file into its on-disk TOML representation.
///
/// The output is emitted manually (instead of via a single `toml`
/// serialization call) to guarantee a stable, review-friendly layout:
/// a `@generated` header, a `version` line, then one `[[document]]`,
/// `[[route]]`, and `[[task]]` table per entry, in that section order.
pub fn serialize_resolve(&self) -> String {
    let content = toml::Table::try_from(self).unwrap();

    let mut out = String::new();

    // At the start of the file we notify the reader that the file is generated.
    // Specifically Phabricator ignores files containing "@generated", so we use
    // that.
    let marker_line = "# This file is automatically @generated by tinymist.";
    let extra_line = "# It is not intended for manual editing.";
    out.push_str(marker_line);
    out.push('\n');
    out.push_str(extra_line);
    out.push('\n');

    out.push_str(&format!("version = {LOCK_VERSION:?}\n"));

    let document = content.get("document");
    if let Some(document) = document {
        for document in document.as_array().unwrap() {
            out.push('\n');
            out.push_str("[[document]]\n");
            emit_document(document, &mut out);
        }
    }

    let route = content.get("route");
    if let Some(route) = route {
        for route in route.as_array().unwrap() {
            out.push('\n');
            out.push_str("[[route]]\n");
            emit_route(route, &mut out);
        }
    }

    let task = content.get("task");
    if let Some(task) = task {
        for task in task.as_array().unwrap() {
            out.push('\n');
            out.push_str("[[task]]\n");
            emit_output(task, &mut out);
        }
    }

    // Helpers are declared after the early `return` so they stay local to
    // this function.
    return out;

    // Emits the body of a `[[document]]` entry.
    fn emit_document(input: &toml::Value, out: &mut String) {
        let table = input.as_table().unwrap();
        out.push_str(&table.to_string());
    }

    // Emits the body of a `[[task]]` entry.
    fn emit_output(output: &toml::Value, out: &mut String) {
        let mut table = output.clone();
        let table = table.as_table_mut().unwrap();

        // replace transform with task.transforms
        if let Some(transform) = table.remove("transform") {
            let mut task_table = toml::Table::new();
            task_table.insert("transform".to_string(), transform);

            table.insert("task".to_string(), task_table.into());
        }

        out.push_str(&table.to_string());
    }

    // Emits the body of a `[[route]]` entry.
    fn emit_route(route: &toml::Value, out: &mut String) {
        let table = route.as_table().unwrap();
        out.push_str(&table.to_string());
    }
}
/// Runs `f` against the lock file in `cwd`, persisting any changes.
///
/// The lock file is opened (created if missing) under an exclusive file
/// lock, read and version-checked, migrated if needed, mutated by `f`, and
/// written back only when the serialized content actually changed.
pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> {
    let fs = tinymist_std::fs::flock::Filesystem::new(cwd.to_owned());

    // Exclusive lock: concurrent project commands must not interleave
    // read-modify-write cycles on the lock file.
    let mut lock_file = fs
        .open_rw_exclusive_create(LOCK_FILENAME, "project commands")
        .context("tinymist.lock")?;

    let mut data = vec![];
    lock_file.read_to_end(&mut data).context("read lock")?;

    let old_data =
        std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;

    // A fresh/empty lock file starts from the default state; otherwise
    // parse the existing content and check its version against ours.
    let mut state = if old_data.trim().is_empty() {
        LockFile {
            document: vec![],
            task: vec![],
            route: eco_vec![],
        }
    } else {
        let old_state = toml::from_str::<LockFileCompat>(old_data)
            .context_ut("tinymist.lock file is not a valid TOML file")?;

        let version = old_state.version()?;
        match Version(version).partial_cmp(&Version(LOCK_VERSION)) {
            // Older or equal versions are accepted and migrated below.
            Some(Ordering::Equal | Ordering::Less) => {}
            Some(Ordering::Greater) => {
                bail!(
                    "trying to update lock file having a future version, current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}",
                );
            }
            None => {
                bail!(
                    "cannot compare version, are version strings in right format? current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}",
                );
            }
        }

        old_state.migrate()?
    };

    f(&mut state)?;

    // todo: for read only operations, we don't have to compare it.
    state.sort();
    let new_data = state.serialize_resolve();

    // If the lock file contents haven't changed so don't rewrite it. This is
    // helpful on read-only filesystems.
    if old_data == new_data {
        return Ok(());
    }

    // todo: even if cargo, they don't update the lock file atomically. This
    // indicates that we may get data corruption if the process is killed
    // while writing the lock file. This is sensible because `Cargo.lock` is
    // only a "resolved result" of the `Cargo.toml`. Thus, we should inform
    // users that don't only persist configuration in the lock file.
    lock_file.file().set_len(0).context(LOCK_FILENAME)?;
    lock_file.seek(SeekFrom::Start(0)).context(LOCK_FILENAME)?;
    lock_file
        .write_all(new_data.as_bytes())
        .context(LOCK_FILENAME)?;

    Ok(())
}
/// Loads the lock file from `dir` under a shared (read-only) lock and
/// migrates it to the current schema version.
pub fn read(dir: &Path) -> Result<Self> {
    let fs = tinymist_std::fs::flock::Filesystem::new(dir.to_owned());
    let mut lock_file = fs
        .open_ro_shared(LOCK_FILENAME, "project commands")
        .context(LOCK_FILENAME)?;

    let mut bytes = Vec::new();
    lock_file.read_to_end(&mut bytes).context(LOCK_FILENAME)?;
    let text = std::str::from_utf8(&bytes).context("tinymist.lock file is not valid utf-8")?;

    toml::from_str::<LockFileCompat>(text)
        .context_ut("tinymist.lock file is not a valid TOML file")?
        .migrate()
}
}
/// Make a new project lock updater. /// Make a new project lock updater.
pub fn update_lock(root: ImmutPath) -> ProjectLockUpdater { pub fn update_lock(root: ImmutPath) -> LockFileUpdate {
ProjectLockUpdater { LockFileUpdate {
root, root,
updates: vec![], updates: vec![],
} }
@ -24,12 +230,12 @@ enum LockUpdate {
Route(ProjectRoute), Route(ProjectRoute),
} }
pub struct ProjectLockUpdater { pub struct LockFileUpdate {
root: Arc<Path>, root: Arc<Path>,
updates: Vec<LockUpdate>, updates: Vec<LockUpdate>,
} }
impl ProjectLockUpdater { impl LockFileUpdate {
pub fn compiled(&mut self, world: &LspWorld) -> Option<Id> { pub fn compiled(&mut self, world: &LspWorld) -> Option<Id> {
let id = Id::from_world(world)?; let id = Id::from_world(world)?;
@ -143,3 +349,22 @@ impl ProjectLockUpdater {
} }
} }
} }
struct Version<'a>(&'a str);
impl PartialEq for Version<'_> {
fn eq(&self, other: &Self) -> bool {
semver::Version::parse(self.0)
.ok()
.and_then(|a| semver::Version::parse(other.0).ok().map(|b| a == b))
.unwrap_or(false)
}
}
impl PartialOrd for Version<'_> {
    /// Orders by parsed semver; yields `None` when either side is not a
    /// valid semver string.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        match (semver::Version::parse(self.0), semver::Version::parse(other.0)) {
            (Ok(lhs), Ok(rhs)) => Some(lhs.cmp(&rhs)),
            _ => None,
        }
    }
}

View file

@ -1,11 +1,12 @@
use core::fmt; use core::fmt;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::io::{Read, Seek, SeekFrom, Write}; use std::num::NonZeroUsize;
use std::ops::RangeInclusive;
use std::path::PathBuf; use std::path::PathBuf;
use std::{cmp::Ordering, path::Path, str::FromStr}; use std::{path::Path, str::FromStr};
use clap::ValueHint; use clap::ValueEnum;
use ecow::{eco_vec, EcoVec}; use ecow::EcoVec;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tinymist_std::error::prelude::*; use tinymist_std::error::prelude::*;
use tinymist_std::path::unix_slash; use tinymist_std::path::unix_slash;
@ -19,258 +20,8 @@ pub use task::*;
use crate::LspWorld; use crate::LspWorld;
use super::{Pages, PdfStandard, TaskWhen}; /// The currently using lock file version.
pub const LOCK_VERSION: &str = "0.1.0-beta0";
pub const LOCK_FILENAME: &str = "tinymist.lock";
const LOCK_VERSION: &str = "0.1.0-beta0";
pub const PROJECT_ROUTE_USER_ACTION_PRIORITY: u32 = 256;
/// A version-tagged view of the lock file, used to detect and migrate older
/// (or reject newer) schemas before working with a [`LockFile`].
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case", tag = "version")]
pub enum LockFileCompat {
    /// The lock file schema with version 0.1.0-beta0.
    #[serde(rename = "0.1.0-beta0")]
    Version010Beta0(LockFile),
    /// Any other schema, kept as raw JSON so its `version` field can still
    /// be read for diagnostics.
    #[serde(untagged)]
    Other(serde_json::Value),
}
impl LockFileCompat {
    /// Returns the schema version string recorded in the lock file.
    pub fn version(&self) -> Result<&str> {
        match self {
            Self::Version010Beta0(..) => Ok(LOCK_VERSION),
            Self::Other(value) => {
                let version = value.get("version").and_then(|v| v.as_str());
                version.context("missing version field")
            }
        }
    }

    /// Converts this compatibility wrapper into the current [`LockFile`]
    /// schema, failing for schemas this build cannot migrate.
    pub fn migrate(self) -> Result<LockFile> {
        if let Self::Version010Beta0(lock) = self {
            return Ok(lock);
        }
        bail!(
            "cannot migrate from version: {}",
            self.version().unwrap_or("unknown version")
        )
    }
}
/// The lock file model, storing the project's inputs, tasks, and routes.
///
/// Empty collections are omitted from serialization to keep the emitted
/// `tinymist.lock` minimal.
#[derive(Debug, Default, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct LockFile {
    // The lock file version is emitted manually by `serialize_resolve` and
    // consumed through `LockFileCompat`'s `version` tag, hence no field here.
    // version: String,
    /// The project's document (input).
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub document: Vec<ProjectInput>,
    /// The project's task (output).
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub task: Vec<ProjectTask>,
    /// The project's task route.
    #[serde(skip_serializing_if = "EcoVec::is_empty", default)]
    pub route: EcoVec<ProjectRoute>,
}
impl LockFile {
    /// Looks up a project input (document) by id.
    pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> {
        self.document.iter().find(|i| &i.id == id)
    }

    /// Inserts the input, overwriting any existing input with the same id.
    pub fn replace_document(&mut self, input: ProjectInput) {
        let id = input.id.clone();
        let index = self.document.iter().position(|i| i.id == id);
        if let Some(index) = index {
            self.document[index] = input;
        } else {
            self.document.push(input);
        }
    }

    /// Inserts the task, overwriting any existing task with the same id.
    pub fn replace_task(&mut self, task: ProjectTask) {
        let id = task.id().clone();
        let index = self.task.iter().position(|i| *i.id() == id);
        if let Some(index) = index {
            self.task[index] = task;
        } else {
            self.task.push(task);
        }
    }

    /// Removes any route with the same id and appends the new route at the
    /// end; route order is significant and preserved (see [`Self::sort`]).
    pub fn replace_route(&mut self, route: ProjectRoute) {
        let id = route.id.clone();
        self.route.retain(|i| i.id != id);
        self.route.push(route);
    }

    /// Sorts documents and tasks so that serialization is deterministic.
    pub fn sort(&mut self) {
        self.document.sort_by(|a, b| a.id.cmp(&b.id));
        self.task
            .sort_by(|a, b| a.doc_id().cmp(b.doc_id()).then_with(|| a.id().cmp(b.id())));
        // the route's order is important, so we don't sort them.
    }

    /// Serializes the lock file into its on-disk TOML form, prefixed with a
    /// generated-file banner and a `version` header line.
    pub fn serialize_resolve(&self) -> String {
        let content = toml::Table::try_from(self).unwrap();
        let mut out = String::new();
        // At the start of the file we notify the reader that the file is generated.
        // Specifically Phabricator ignores files containing "@generated", so we use
        // that.
        let marker_line = "# This file is automatically @generated by tinymist.";
        let extra_line = "# It is not intended for manual editing.";
        out.push_str(marker_line);
        out.push('\n');
        out.push_str(extra_line);
        out.push('\n');
        out.push_str(&format!("version = {LOCK_VERSION:?}\n"));
        // Each array-of-tables section is emitted explicitly so the output
        // order (document, route, task) stays stable.
        let document = content.get("document");
        if let Some(document) = document {
            for document in document.as_array().unwrap() {
                out.push('\n');
                out.push_str("[[document]]\n");
                emit_document(document, &mut out);
            }
        }
        let route = content.get("route");
        if let Some(route) = route {
            for route in route.as_array().unwrap() {
                out.push('\n');
                out.push_str("[[route]]\n");
                emit_route(route, &mut out);
            }
        }
        let task = content.get("task");
        if let Some(task) = task {
            for task in task.as_array().unwrap() {
                out.push('\n');
                out.push_str("[[task]]\n");
                emit_output(task, &mut out);
            }
        }
        return out;
        // Writes one `[[document]]` entry.
        fn emit_document(input: &toml::Value, out: &mut String) {
            let table = input.as_table().unwrap();
            out.push_str(&table.to_string());
        }
        // Writes one `[[task]]` entry.
        fn emit_output(output: &toml::Value, out: &mut String) {
            let mut table = output.clone();
            let table = table.as_table_mut().unwrap();
            // replace transform with task.transforms
            if let Some(transform) = table.remove("transform") {
                let mut task_table = toml::Table::new();
                task_table.insert("transform".to_string(), transform);
                table.insert("task".to_string(), task_table.into());
            }
            out.push_str(&table.to_string());
        }
        // Writes one `[[route]]` entry.
        fn emit_route(route: &toml::Value, out: &mut String) {
            let table = route.as_table().unwrap();
            out.push_str(&table.to_string());
        }
    }

    /// Reads, mutates via `f`, and rewrites the lock file under `cwd`,
    /// holding an exclusive file lock for the whole operation. The file is
    /// only rewritten when its content actually changed.
    pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> {
        let fs = tinymist_std::fs::flock::Filesystem::new(cwd.to_owned());
        let mut lock_file = fs
            .open_rw_exclusive_create(LOCK_FILENAME, "project commands")
            .context("tinymist.lock")?;
        let mut data = vec![];
        lock_file.read_to_end(&mut data).context("read lock")?;
        let old_data =
            std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;
        // A fresh (empty) file starts from a default state; otherwise the
        // content is parsed, version-checked, and migrated.
        let mut state = if old_data.trim().is_empty() {
            LockFile {
                document: vec![],
                task: vec![],
                route: eco_vec![],
            }
        } else {
            let old_state = toml::from_str::<LockFileCompat>(old_data)
                .context_ut("tinymist.lock file is not a valid TOML file")?;
            let version = old_state.version()?;
            // Refuse to modify lock files written by a newer tinymist.
            match Version(version).partial_cmp(&Version(LOCK_VERSION)) {
                Some(Ordering::Equal | Ordering::Less) => {}
                Some(Ordering::Greater) => {
                    bail!(
                        "trying to update lock file having a future version, current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}",
                    );
                }
                None => {
                    bail!(
                        "cannot compare version, are version strings in right format? current tinymist-cli supports {LOCK_VERSION}, the lock file is {version}",
                    );
                }
            }
            old_state.migrate()?
        };
        f(&mut state)?;
        // todo: for read only operations, we don't have to compare it.
        state.sort();
        let new_data = state.serialize_resolve();
        // If the lock file contents haven't changed so don't rewrite it. This is
        // helpful on read-only filesystems.
        if old_data == new_data {
            return Ok(());
        }
        // todo: even if cargo, they don't update the lock file atomically. This
        // indicates that we may get data corruption if the process is killed
        // while writing the lock file. This is sensible because `Cargo.lock` is
        // only a "resolved result" of the `Cargo.toml`. Thus, we should inform
        // users that don't only persist configuration in the lock file.
        lock_file.file().set_len(0).context(LOCK_FILENAME)?;
        lock_file.seek(SeekFrom::Start(0)).context(LOCK_FILENAME)?;
        lock_file
            .write_all(new_data.as_bytes())
            .context(LOCK_FILENAME)?;
        Ok(())
    }

    /// Loads the lock file from `dir` under a shared (read-only) lock and
    /// migrates it to the current schema version.
    pub fn read(dir: &Path) -> Result<Self> {
        let fs = tinymist_std::fs::flock::Filesystem::new(dir.to_owned());
        let mut lock_file = fs
            .open_ro_shared(LOCK_FILENAME, "project commands")
            .context(LOCK_FILENAME)?;
        let mut data = vec![];
        lock_file.read_to_end(&mut data).context(LOCK_FILENAME)?;
        let data = std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;
        let state = toml::from_str::<LockFileCompat>(data)
            .context_ut("tinymist.lock file is not a valid TOML file")?;
        state.migrate()
    }
}
/// A scalar that is not NaN. /// A scalar that is not NaN.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
@ -320,10 +71,12 @@ impl Ord for Scalar {
pub struct Id(String); pub struct Id(String);
impl Id { impl Id {
/// Creates a new project Id.
pub fn new(s: String) -> Self { pub fn new(s: String) -> Self {
Id(s) Id(s)
} }
/// Creates a new project Id from a world.
pub fn from_world(world: &LspWorld) -> Option<Self> { pub fn from_world(world: &LspWorld) -> Option<Self> {
let entry = world.entry_state(); let entry = world.entry_state();
let id = unix_slash(entry.main()?.vpath().as_rootless_path()); let id = unix_slash(entry.main()?.vpath().as_rootless_path());
@ -339,35 +92,173 @@ impl fmt::Display for Id {
} }
} }
/// The id of a document. macro_rules! display_possible_values {
/// ($ty:ty) => {
/// If an identifier is not provided, the document's path is used as the id. impl fmt::Display for $ty {
#[derive(Debug, Clone, clap::Parser)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
pub struct DocIdArgs { self.to_possible_value()
/// Give a name to the document. .expect("no values are skipped")
#[clap(long = "name")] .get_name()
pub name: Option<String>, .fmt(f)
/// Path to input Typst file. }
#[clap(value_hint = ValueHint::FilePath)] }
pub input: String, };
} }
impl From<&ResourcePath> for Id { /// When to export an output file.
fn from(value: &ResourcePath) -> Self { ///
Id(value.to_string()) /// By default, a `tinymist compile` only provides input information and
/// doesn't change the `when` field. However, you can still specify a `when`
/// argument to override the default behavior for specific tasks.
///
/// ## Examples
///
/// ```bash
/// tinymist compile --when onSave main.typ
/// alias typst="tinymist compile --when=onSave"
/// typst compile main.typ
/// ```
/// When to run a project task automatically (serialized/parsed in
/// camelCase for both serde and clap).
#[derive(
    Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, ValueEnum, Serialize, Deserialize,
)]
#[serde(rename_all = "camelCase")]
#[clap(rename_all = "camelCase")]
pub enum TaskWhen {
    /// Never watch to run task.
    Never,
    /// Run task on save.
    OnSave,
    /// Run task on type.
    OnType,
    /// *DEPRECATED* Run task when a document has a title and on saved, which is
    /// useful to filter out template files.
    ///
    /// Note: this is deprecating.
    OnDocumentHasTitle,
}
impl TaskWhen {
/// Returns `true` if the task should never be run automatically.
pub fn is_never(&self) -> bool {
matches!(self, TaskWhen::Never)
} }
} }
impl From<&DocIdArgs> for Id { display_possible_values!(TaskWhen);
fn from(args: &DocIdArgs) -> Self {
if let Some(id) = &args.name { /// Which format to use for the generated output file.
Id(id.clone()) #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
} else { pub enum OutputFormat {
(&ResourcePath::from_user_sys(Path::new(&args.input))).into() /// Export to PDF.
Pdf,
/// Export to PNG.
Png,
/// Export to SVG.
Svg,
/// Export to HTML.
Html,
}
display_possible_values!(OutputFormat);
/// A PDF standard that Typst can enforce conformance with.
///
/// The clap value names and serde renames are kept in sync so that CLI
/// arguments and serialized tasks use the same spellings.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
pub enum PdfStandard {
    /// PDF 1.7.
    #[value(name = "1.7")]
    #[serde(rename = "1.7")]
    V_1_7,
    /// PDF/A-2b.
    #[value(name = "a-2b")]
    #[serde(rename = "a-2b")]
    A_2b,
}

// Renders `PdfStandard` via its clap value name (e.g. "1.7", "a-2b").
display_possible_values!(PdfStandard);
/// Implements parsing of page ranges (`1-3`, `4`, `5-`, `-2`), used by the
/// `CompileCommand.pages` argument, through the `FromStr` trait instead of a
/// value parser, in order to generate better errors.
///
/// A `None` bound means the range is open on that side (e.g. `5-` means
/// from page 5 to the end of the document).
///
/// See also: <https://github.com/clap-rs/clap/issues/5065>
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);
impl FromStr for Pages {
type Err = &'static str;
fn from_str(value: &str) -> Result<Self, Self::Err> {
match value
.split('-')
.map(str::trim)
.collect::<Vec<_>>()
.as_slice()
{
[] | [""] => Err("page export range must not be empty"),
[single_page] => {
let page_number = parse_page_number(single_page)?;
Ok(Pages(Some(page_number)..=Some(page_number)))
}
["", ""] => Err("page export range must have start or end"),
[start, ""] => Ok(Pages(Some(parse_page_number(start)?)..=None)),
["", end] => Ok(Pages(None..=Some(parse_page_number(end)?))),
[start, end] => {
let start = parse_page_number(start)?;
let end = parse_page_number(end)?;
if start > end {
Err("page export range must end at a page after the start")
} else {
Ok(Pages(Some(start)..=Some(end)))
}
}
[_, _, _, ..] => Err("page export range must have a single hyphen"),
} }
} }
} }
impl fmt::Display for Pages {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let start = match self.0.start() {
Some(start) => start.to_string(),
None => String::from(""),
};
let end = match self.0.end() {
Some(end) => end.to_string(),
None => String::from(""),
};
write!(f, "{start}-{end}")
}
}
/// Serializes as the human-readable `start-end` string form (its `Display`
/// output), rather than as a structured range.
impl serde::Serialize for Pages {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
/// Deserializes from the same `start-end` string form produced by
/// `Serialize`, reusing the `FromStr` parser for validation.
impl<'de> serde::Deserialize<'de> for Pages {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;
        value.parse().map_err(serde::de::Error::custom)
    }
}
/// Parses a single one-based page number, with a dedicated message for the
/// invalid page `0`.
fn parse_page_number(value: &str) -> Result<NonZeroUsize, &'static str> {
    match value {
        "0" => Err("page numbers start at one"),
        other => other
            .parse::<NonZeroUsize>()
            .map_err(|_| "not a valid page number"),
    }
}
/// A resource path. /// A resource path.
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ResourcePath(EcoString, String); pub struct ResourcePath(EcoString, String);
@ -413,6 +304,7 @@ impl<'de> serde::Deserialize<'de> for ResourcePath {
} }
impl ResourcePath { impl ResourcePath {
/// Creates a new resource path from a user passing system path.
pub fn from_user_sys(inp: &Path) -> Self { pub fn from_user_sys(inp: &Path) -> Self {
let rel = if inp.is_relative() { let rel = if inp.is_relative() {
inp.to_path_buf() inp.to_path_buf()
@ -423,7 +315,7 @@ impl ResourcePath {
let rel = unix_slash(&rel); let rel = unix_slash(&rel);
ResourcePath("file".into(), rel.to_string()) ResourcePath("file".into(), rel.to_string())
} }
/// Creates a new resource path from a file id.
pub fn from_file_id(id: FileId) -> Self { pub fn from_file_id(id: FileId) -> Self {
let package = id.package(); let package = id.package();
match package { match package {
@ -437,7 +329,7 @@ impl ResourcePath {
), ),
} }
} }
/// Converts the resource path to an absolute file system path.
pub fn to_abs_path(&self, rel: &Path) -> Option<PathBuf> { pub fn to_abs_path(&self, rel: &Path) -> Option<PathBuf> {
if self.0 == "file" { if self.0 == "file" {
let path = Path::new(&self.1); let path = Path::new(&self.1);
@ -452,6 +344,60 @@ impl ResourcePath {
} }
} }
/// A lock file compatibility wrapper, dispatching on the `version` tag so
/// that unknown schemas can still be inspected and reported instead of
/// failing to deserialize outright.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case", tag = "version")]
pub enum LockFileCompat {
    /// The lock file schema with version 0.1.0-beta0.
    #[serde(rename = "0.1.0-beta0")]
    Version010Beta0(LockFile),
    /// Other lock file schema, kept as raw JSON so its `version` field can
    /// still be read for diagnostics.
    #[serde(untagged)]
    Other(serde_json::Value),
}
impl LockFileCompat {
    /// Returns the lock file's schema version string.
    pub fn version(&self) -> Result<&str> {
        match self {
            Self::Version010Beta0(..) => Ok(LOCK_VERSION),
            Self::Other(value) => {
                let version = value.get("version").and_then(|v| v.as_str());
                version.context("missing version field")
            }
        }
    }

    /// Migrates the lock file to the current version, failing for schemas
    /// this build cannot migrate.
    pub fn migrate(self) -> Result<LockFile> {
        if let Self::Version010Beta0(lock) = self {
            return Ok(lock);
        }
        bail!(
            "cannot migrate from version: {}",
            self.version().unwrap_or("unknown version")
        )
    }
}
/// A lock file storing project information.
///
/// Empty collections are omitted from serialization to keep the emitted
/// `tinymist.lock` minimal.
#[derive(Debug, Default, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct LockFile {
    // The lock file version is emitted manually by `serialize_resolve` and
    // consumed through `LockFileCompat`'s `version` tag, hence no field here.
    // version: String,
    /// The project's document (input).
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub document: Vec<ProjectInput>,
    /// The project's task (output).
    #[serde(skip_serializing_if = "Vec::is_empty", default)]
    pub task: Vec<ProjectTask>,
    /// The project's task route.
    #[serde(skip_serializing_if = "EcoVec::is_empty", default)]
    pub route: EcoVec<ProjectRoute>,
}
/// A project input specifier. /// A project input specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
@ -503,6 +449,8 @@ pub struct ProjectPathMaterial {
} }
impl ProjectPathMaterial { impl ProjectPathMaterial {
/// Creates a new project path material from a document ID and a list of
/// files.
pub fn from_deps(doc_id: Id, files: EcoVec<ImmutPath>) -> Self { pub fn from_deps(doc_id: Id, files: EcoVec<ImmutPath>) -> Self {
let mut files: Vec<_> = files.into_iter().map(|p| p.as_ref().to_owned()).collect(); let mut files: Vec<_> = files.into_iter().map(|p| p.as_ref().to_owned()).collect();
files.sort(); files.sort();
@ -524,22 +472,3 @@ pub struct ProjectRoute {
/// The priority of the project. (lower numbers are higher priority). /// The priority of the project. (lower numbers are higher priority).
pub priority: u32, pub priority: u32,
} }
/// A borrowed version string compared via semver semantics, used for
/// checking lock file version compatibility.
struct Version<'a>(&'a str);

impl PartialEq for Version<'_> {
    /// Two versions are equal only when both parse as semver and compare
    /// equal; an unparsable version is equal to nothing.
    fn eq(&self, other: &Self) -> bool {
        semver::Version::parse(self.0)
            .ok()
            .and_then(|a| semver::Version::parse(other.0).ok().map(|b| a == b))
            .unwrap_or(false)
    }
}

impl PartialOrd for Version<'_> {
    /// Orders by parsed semver; yields `None` when either side fails to
    /// parse, which callers treat as "cannot compare".
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        let lhs = semver::Version::parse(self.0).ok()?;
        let rhs = semver::Version::parse(other.0).ok()?;
        Some(lhs.cmp(&rhs))
    }
}

View file

@ -1,13 +1,37 @@
//! Project task models.
use std::hash::Hash; use std::hash::Hash;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tinymist_derive::toml_model;
use super::{Id, Pages, PdfStandard, Scalar, TaskWhen}; use super::{Id, Pages, PdfStandard, Scalar, TaskWhen};
/// A project task specifier. /// A project task specifier. This is used for specifying tasks in a project.
#[toml_model] /// When the language service notifies an update event of the project, it will
#[serde(tag = "type")] /// check whether any associated tasks need to be run.
///
/// Each task can have different timing and conditions for running. See
/// [`TaskWhen`] for more information.
///
/// The available task types listed in the [`ProjectTask`] only represent the
/// direct formats supported by the typst compiler. More task types can be
/// customized by the [`ExportTransform`].
///
/// ## Examples
///
/// Export a JSON file with the pdfpc notes of the document:
///
/// ```bash
/// tinymist project query main.typ --format json --selector "<pdfpc-notes>" --field value --one
/// ```
///
/// Export a PDF file and then runs a ghostscript command to compress it:
///
/// ```bash
/// tinymist project compile main.typ --pipe 'import "@local/postprocess:0.0.1": ghostscript; ghostscript(output.path)'
/// ```
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub enum ProjectTask { pub enum ProjectTask {
/// A preview task. /// A preview task.
Preview(PreviewTask), Preview(PreviewTask),
@ -46,7 +70,7 @@ impl ProjectTask {
} }
} }
/// Returns the task's ID. /// Returns the document's ID.
pub fn id(&self) -> &Id { pub fn id(&self) -> &Id {
match self { match self {
ProjectTask::Preview(task) => &task.id, ProjectTask::Preview(task) => &task.id,
@ -62,23 +86,26 @@ impl ProjectTask {
} }
} }
/// An lsp task specifier. /// A preview task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct PreviewTask { pub struct PreviewTask {
/// The task's ID. /// The task's ID.
pub id: Id, pub id: Id,
/// The doc's ID. /// The document's ID.
pub document: Id, pub document: Id,
/// When to run the task /// When to run the task. See [`TaskWhen`] for more
/// information.
pub when: TaskWhen, pub when: TaskWhen,
} }
/// An export task specifier. /// An export task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportTask { pub struct ExportTask {
/// The task's ID. /// The task's ID.
pub id: Id, pub id: Id,
/// The doc's ID. /// The document's ID.
pub document: Id, pub document: Id,
/// When to run the task /// When to run the task
pub when: TaskWhen, pub when: TaskWhen,
@ -88,7 +115,8 @@ pub struct ExportTask {
} }
/// A project export transform specifier. /// A project export transform specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum ExportTransform { pub enum ExportTransform {
/// Only pick a subset of pages. /// Only pick a subset of pages.
Pages { Pages {
@ -100,27 +128,35 @@ pub enum ExportTransform {
/// The gap between pages (in pt). /// The gap between pages (in pt).
gap: Scalar, gap: Scalar,
}, },
/// Execute a transform script.
Script {
/// The postprocess script (typst script) to run.
#[serde(skip_serializing_if = "Option::is_none", default)]
script: Option<String>,
},
/// Uses a pretty printer to format the output. /// Uses a pretty printer to format the output.
Pretty { Pretty {
/// The pretty printer id provided by editor. /// The pretty command (typst script) to run.
///
/// If not provided, the default pretty printer will be used. /// If not provided, the default pretty printer will be used.
/// Note: the builtin one may be only effective for json outputs. /// Note: the builtin one may be only effective for json outputs.
#[serde(skip_serializing_if = "Option::is_none", default)] #[serde(skip_serializing_if = "Option::is_none", default)]
id: Option<String>, script: Option<String>,
}, },
} }
/// An export pdf task specifier. /// An export pdf task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportPdfTask { pub struct ExportPdfTask {
/// The shared export arguments /// The shared export arguments.
#[serde(flatten)] #[serde(flatten)]
pub export: ExportTask, pub export: ExportTask,
/// The pdf standards. /// One (or multiple comma-separated) PDF standards that Typst will enforce
/// conformance with.
#[serde(skip_serializing_if = "Vec::is_empty", default)] #[serde(skip_serializing_if = "Vec::is_empty", default)]
pub pdf_standards: Vec<PdfStandard>, pub pdf_standards: Vec<PdfStandard>,
/// The document's creation date formatted as a UNIX timestamp (in second /// The document's creation date formatted as a UNIX timestamp (in seconds).
/// unit).
/// ///
/// For more information, see <https://reproducible-builds.org/specs/source-date-epoch/>. /// For more information, see <https://reproducible-builds.org/specs/source-date-epoch/>.
#[serde(skip_serializing_if = "Option::is_none", default)] #[serde(skip_serializing_if = "Option::is_none", default)]
@ -128,14 +164,15 @@ pub struct ExportPdfTask {
} }
/// An export png task specifier. /// An export png task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportPngTask { pub struct ExportPngTask {
/// The shared export arguments /// The shared export arguments.
#[serde(flatten)] #[serde(flatten)]
pub export: ExportTask, pub export: ExportTask,
/// The PPI (pixels per inch) to use for PNG export. /// The PPI (pixels per inch) to use for PNG export.
pub ppi: Scalar, pub ppi: Scalar,
/// The background fill color (in typst script). /// The expression constructing background fill color (in typst script).
/// e.g. `#ffffff`, `#000000`, `rgba(255, 255, 255, 0.5)`. /// e.g. `#ffffff`, `#000000`, `rgba(255, 255, 255, 0.5)`.
/// ///
/// If not provided, the default background color specified in the document /// If not provided, the default background color specified in the document
@ -145,46 +182,52 @@ pub struct ExportPngTask {
} }
/// An export svg task specifier. /// An export svg task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportSvgTask { pub struct ExportSvgTask {
/// The shared export arguments /// The shared export arguments.
#[serde(flatten)] #[serde(flatten)]
pub export: ExportTask, pub export: ExportTask,
} }
/// An export html task specifier. /// An export html task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportHtmlTask { pub struct ExportHtmlTask {
/// The shared export arguments /// The shared export arguments.
#[serde(flatten)] #[serde(flatten)]
pub export: ExportTask, pub export: ExportTask,
} }
/// An export markdown task specifier. /// An export markdown task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportMarkdownTask { pub struct ExportMarkdownTask {
/// The shared export arguments /// The shared export arguments.
#[serde(flatten)] #[serde(flatten)]
pub export: ExportTask, pub export: ExportTask,
} }
/// An export text task specifier. /// An export text task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportTextTask { pub struct ExportTextTask {
/// The shared export arguments /// The shared export arguments.
#[serde(flatten)] #[serde(flatten)]
pub export: ExportTask, pub export: ExportTask,
} }
/// An export query task specifier. /// An export query task specifier.
#[toml_model] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct QueryTask { pub struct QueryTask {
/// The shared export arguments /// The shared export arguments.
#[serde(flatten)] #[serde(flatten)]
pub export: ExportTask, pub export: ExportTask,
/// The format to serialize in. Can be `json`, `yaml`, or `txt`, /// The format to serialize in. Can be `json`, `yaml`, or `txt`,
pub format: String, pub format: String,
/// Specify a different output extension than the format. /// Uses a different output extension from the one inferring from the
/// [`Self::format`].
pub output_extension: String, pub output_extension: String,
/// Defines which elements to retrieve. /// Defines which elements to retrieve.
pub selector: String, pub selector: String,

View file

@ -513,6 +513,7 @@ fn log_send_error<T>(chan: &'static str, res: Result<(), mpsc::error::SendError<
.is_ok() .is_ok()
} }
/// Watches on a set of *files*.
pub async fn watch_deps( pub async fn watch_deps(
inbox: mpsc::UnboundedReceiver<NotifyMessage>, inbox: mpsc::UnboundedReceiver<NotifyMessage>,
interrupted_by_events: impl FnMut(FilesystemEvent) + Send + Sync + 'static, interrupted_by_events: impl FnMut(FilesystemEvent) + Send + Sync + 'static,

View file

@ -1,36 +1,34 @@
//! World implementation of typst for tinymist. //! World implementation of typst for tinymist.
pub use tinymist_std::error::prelude;
pub use tinymist_world as base; pub use tinymist_world as base;
pub use tinymist_world::args::*; pub use tinymist_world::args::*;
pub use tinymist_world::config::CompileFontOpts; pub use tinymist_world::config::CompileFontOpts;
use tinymist_world::package::RegistryPathMapper; pub use tinymist_world::entry::*;
pub use tinymist_world::vfs; pub use tinymist_world::{font, package, vfs};
pub use tinymist_world::{entry::*, EntryOpts, EntryState};
pub use tinymist_world::{ pub use tinymist_world::{
font, package, CompilerUniverse, CompilerWorld, RevisingUniverse, TaskInputs, CompilerUniverse, CompilerWorld, EntryOpts, EntryState, RevisingUniverse, TaskInputs,
}; };
use std::path::Path; use std::path::Path;
use std::{borrow::Cow, sync::Arc}; use std::{borrow::Cow, sync::Arc};
use ::typst::utils::LazyHash;
use tinymist_std::error::prelude::*; use tinymist_std::error::prelude::*;
use tinymist_std::ImmutPath; use tinymist_std::ImmutPath;
use tinymist_world::font::system::SystemFontSearcher; use tinymist_world::font::system::SystemFontSearcher;
use tinymist_world::package::http::HttpRegistry; use tinymist_world::package::{http::HttpRegistry, RegistryPathMapper};
use tinymist_world::vfs::{system::SystemAccessModel, Vfs}; use tinymist_world::vfs::{system::SystemAccessModel, Vfs};
use tinymist_world::CompilerFeat; use tinymist_world::CompilerFeat;
use typst::foundations::{Dict, Str, Value}; use typst::foundations::{Dict, Str, Value};
use typst::utils::LazyHash;
use crate::font::TinymistFontResolver; use crate::font::TinymistFontResolver;
/// Compiler feature for LSP universe and worlds without typst.ts to implement /// Compiler feature for LSP universe and worlds without typst.ts to implement
/// more for tinymist. type trait of [`CompilerUniverse`]. /// more for tinymist. type trait of [`CompilerUniverse`].
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct SystemCompilerFeatExtend; pub struct LspCompilerFeat;
impl CompilerFeat for SystemCompilerFeatExtend { impl CompilerFeat for LspCompilerFeat {
/// Uses [`TinymistFontResolver`] directly. /// Uses [`TinymistFontResolver`] directly.
type FontResolver = TinymistFontResolver; type FontResolver = TinymistFontResolver;
/// It accesses a physical file system. /// It accesses a physical file system.
@ -39,11 +37,14 @@ impl CompilerFeat for SystemCompilerFeatExtend {
type Registry = HttpRegistry; type Registry = HttpRegistry;
} }
/// The compiler universe in system environment. /// LSP universe that spawns LSP worlds.
pub type TypstSystemUniverseExtend = CompilerUniverse<SystemCompilerFeatExtend>; pub type LspUniverse = CompilerUniverse<LspCompilerFeat>;
/// The compiler world in system environment. /// LSP world that holds compilation resources
pub type TypstSystemWorldExtend = CompilerWorld<SystemCompilerFeatExtend>; pub type LspWorld = CompilerWorld<LspCompilerFeat>;
/// Immutable prehashed reference to dictionary.
pub type ImmutDict = Arc<LazyHash<Dict>>;
/// World provider for LSP universe and worlds.
pub trait WorldProvider { pub trait WorldProvider {
/// Get the entry options from the arguments. /// Get the entry options from the arguments.
fn entry(&self) -> Result<EntryOpts>; fn entry(&self) -> Result<EntryOpts>;
@ -113,15 +114,6 @@ impl WorldProvider for CompileOnceArgs {
} }
} }
/// Compiler feature for LSP universe and worlds.
pub type LspCompilerFeat = SystemCompilerFeatExtend;
/// LSP universe that spawns LSP worlds.
pub type LspUniverse = TypstSystemUniverseExtend;
/// LSP world.
pub type LspWorld = TypstSystemWorldExtend;
/// Immutable prehashed reference to dictionary.
pub type ImmutDict = Arc<LazyHash<Dict>>;
/// Builder for LSP universe. /// Builder for LSP universe.
pub struct LspUniverseBuilder; pub struct LspUniverseBuilder;

View file

@ -1,10 +1,8 @@
use std::borrow::{Borrow, Cow}; use std::borrow::{Borrow, Cow};
use serde::{Deserializer, Serializer}; use serde::{Deserializer, Serializer};
use serde_with::{ use serde_with::base64::{Base64, Standard};
base64::{Base64, Standard}, use serde_with::formats::Padded;
formats::Padded,
};
use serde_with::{DeserializeAs, SerializeAs}; use serde_with::{DeserializeAs, SerializeAs};
/// A marker type for serializing and deserializing `Cow<[u8]>` as base64. /// A marker type for serializing and deserializing `Cow<[u8]>` as base64.

View file

@ -13,11 +13,12 @@ use std::io;
use std::io::{Read, Seek, SeekFrom, Write}; use std::io::{Read, Seek, SeekFrom, Write};
use std::path::{Display, Path, PathBuf}; use std::path::{Display, Path, PathBuf};
use self::sys::*;
use super::paths;
use anyhow::Context as _; use anyhow::Context as _;
use anyhow::Result; use anyhow::Result;
use self::sys::*;
use super::paths;
/// A locked file. /// A locked file.
/// ///
/// This provides access to file while holding a lock on the file. This type /// This provides access to file while holding a lock on the file. This type

View file

@ -1,8 +1,7 @@
//! Cross platform time utilities. //! Cross platform time utilities.
pub use std::time::SystemTime as Time; pub use std::time::SystemTime as Time;
pub use web_time::Duration; pub use web_time::{Duration, Instant};
pub use web_time::Instant;
/// Returns the current system time (UTC+0). /// Returns the current system time (UTC+0).
#[cfg(any(feature = "system", feature = "web"))] #[cfg(any(feature = "system", feature = "web"))]

View file

@ -471,8 +471,6 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
} }
/// Reset the shadowing files in [`OverlayAccessModel`]. /// Reset the shadowing files in [`OverlayAccessModel`].
///
/// Note: This function is independent from [`Vfs::reset`].
pub fn reset_shadow(&mut self) { pub fn reset_shadow(&mut self) {
for path in self.am().inner.inner.file_paths() { for path in self.am().inner.inner.file_paths() {
self.invalidate_path(&path); self.invalidate_path(&path);

View file

@ -1,9 +1,9 @@
use std::{fs::File, io::Read, path::Path}; use std::{fs::File, io::Read, path::Path};
use tinymist_std::ReadAllOnce;
use typst::diag::{FileError, FileResult}; use typst::diag::{FileError, FileResult};
use crate::{Bytes, PathAccessModel}; use crate::{Bytes, PathAccessModel};
use tinymist_std::ReadAllOnce;
/// Provides SystemAccessModel that makes access to the local file system for /// Provides SystemAccessModel that makes access to the local file system for
/// system compilation. /// system compilation.

View file

@ -11,11 +11,11 @@
//! ``` //! ```
//! //!
//! We use typst by creating a [`ProjectCompiler`] and //! We use typst by creating a [`ProjectCompiler`] and
//! running compiler with callbacking [`LspProjectHandler`] incrementally. An //! running compiler with callbacking [`CompileHandlerImpl`] incrementally. An
//! additional [`LocalCompileHandler`] is also created to control the //! additional [`LocalCompileHandler`] is also created to control the
//! [`ProjectCompiler`]. //! [`ProjectCompiler`].
//! //!
//! The [`LspProjectHandler`] will push information to other actors. //! The [`CompileHandlerImpl`] will push information to other actors.
#![allow(missing_docs)] #![allow(missing_docs)]
@ -82,7 +82,7 @@ impl LspPreviewState {
#[derive(Default)] #[derive(Default)]
pub struct ProjectStateExt { pub struct ProjectStateExt {
pub is_compiling: bool, pub is_compiling: bool,
pub last_compilation: Option<CompiledArtifact<LspCompilerFeat>>, pub last_compilation: Option<LspCompiledArtifact>,
} }
/// LSP project compiler. /// LSP project compiler.
@ -309,7 +309,7 @@ impl CompileHandler<LspCompilerFeat, ProjectStateExt> for CompileHandlerImpl {
} }
} }
fn notify_compile(&self, snap: &CompiledArtifact<LspCompilerFeat>, rep: CompileReport) { fn notify_compile(&self, snap: &LspCompiledArtifact, rep: CompileReport) {
// todo: we need to manage the revision for fn status() as well // todo: we need to manage the revision for fn status() as well
{ {
let mut n_rev = self.notified_revision.lock(); let mut n_rev = self.notified_revision.lock();
@ -361,12 +361,12 @@ pub struct QuerySnapWithStat {
} }
pub struct WorldSnapFut { pub struct WorldSnapFut {
rx: oneshot::Receiver<CompileSnapshot<LspCompilerFeat>>, rx: oneshot::Receiver<LspCompileSnapshot>,
} }
impl WorldSnapFut { impl WorldSnapFut {
/// wait for the snapshot to be ready /// wait for the snapshot to be ready
pub async fn receive(self) -> Result<CompileSnapshot<LspCompilerFeat>> { pub async fn receive(self) -> Result<LspCompileSnapshot> {
self.rx self.rx
.await .await
.map_err(map_string_err("failed to get snapshot")) .map_err(map_string_err("failed to get snapshot"))
@ -392,13 +392,13 @@ impl QuerySnapFut {
} }
pub struct QuerySnap { pub struct QuerySnap {
pub snap: CompileSnapshot<LspCompilerFeat>, pub snap: LspCompileSnapshot,
analysis: Arc<Analysis>, analysis: Arc<Analysis>,
rev_lock: AnalysisRevLock, rev_lock: AnalysisRevLock,
} }
impl std::ops::Deref for QuerySnap { impl std::ops::Deref for QuerySnap {
type Target = CompileSnapshot<LspCompilerFeat>; type Target = LspCompileSnapshot;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
&self.snap &self.snap

View file

@ -2,9 +2,7 @@ use std::{path::Path, sync::Arc};
use reflexo_typst::{path::unix_slash, typst::prelude::EcoVec, LazyHash}; use reflexo_typst::{path::unix_slash, typst::prelude::EcoVec, LazyHash};
use rpds::RedBlackTreeMapSync; use rpds::RedBlackTreeMapSync;
use tinymist_project::{ use tinymist_project::{Id, LockFile, LspCompileSnapshot, ProjectPathMaterial, ProjectRoute};
CompileSnapshot, Id, LockFile, LspCompilerFeat, ProjectPathMaterial, ProjectRoute,
};
use tinymist_query::LspWorldExt; use tinymist_query::LspWorldExt;
use tinymist_std::{hash::FxHashMap, ImmutPath}; use tinymist_std::{hash::FxHashMap, ImmutPath};
use typst::diag::EcoString; use typst::diag::EcoString;
@ -94,7 +92,7 @@ impl ProjectRouteState {
pub fn update_existing_material( pub fn update_existing_material(
&mut self, &mut self,
lock_dir: ImmutPath, lock_dir: ImmutPath,
snap: &CompileSnapshot<LspCompilerFeat>, snap: &LspCompileSnapshot,
) -> Option<()> { ) -> Option<()> {
let path_route = self.path_routes.get_mut(&lock_dir)?; let path_route = self.path_routes.get_mut(&lock_dir)?;

View file

@ -11,7 +11,6 @@ use lsp_server::RequestId;
use lsp_types::request::{GotoDeclarationParams, WorkspaceConfiguration}; use lsp_types::request::{GotoDeclarationParams, WorkspaceConfiguration};
use lsp_types::*; use lsp_types::*;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use prelude::*;
use project::world::EntryState; use project::world::EntryState;
use project::{watch_deps, LspPreviewState}; use project::{watch_deps, LspPreviewState};
use project::{CompileHandlerImpl, Project, QuerySnapFut, QuerySnapWithStat, WorldSnapFut}; use project::{CompileHandlerImpl, Project, QuerySnapFut, QuerySnapWithStat, WorldSnapFut};
@ -24,7 +23,7 @@ use sync_lsp::*;
use task::{ use task::{
ExportConfig, ExportTask, ExportUserConfig, FormatTask, FormatterConfig, UserActionTask, ExportConfig, ExportTask, ExportUserConfig, FormatTask, FormatterConfig, UserActionTask,
}; };
use tinymist_project::{CompileSnapshot, EntryResolver, ProjectInsId, ProjectResolutionKind}; use tinymist_project::{EntryResolver, LspCompileSnapshot, ProjectInsId, ProjectResolutionKind};
use tinymist_query::analysis::{Analysis, PeriscopeProvider}; use tinymist_query::analysis::{Analysis, PeriscopeProvider};
use tinymist_query::{ use tinymist_query::{
to_typst_range, CompilerQueryRequest, CompilerQueryResponse, ExportKind, FoldRequestFeature, to_typst_range, CompilerQueryRequest, CompilerQueryResponse, ExportKind, FoldRequestFeature,
@ -32,7 +31,8 @@ use tinymist_query::{
ServerInfoResponse, SyntaxRequest, VersionedDocument, ServerInfoResponse, SyntaxRequest, VersionedDocument,
}; };
use tinymist_render::PeriscopeRenderer; use tinymist_render::PeriscopeRenderer;
use tinymist_std::{Error, ImmutPath}; use tinymist_std::error::prelude::*;
use tinymist_std::ImmutPath;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use typst::layout::Position as TypstPosition; use typst::layout::Position as TypstPosition;
use typst::{diag::FileResult, syntax::Source}; use typst::{diag::FileResult, syntax::Source};
@ -844,7 +844,7 @@ impl LanguageState {
impl LanguageState { impl LanguageState {
/// Focus main file to some path. /// Focus main file to some path.
pub fn change_entry(&mut self, path: Option<ImmutPath>) -> Result<bool, Error> { pub fn change_entry(&mut self, path: Option<ImmutPath>) -> Result<bool> {
if path if path
.as_deref() .as_deref()
.is_some_and(|p| !p.is_absolute() && !p.starts_with("/untitled")) .is_some_and(|p| !p.is_absolute() && !p.starts_with("/untitled"))
@ -865,7 +865,7 @@ impl LanguageState {
} }
/// Pin the entry to the given path /// Pin the entry to the given path
pub fn pin_entry(&mut self, new_entry: Option<ImmutPath>) -> Result<(), Error> { pub fn pin_entry(&mut self, new_entry: Option<ImmutPath>) -> Result<()> {
self.pinning = new_entry.is_some(); self.pinning = new_entry.is_some();
let entry = new_entry let entry = new_entry
.or_else(|| self.entry_resolver().resolve_default()) .or_else(|| self.entry_resolver().resolve_default())
@ -874,7 +874,7 @@ impl LanguageState {
} }
/// Updates the primary (focusing) entry /// Updates the primary (focusing) entry
pub fn focus_entry(&mut self, new_entry: Option<ImmutPath>) -> Result<bool, Error> { pub fn focus_entry(&mut self, new_entry: Option<ImmutPath>) -> Result<bool> {
if self.pinning || self.config.compile.has_default_entry_path { if self.pinning || self.config.compile.has_default_entry_path {
self.focusing = new_entry; self.focusing = new_entry;
return Ok(false); return Ok(false);
@ -1063,7 +1063,7 @@ impl LanguageState {
let lock_dir = self.compile_config().entry_resolver.resolve_lock(&entry); let lock_dir = self.compile_config().entry_resolver.resolve_lock(&entry);
let update_dep = lock_dir.clone().map(|lock_dir| { let update_dep = lock_dir.clone().map(|lock_dir| {
|snap: CompileSnapshot<LspCompilerFeat>| async move { |snap: LspCompileSnapshot| async move {
let mut updater = update_lock(lock_dir); let mut updater = update_lock(lock_dir);
let world = snap.world.clone(); let world = snap.world.clone();
let doc_id = updater.compiled(&world)?; let doc_id = updater.compiled(&world)?;
@ -1287,14 +1287,14 @@ impl PeriscopeProvider for TypstPeriscopeProvider {
} }
impl LanguageState { impl LanguageState {
fn update_source(&mut self, files: FileChangeSet) -> Result<(), Error> { fn update_source(&mut self, files: FileChangeSet) -> Result<()> {
self.add_memory_changes(MemoryEvent::Update(files.clone())); self.add_memory_changes(MemoryEvent::Update(files.clone()));
Ok(()) Ok(())
} }
/// Create a new source file. /// Create a new source file.
pub fn create_source(&mut self, path: PathBuf, content: String) -> Result<(), Error> { pub fn create_source(&mut self, path: PathBuf, content: String) -> Result<()> {
let path: ImmutPath = path.into(); let path: ImmutPath = path.into();
log::info!("create source: {path:?}"); log::info!("create source: {path:?}");
@ -1314,7 +1314,7 @@ impl LanguageState {
} }
/// Remove a source file. /// Remove a source file.
pub fn remove_source(&mut self, path: PathBuf) -> Result<(), Error> { pub fn remove_source(&mut self, path: PathBuf) -> Result<()> {
let path: ImmutPath = path.into(); let path: ImmutPath = path.into();
self.memory_changes.remove(&path); self.memory_changes.remove(&path);
@ -1332,7 +1332,7 @@ impl LanguageState {
path: PathBuf, path: PathBuf,
content: Vec<TextDocumentContentChangeEvent>, content: Vec<TextDocumentContentChangeEvent>,
position_encoding: PositionEncoding, position_encoding: PositionEncoding,
) -> Result<(), Error> { ) -> Result<()> {
let path: ImmutPath = path.into(); let path: ImmutPath = path.into();
let meta = self let meta = self

View file

@ -10,7 +10,7 @@ use crate::project::{
use anyhow::{bail, Context}; use anyhow::{bail, Context};
use reflexo::ImmutPath; use reflexo::ImmutPath;
use reflexo_typst::TypstDatetime; use reflexo_typst::TypstDatetime;
use tinymist_project::{CompileSnapshot, EntryReader}; use tinymist_project::{EntryReader, LspCompileSnapshot, LspCompiledArtifact};
use tinymist_query::{ExportKind, PageSelection}; use tinymist_query::{ExportKind, PageSelection};
use tokio::sync::mpsc; use tokio::sync::mpsc;
use typlite::Typlite; use typlite::Typlite;
@ -23,9 +23,7 @@ use typst::{
use typst_pdf::PdfOptions; use typst_pdf::PdfOptions;
use crate::tool::text::FullTextDigest; use crate::tool::text::FullTextDigest;
use crate::{ use crate::{actor::editor::EditorRequest, tool::word_count, ExportMode, PathPattern};
actor::editor::EditorRequest, tool::word_count, world::LspCompilerFeat, ExportMode, PathPattern,
};
use super::*; use super::*;
@ -60,7 +58,7 @@ impl ExportTask {
self.factory.mutate(|data| data.config = config); self.factory.mutate(|data| data.config = config);
} }
pub fn signal(&self, snap: &CompiledArtifact<LspCompilerFeat>, s: ExportSignal) { pub fn signal(&self, snap: &LspCompiledArtifact, s: ExportSignal) {
let task = self.factory.task(); let task = self.factory.task();
task.signal(snap, s, self); task.signal(snap, s, self);
} }
@ -68,7 +66,7 @@ impl ExportTask {
pub struct ExportOnceTask<'a> { pub struct ExportOnceTask<'a> {
pub kind: &'a ExportKind, pub kind: &'a ExportKind,
pub artifact: CompiledArtifact<LspCompilerFeat>, pub artifact: LspCompiledArtifact,
pub lock_path: Option<ImmutPath>, pub lock_path: Option<ImmutPath>,
} }
@ -82,19 +80,14 @@ pub struct ExportConfig {
} }
impl ExportConfig { impl ExportConfig {
fn signal( fn signal(self: Arc<Self>, snap: &LspCompiledArtifact, s: ExportSignal, t: &ExportTask) {
self: Arc<Self>,
snap: &CompiledArtifact<LspCompilerFeat>,
s: ExportSignal,
t: &ExportTask,
) {
self.signal_export(snap, s, t); self.signal_export(snap, s, t);
self.signal_count_word(snap, t); self.signal_count_word(snap, t);
} }
fn signal_export( fn signal_export(
self: &Arc<Self>, self: &Arc<Self>,
artifact: &CompiledArtifact<LspCompilerFeat>, artifact: &LspCompiledArtifact,
s: ExportSignal, s: ExportSignal,
t: &ExportTask, t: &ExportTask,
) -> Option<()> { ) -> Option<()> {
@ -134,11 +127,7 @@ impl ExportConfig {
Some(()) Some(())
} }
fn signal_count_word( fn signal_count_word(&self, artifact: &LspCompiledArtifact, t: &ExportTask) -> Option<()> {
&self,
artifact: &CompiledArtifact<LspCompilerFeat>,
t: &ExportTask,
) -> Option<()> {
if !self.count_words { if !self.count_words {
return None; return None;
} }
@ -393,7 +382,7 @@ impl ExportConfig {
pub async fn oneshot( pub async fn oneshot(
&self, &self,
snap: CompileSnapshot<LspCompilerFeat>, snap: LspCompileSnapshot,
kind: ExportKind, kind: ExportKind,
lock_path: Option<ImmutPath>, lock_path: Option<ImmutPath>,
) -> anyhow::Result<Option<PathBuf>> { ) -> anyhow::Result<Option<PathBuf>> {

View file

@ -32,10 +32,9 @@ use typst_preview::{
use typst_shim::syntax::LinkedNodeExt; use typst_shim::syntax::LinkedNodeExt;
use crate::project::{ use crate::project::{
CompileHandlerImpl, CompileServerOpts, CompiledArtifact, LspInterrupt, ProjectClient, CompileHandlerImpl, CompileServerOpts, LspCompiledArtifact, LspInterrupt, ProjectClient,
ProjectCompiler, ProjectCompiler,
}; };
use crate::world::LspCompilerFeat;
use crate::*; use crate::*;
use actor::preview::{PreviewActor, PreviewRequest, PreviewTab}; use actor::preview::{PreviewActor, PreviewRequest, PreviewTab};
use project::world::vfs::{notify::MemoryEvent, FileChangeSet}; use project::world::vfs::{notify::MemoryEvent, FileChangeSet};
@ -44,7 +43,7 @@ use project::{watch_deps, LspPreviewState};
/// The preview's view of the compiled artifact. /// The preview's view of the compiled artifact.
pub struct PreviewCompileView { pub struct PreviewCompileView {
/// The artifact and snap. /// The artifact and snap.
pub snap: CompiledArtifact<LspCompilerFeat>, pub snap: LspCompiledArtifact,
} }
impl typst_preview::CompileView for PreviewCompileView { impl typst_preview::CompileView for PreviewCompileView {

View file

@ -2,8 +2,9 @@
use std::path::Path; use std::path::Path;
use tinymist_std::error::prelude::*;
use crate::project::*; use crate::project::*;
use prelude::Result;
trait LockFileExt { trait LockFileExt {
fn declare(&mut self, args: &DocNewArgs) -> Id; fn declare(&mut self, args: &DocNewArgs) -> Id;