mirror of https://github.com/Myriad-Dreamin/tinymist.git, synced 2025-08-03 01:42:14 +00:00
dev: refactor tasks (#411)
* dev: improve export task
* aba aba
* dev: less indent
* dev: reorder
* dev: reduce some clone
* dev: improve format task
* fix: documentation links
* dev: doesn't rely on hash entry state
This commit is contained in: parent 69c50fef4a, commit 6a142dca01
12 changed files with 276 additions and 308 deletions
@@ -18,8 +18,9 @@ use crate::prelude::*;
/// This request was introduced in specification version 3.6.0.
///
/// This request has no special capabilities and registration options since it
/// is sent as a resolve request for the
/// [`textDocument/documentColor`](Self::document_color) request.
/// is sent as a resolve request for the [`textDocument/documentColor`] request.
///
/// [`textDocument/documentColor`]: https://microsoft.github.io/language-server-protocol/specification#textDocument_documentColor
#[derive(Debug, Clone)]
pub struct ColorPresentationRequest {
/// The path of the document to request color presentations for.
@@ -5,9 +5,9 @@ use crate::{prelude::*, SyntaxRequest};
/// The [`experimental/onEnter`] request is sent from client to server to handle
/// the <kbd>Enter</kbd> key press.
///
/// - kbd:[Enter] inside triple-slash comments automatically inserts `///`
/// - kbd:[Enter] in the middle or after a trailing space in `//` inserts `//`
/// - kbd:[Enter] inside `//!` doc comments automatically inserts `//!`
/// - `kbd:Enter` inside triple-slash comments automatically inserts `///`
/// - `kbd:Enter` in the middle or after a trailing space in `//` inserts `//`
/// - `kbd:Enter` inside `//!` doc comments automatically inserts `//!`
///
/// [`experimental/onEnter`]: https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/lsp-extensions.md#on-enter
///
@@ -15,8 +15,9 @@ pub struct ModuleDependency {

/// Construct the module dependencies of the given context.
///
/// It will scan all the files in the context, using [`AnalysisContext::files`],
/// and find the dependencies and dependents of each file.
/// It will scan all the files in the context, using
/// [`AnalysisContext::source_files`], and find the dependencies and dependents
/// of each file.
pub fn construct_module_dependencies(
ctx: &mut AnalysisContext,
) -> HashMap<TypstFileId, ModuleDependency> {
@@ -4,7 +4,7 @@
use std::collections::HashMap;

use log::info;
use lsp_types::{Diagnostic, Url};
use lsp_types::Url;
use tinymist_query::{DiagnosticsMap, LspDiagnostic};
use tokio::sync::mpsc;

@@ -115,7 +115,7 @@ impl EditorActor {
}
}

fn publish_inner(&mut self, group: &str, url: Url, next: Option<Vec<Diagnostic>>) {
fn publish_inner(&mut self, group: &str, url: Url, next: Option<Vec<LspDiagnostic>>) {
let mut to_publish = Vec::new();

// Get the diagnostics from other groups
@@ -20,7 +20,7 @@ use self::{
typ_server::CompileServerActor,
};
use crate::{
task::{ExportConfig, ExportTask, ExportTaskConf},
task::{ExportConfig, ExportTask, ExportUserConfig},
world::{ImmutDict, LspWorldBuilder},
LanguageState,
};

@@ -52,11 +52,11 @@ impl LanguageState {
let (intr_tx, intr_rx) = mpsc::unbounded_channel();

// Run Export actors before preparing cluster to avoid loss of events
let export = ExportTask::new(ExportTaskConf {
let export = ExportTask::new(ExportConfig {
group: editor_group.clone(),
editor_tx: Some(self.editor_tx.clone()),
config: ExportConfig {
substitute_pattern: self.compile_config().output_path.clone(),
config: ExportUserConfig {
output: self.compile_config().output_path.clone(),
mode: self.compile_config().export_pdf,
},
kind: ExportKind::Pdf,
@@ -10,20 +10,17 @@
//! └─────┬────────────────────▲─────┘ └────────────┘
//! │ │
//! ┌─────▼────────────────────┴─────┐ handler ┌────────────┐
//! │ compiler::compile_driver ├────────►│ rest actors│
//! │ compiler::compile_handler ├────────►│ rest actors│
//! └────────────────────────────────┘ └────────────┘
//! ```
//!
//! We generally use typst in two ways.
//! + creates a [`CompileDriver`] and run compilation in fly.
//! + creates a [`CompileServerActor`], wraps the driver, and runs
//! [`CompileDriver`] incrementally.
//! We use typst by creating a
//! [`CompileServerActor`][`crate::actor::typ_server::CompileServerActor`] and
//! running compiler with callbacking [`CompileHandler`] incrementally. An
//! additional [`CompileClientActor`] is also created to control the
//! [`CompileServerActor`][`crate::actor::typ_server::CompileServerActor`].
//!
//! For latter case, an additional [`CompileClientActor`] is created to
//! control the [`CompileServerActor`].
//!
//! The [`CompileDriver`] will also keep a [`CompileHandler`] to push
//! information to other actors.
//! The [`CompileHandler`] will push information to other actors.

use std::{
collections::HashMap,

@@ -61,7 +58,7 @@ use super::{
typ_server::{CompilationHandle, CompileSnapshot, CompiledArtifact, Interrupt},
};
use crate::{
task::{ExportConfig, ExportSignal, ExportTask},
task::{ExportTask, ExportUserConfig},
world::{LspCompilerFeat, LspWorld},
CompileConfig,
};

@@ -295,20 +292,12 @@ impl CompilationHandle<LspCompilerFeat> for CompileHandler {
snap.env.tracer.as_ref().map(|e| e.clone().warnings()),
);

if snap.flags.triggered_by_entry_update {
self.export.signal(snap, ExportSignal::EntryChanged);
} else if snap.flags.triggered_by_mem_events && snap.flags.triggered_by_fs_events {
self.export.signal(snap, ExportSignal::TypedAndSaved);
} else if snap.flags.triggered_by_mem_events {
self.export.signal(snap, ExportSignal::Typed);
} else if snap.flags.triggered_by_fs_events {
self.export.signal(snap, ExportSignal::Saved);
}

if let Ok(doc) = &snap.doc {
let _ = self.doc_tx.send(Some(doc.clone()));
}

self.export.signal(snap, snap.signal);

self.editor_tx
.send(EditorRequest::Status(
self.diag_group.clone(),

@@ -326,7 +315,7 @@ impl CompilationHandle<LspCompilerFeat> for CompileHandler {
.doc
.clone()
.map_err(|_| typst_preview::CompileStatus::CompileError);
inner.notify_compile(res, snap.flags.triggered_by_fs_events);
inner.notify_compile(res, snap.signal.by_fs_events);
}
}
}

@@ -368,25 +357,17 @@ impl CompileClientActor {
self.config = config;
}

pub(crate) fn change_export_config(&mut self, config: ExportConfig) {
pub(crate) fn change_export_config(&mut self, config: ExportUserConfig) {
self.handle.export.change_config(config);
}

pub fn on_export(&self, kind: ExportKind, path: PathBuf) -> QueryFuture {
let snap = self.snapshot()?;
let export = self.handle.export.task();

let entry = self.config.determine_entry(Some(path.as_path().into()));

let export = self.handle.export.oneshot(snap, Some(entry), kind);
just_future(async move {
let snap = snap.snapshot().await?;
let snap = snap.task(TaskInputs {
entry: Some(entry),
..Default::default()
});

let artifact = snap.compile().await;
let res = export.oneshot(&artifact, kind).await;
let res = export.await?;

log::info!("CompileActor: on export end: {path:?} as {res:?}");
Ok(tinymist_query::CompilerQueryResponse::OnExport(res))
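The module documentation in the hunk above describes the refactored shape: a `CompileServerActor` owns the incremental compiler, calls back into a `CompileHandler` after each compilation, and a `CompileClientActor` only sends control messages in. The sketch below shows that callback-plus-channel pattern in isolation. It is a minimal illustration, not tinymist's API: the names `Server`, `Handler`, and `Interrupt` are stand-ins, and only `tokio` is assumed.

```rust
use std::sync::Arc;
use tokio::sync::mpsc;

// Called by the server after every (re)compilation; forwards results onward.
trait Handler: Send + Sync {
    fn notify_compile(&self, artifact: String);
}

// Control messages sent by the "client" side.
enum Interrupt {
    Compile(String),
}

struct Server<H: Handler> {
    intr_rx: mpsc::UnboundedReceiver<Interrupt>,
    handler: Arc<H>,
}

impl<H: Handler> Server<H> {
    async fn run(mut self) {
        // React to interrupts, "compile", and push the artifact to the handler,
        // which is responsible for fanning it out to the other actors.
        while let Some(Interrupt::Compile(src)) = self.intr_rx.recv().await {
            let artifact = format!("compiled: {src}");
            self.handler.notify_compile(artifact);
        }
    }
}

struct PrintHandler;
impl Handler for PrintHandler {
    fn notify_compile(&self, artifact: String) {
        println!("{artifact}");
    }
}

#[tokio::main]
async fn main() {
    let (intr_tx, intr_rx) = mpsc::unbounded_channel();
    let server = Server { intr_rx, handler: Arc::new(PrintHandler) };
    tokio::spawn(server.run());
    // The client side keeps only the sender to control the server actor.
    intr_tx.send(Interrupt::Compile("main.typ".into())).unwrap();
    tokio::time::sleep(std::time::Duration::from_millis(50)).await;
}
```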
@@ -29,21 +29,23 @@ use typst_ts_core::{exporter_builtins::GroupExporter, Exporter, GenericExporter,
type CompileRawResult = Deferred<(SourceResult<Arc<TypstDocument>>, CompileEnv)>;
type DocState = once_cell::sync::OnceCell<CompileRawResult>;

#[derive(Clone, Copy)]
pub struct CompileFlags {
/// The compiler-thread local logical tick when the snapshot is taken.
pub compile_tick: usize,
/// A signal that possibly triggers an export.
///
/// Whether to export depends on the current state of the document and the user
/// settings.
#[derive(Debug, Clone, Copy)]
pub struct ExportSignal {
/// Whether the revision is annotated by memory events.
pub triggered_by_mem_events: bool,
pub by_mem_events: bool,
/// Whether the revision is annotated by file system events.
pub triggered_by_fs_events: bool,
pub by_fs_events: bool,
/// Whether the revision is annotated by entry update.
pub triggered_by_entry_update: bool,
pub by_entry_update: bool,
}

pub struct CompileSnapshot<F: CompilerFeat> {
/// All the flags for the document.
pub flags: CompileFlags,
/// The export signal for the document.
pub flags: ExportSignal,
/// Using env
pub env: CompileEnv,
/// Using world

@@ -97,7 +99,7 @@ impl<F: CompilerFeat + 'static> CompileSnapshot<F> {
pub async fn compile(&self) -> CompiledArtifact<F> {
let (doc, env) = self.start().wait().clone();
CompiledArtifact {
flags: self.flags,
signal: self.flags,
world: self.world.clone(),
env,
doc,

@@ -120,8 +122,8 @@ impl<F: CompilerFeat> Clone for CompileSnapshot<F> {

#[derive(Clone)]
pub struct CompiledArtifact<F: CompilerFeat> {
/// All the flags for the document.
pub flags: CompileFlags,
/// All the export signal for the document.
pub signal: ExportSignal,
/// Used world
pub world: Arc<CompilerWorld<F>>,
/// Used env

@@ -484,11 +486,10 @@ impl<F: CompilerFeat + Send + Sync + 'static> CompileServerActor<F> {
CompileSnapshot {
world: Arc::new(world.clone()),
env: env.clone(),
flags: CompileFlags {
compile_tick: self.logical_tick,
triggered_by_entry_update: reason.by_entry_update,
triggered_by_mem_events: reason.by_memory_events,
triggered_by_fs_events: reason.by_fs_events,
flags: ExportSignal {
by_entry_update: reason.by_entry_update,
by_mem_events: reason.by_memory_events,
by_fs_events: reason.by_fs_events,
},
doc_state: Arc::new(OnceCell::new()),
success_doc: self.latest_success_doc.clone(),
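For readers following the refactor: the export task previously exposed `ExportSignal` as an enum (`Typed`, `Saved`, `TypedAndSaved`, `EntryChanged`; removed later in this diff), while the compile snapshot carried separate `triggered_by_*` flags. Both collapse into the `ExportSignal` struct introduced above. A rough correspondence, written as a hypothetical helper purely for illustration:

```rust
// Hypothetical mapping from the removed enum-style signal to the new flags.
// `ExportSignal` mirrors the struct added in this commit; `OldExportSignal`
// restates the enum that this commit deletes.
#[derive(Debug, Clone, Copy, Default)]
struct ExportSignal {
    by_mem_events: bool,
    by_fs_events: bool,
    by_entry_update: bool,
}

#[derive(Debug, Clone, Copy)]
enum OldExportSignal {
    Typed,
    Saved,
    TypedAndSaved,
    EntryChanged,
}

fn to_flags(old: OldExportSignal) -> ExportSignal {
    let mut s = ExportSignal::default();
    match old {
        // "Typed": the revision was annotated by memory (editor) events.
        OldExportSignal::Typed => s.by_mem_events = true,
        // "Saved": the revision was annotated by file system events.
        OldExportSignal::Saved => s.by_fs_events = true,
        OldExportSignal::TypedAndSaved => {
            s.by_mem_events = true;
            s.by_fs_events = true;
        }
        // "EntryChanged": the entry (main file) was updated.
        OldExportSignal::EntryChanged => s.by_entry_update = true,
    }
    s
}

fn main() {
    assert!(to_flags(OldExportSignal::TypedAndSaved).by_fs_events);
    assert!(!to_flags(OldExportSignal::Typed).by_entry_update);
}
```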
@@ -7,7 +7,8 @@ use comemo::Prehashed;
use itertools::Itertools;
use lsp_types::*;
use once_cell::sync::{Lazy, OnceCell};
use serde::Deserialize;
use reflexo::path::PathClean;
use serde::{Deserialize, Serialize};
use serde_json::{json, Map, Value as JsonValue};
use tinymist_query::{get_semantic_tokens_options, PositionEncoding};
use tinymist_render::PeriscopeArgs;

@@ -401,7 +402,7 @@ pub struct CompileConfig {
/// The workspace roots from initialization.
pub roots: Vec<PathBuf>,
/// The output directory for PDF export.
pub output_path: String,
pub output_path: PathPattern,
/// The mode of PDF export.
pub export_pdf: ExportMode,
/// Specifies the root path of the project manually.

@@ -438,7 +439,8 @@ impl CompileConfig {

/// Updates the configuration with a map.
pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
self.output_path = try_or_default(|| Some(update.get("outputPath")?.as_str()?.to_owned()));
self.output_path =
try_or_default(|| PathPattern::deserialize(update.get("outputPath")?).ok());
self.export_pdf = try_or_default(|| ExportMode::deserialize(update.get("exportPdf")?).ok());
self.root_path = try_(|| Some(update.get("rootPath")?.as_str()?.into()));
self.notify_status = match try_(|| update.get("compileStatus")?.as_str()) {
@@ -726,6 +728,71 @@ pub struct CompileExtraOpts {
pub font_paths: Vec<PathBuf>,
}

/// The path pattern that could be substituted.
///
/// # Examples
/// - `$root` is the root of the project.
/// - `$root/$dir` is the parent directory of the input (main) file.
/// - `$root/main` will help store pdf file to `$root/main.pdf` constantly.
/// - (default) `$root/$dir/$name` will help store pdf file along with the input
/// file.
#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct PathPattern(pub String);

impl PathPattern {
/// Creates a new path pattern.
pub fn new(pattern: &str) -> Self {
Self(pattern.to_owned())
}

/// Substitutes the path pattern with `$root`, and `$dir/$name`.
pub fn substitute(&self, entry: &EntryState) -> Option<ImmutPath> {
self.substitute_impl(entry.root(), entry.main())
}

#[comemo::memoize]
fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
log::info!("Check path {main:?} and root {root:?} with output directory {self:?}");

let (root, main) = root.zip(main)?;

// Files in packages are not exported
if main.package().is_some() {
return None;
}
// Files without a path are not exported
let path = main.vpath().resolve(&root)?;

// todo: handle untitled path
if let Ok(path) = path.strip_prefix("/untitled") {
let tmp = std::env::temp_dir();
let path = tmp.join("typst").join(path);
return Some(path.as_path().into());
}

if self.0.is_empty() {
return Some(path.to_path_buf().clean().into());
}

let path = path.strip_prefix(&root).ok()?;
let dir = path.parent();
let file_name = path.file_name().unwrap_or_default();

let w = root.to_string_lossy();
let f = file_name.to_string_lossy();

// replace all $root
let mut path = self.0.replace("$root", &w);
if let Some(dir) = dir {
let d = dir.to_string_lossy();
path = path.replace("$dir", &d);
}
path = path.replace("$name", &f);

Some(PathBuf::from(path).clean().into())
}
}

#[cfg(test)]
mod tests {
use super::*;

@@ -754,7 +821,7 @@ mod tests {

config.update(&update).unwrap();

assert_eq!(config.compile.output_path, "out");
assert_eq!(config.compile.output_path, PathPattern::new("out"));
assert_eq!(config.compile.export_pdf, ExportMode::OnSave);
assert_eq!(config.compile.root_path, Some(PathBuf::from(root_path)));
assert_eq!(config.semantic_tokens, SemanticTokensMode::Enable);

@@ -799,4 +866,30 @@ mod tests {
let err = format!("{}", config.update(&update).unwrap_err());
assert!(err.contains("absolute path"), "unexpected error: {err}");
}

#[test]
fn test_substitute_path() {
let root = Path::new("/root");
let entry = EntryState::new_rooted(
root.into(),
Some(FileId::new(None, VirtualPath::new("/dir1/dir2/file.txt"))),
);

assert_eq!(
PathPattern::new("/substitute/$dir/$name").substitute(&entry),
Some(PathBuf::from("/substitute/dir1/dir2/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/$dir/../$name").substitute(&entry),
Some(PathBuf::from("/substitute/dir1/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/$name").substitute(&entry),
Some(PathBuf::from("/substitute/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/target/$dir/$name").substitute(&entry),
Some(PathBuf::from("/substitute/target/dir1/dir2/file.txt").into())
);
}
}
@@ -15,7 +15,7 @@ use once_cell::sync::OnceCell;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value as JsonValue};
use sync_lsp::*;
use task::{ExportConfig, FormatConfig, FormatTask, UserActionTask};
use task::{ExportUserConfig, FormatTask, FormatUserConfig, UserActionTask};
use tinymist_query::{
get_semantic_tokens_options, get_semantic_tokens_registration,
get_semantic_tokens_unregistration, PageSelection, SemanticTokenContext,

@@ -111,7 +111,7 @@ impl LanguageState {
const_config.tokens_overlapping_token_support,
const_config.tokens_multiline_token_support,
);
let formatter = FormatTask::new(FormatConfig {
let formatter = FormatTask::new(FormatUserConfig {
mode: config.formatter,
width: config.formatter_print_width,
position_encoding: const_config.position_encoding,

@@ -481,8 +481,8 @@ impl LanguageState {
if config.compile.output_path != self.config.compile.output_path
|| config.compile.export_pdf != self.config.compile.export_pdf
{
let config = ExportConfig {
substitute_pattern: self.config.compile.output_path.clone(),
let config = ExportUserConfig {
output: self.config.compile.output_path.clone(),
mode: self.config.compile.export_pdf,
};

@@ -513,7 +513,7 @@ impl LanguageState {
error!("could not change formatter config: {err}");
}

self.formatter.change_config(FormatConfig {
self.formatter.change_config(FormatUserConfig {
mode: self.config.formatter,
width: self.config.formatter_print_width,
position_encoding: self.const_config.position_encoding,

@@ -667,7 +667,7 @@ impl LanguageState {
let source = self
.query_source(path, |source: typst::syntax::Source| Ok(source))
.map_err(|e| internal_error(format!("could not format document: {e}")))?;
self.client.schedule(req_id, self.formatter.exec(source))
self.client.schedule(req_id, self.formatter.run(source))
}

fn inlay_hint(&mut self, req_id: RequestId, params: InlayHintParams) -> ScheduledResult {
@@ -1,75 +1,53 @@
//! The actor that handles PDF/SVG/PNG export.
//! The actor that handles various document export, like PDF and SVG export.

use std::{
path::{Path, PathBuf},
sync::Arc,
};
use std::{path::PathBuf, sync::Arc};

use anyhow::bail;
use anyhow::Context;
use log::{error, info};
use anyhow::{bail, Context};
use once_cell::sync::Lazy;
use tinymist_query::{ExportKind, PageSelection};
use tokio::{sync::mpsc, task::spawn_blocking};
use typst::{foundations::Smart, layout::Abs, layout::Frame, visualize::Color};
use typst_ts_compiler::EntryReader;
use typst_ts_core::{path::PathClean, ImmutPath};
use typst_ts_compiler::{EntryReader, EntryState, TaskInputs};

use crate::{
actor::{editor::EditorRequest, typ_server::CompiledArtifact},
actor::{
editor::EditorRequest,
typ_client::QuerySnap,
typ_server::{CompiledArtifact, ExportSignal},
},
tool::word_count,
world::LspCompilerFeat,
ExportMode,
ExportMode, PathPattern,
};

use super::*;

/// User configuration for export.
#[derive(Debug, Clone, Default)]
pub struct ExportConfig {
pub substitute_pattern: String,
pub struct ExportUserConfig {
/// The output path pattern.
pub output: PathPattern,
/// The export mode.
pub mode: ExportMode,
}

#[derive(Debug, Clone, Copy)]
pub enum ExportSignal {
Typed,
Saved,
TypedAndSaved,
EntryChanged,
}

impl ExportSignal {
pub fn is_typed(&self) -> bool {
matches!(self, ExportSignal::Typed | ExportSignal::TypedAndSaved)
}

pub fn is_saved(&self) -> bool {
matches!(self, ExportSignal::Saved | ExportSignal::TypedAndSaved)
}

fn is_entry_change(&self) -> bool {
matches!(self, ExportSignal::EntryChanged)
}
}

#[derive(Clone, Default)]
pub struct ExportTask {
factory: SyncTaskFactory<ExportTaskConf>,
factory: SyncTaskFactory<ExportConfig>,
export_folder: FutureFolder,
count_word_folder: FutureFolder,
}

impl ExportTask {
pub fn new(data: ExportTaskConf) -> Self {
pub fn new(data: ExportConfig) -> Self {
Self {
factory: SyncTaskFactory(Arc::new(std::sync::RwLock::new(Arc::new(data)))),
export_folder: FutureFolder::default(),
count_word_folder: FutureFolder::default(),
factory: SyncTaskFactory::new(data),
..ExportTask::default()
}
}

pub fn task(&self) -> Arc<ExportTaskConf> {
self.factory.task()
pub fn change_config(&self, config: ExportUserConfig) {
self.factory.mutate(|data| data.config = config);
}

pub fn signal(&self, snap: &CompiledArtifact<LspCompilerFeat>, s: ExportSignal) {
@@ -77,30 +55,36 @@ impl ExportTask {
task.signal(snap, s, self);
}

pub fn change_config(&self, config: ExportConfig) {
self.factory.mutate(|data| data.config = config);
pub fn oneshot(
&self,
snap: QuerySnap,
entry: Option<EntryState>,
kind: ExportKind,
) -> impl Future<Output = anyhow::Result<Option<PathBuf>>> {
let export = self.factory.task();
async move {
let snap = snap.snapshot().await?;
let snap = snap.task(TaskInputs {
entry,
..Default::default()
});

let artifact = snap.compile().await;
export.do_export(&kind, artifact).await
}
}
}

#[derive(Clone, Default)]
pub struct ExportTaskConf {
pub struct ExportConfig {
pub group: String,
pub editor_tx: Option<mpsc::UnboundedSender<EditorRequest>>,
pub config: ExportConfig,
pub config: ExportUserConfig,
pub kind: ExportKind,
pub count_words: bool,
}

impl ExportTaskConf {
pub async fn oneshot(
&self,
snap: &CompiledArtifact<LspCompilerFeat>,
kind: ExportKind,
) -> Option<PathBuf> {
let snap = snap.clone();
self.check_mode_and_export(&kind, &snap).await
}

impl ExportConfig {
fn signal(
self: Arc<Self>,
snap: &CompiledArtifact<LspCompilerFeat>,
@@ -108,10 +92,8 @@ impl ExportTaskConf {
t: &ExportTask,
) {
self.signal_export(snap, s, t);
if s.is_typed() || s.is_entry_change() {
self.signal_count_word(snap, t);
}
}

fn signal_export(
self: &Arc<Self>,
@@ -121,109 +103,68 @@ impl ExportTaskConf {
) -> Option<()> {
let doc = artifact.doc.as_ref().ok()?;

// We do only check the latest signal and determine whether to export by the
// latest state. This is not a TOCTOU issue, as examined by typst-preview.
let mode = self.config.mode;
let need_export = (!matches!(mode, ExportMode::Never) && s.is_entry_change())
let need_export = (!matches!(mode, ExportMode::Never) && s.by_entry_update)
|| match mode {
ExportMode::Never => false,
ExportMode::OnType => s.is_typed(),
ExportMode::OnSave => s.is_saved(),
ExportMode::OnDocumentHasTitle => s.is_saved() && doc.title.is_some(),
ExportMode::OnType => s.by_mem_events,
ExportMode::OnSave => s.by_fs_events,
ExportMode::OnDocumentHasTitle => s.by_fs_events && doc.title.is_some(),
};

if !need_export {
return None;
}

t.export_folder.spawn(artifact.world.revision().get(), || {
let this = self.clone();
let artifact = artifact.clone();
t.export_folder.spawn(
artifact.world.revision().get(),
Box::pin(async move {
this.check_mode_and_export(&this.kind, &artifact).await;
log_err(this.do_export(&this.kind, artifact).await);
Some(())
}),
);
})
});

Some(())
}

fn signal_count_word(&self, artifact: &CompiledArtifact<LspCompilerFeat>, t: &ExportTask) {
if !self.count_words {
return;
}

let Some(editor_tx) = self.editor_tx.clone() else {
return;
};
if self.count_words {
let revision = artifact.world.revision().get();

t.count_word_folder.spawn(revision, || {
let artifact = artifact.clone();
let group = self.group.clone();
let revision = artifact.world.revision().get();
t.count_word_folder.spawn(
revision,
Box::pin(async move {
let doc = artifact.doc.ok()?;

let wc = word_count::word_count(&doc);
log::debug!("word count({group}:{revision}): {wc:?}");
log::debug!("WordCount({group}:{revision}): {wc:?}");

let _ = editor_tx.send(EditorRequest::WordCount(group, wc));

Some(())
}),
);
}
}

async fn check_mode_and_export(
&self,
kind: &ExportKind,
doc: &CompiledArtifact<LspCompilerFeat>,
) -> Option<PathBuf> {
let entry = doc.world.entry_state();

let root = entry.root();
let main = entry.main();

info!(
"RenderActor: check path {:?} and root {:?} with output directory {}",
main, root, self.config.substitute_pattern
);

let root = root?;
let main = main?;

// todo: package??
if main.package().is_some() {
return None;
}

let path = main.vpath().resolve(&root)?;

match self.do_export(kind, doc, &root, &path).await {
Ok(pdf) => Some(pdf),
Err(err) => {
error!("RenderActor({kind:?}): failed to export {err}");
None
}
}
})
});
}

async fn do_export(
&self,
kind: &ExportKind,
doc: &CompiledArtifact<LspCompilerFeat>,
root: &Path,
path: &Path,
) -> anyhow::Result<PathBuf> {
artifact: CompiledArtifact<LspCompilerFeat>,
) -> anyhow::Result<Option<PathBuf>> {
use ExportKind::*;
use PageSelection::*;

let doc = doc
.doc
.as_ref()
.map_err(|_| anyhow::anyhow!("no document"))?
.clone();

let Some(to) = substitute_path(&self.config.substitute_pattern, root, path) else {
bail!("RenderActor({kind:?}): failed to substitute path");
// Prepare the output path.
let entry = artifact.world.entry_state();
let Some(to) = self.config.output.substitute(&entry) else {
return Ok(None);
};
if to.is_relative() {
bail!("RenderActor({kind:?}): path is relative: {to:?}");
@@ -231,10 +172,8 @@ impl ExportTaskConf {
if to.is_dir() {
bail!("RenderActor({kind:?}): path is a directory: {to:?}");
}

let to = to.with_extension(kind.extension());
info!("RenderActor({kind:?}): exporting {path:?} to {to:?}");

log::info!("RenderActor({kind:?}): exporting {entry:?} to {to:?}");
if let Some(e) = to.parent() {
if !e.exists() {
std::fs::create_dir_all(e).with_context(|| {
@@ -243,6 +182,10 @@ impl ExportTaskConf {
}
}

// Prepare the document.
let doc = artifact.doc.map_err(|_| anyhow::anyhow!("no document"))?;

// Prepare data.
let kind2 = kind.clone();
let data = spawn_blocking(move || -> anyhow::Result<Vec<u8>> {
rayon::in_place_scope(|_| {
@@ -278,39 +221,19 @@ impl ExportTaskConf {
.await
.with_context(|| format!("RenderActor({kind:?}): failed to export"))?;

info!("RenderActor({kind:?}): export complete");
Ok(to)
log::info!("RenderActor({kind:?}): export complete");
Ok(Some(to))
}
}

#[comemo::memoize]
fn substitute_path(substitute_pattern: &str, root: &Path, path: &Path) -> Option<ImmutPath> {
if let Ok(path) = path.strip_prefix("/untitled") {
let tmp = std::env::temp_dir();
let path = tmp.join("typst").join(path);
return Some(path.as_path().into());
fn log_err<T>(artifact: anyhow::Result<T>) -> Option<T> {
match artifact {
Ok(v) => Some(v),
Err(err) => {
log::error!("{err}");
None
}
}

if substitute_pattern.is_empty() {
return Some(path.to_path_buf().clean().into());
}

let path = path.strip_prefix(root).ok()?;
let dir = path.parent();
let file_name = path.file_name().unwrap_or_default();

let w = root.to_string_lossy();
let f = file_name.to_string_lossy();

// replace all $root
let mut path = substitute_pattern.replace("$root", &w);
if let Some(dir) = dir {
let d = dir.to_string_lossy();
path = path.replace("$dir", &d);
}
path = path.replace("$name", &f);

Some(PathBuf::from(path).clean().into())
}

#[cfg(test)]
@@ -319,31 +242,8 @@ mod tests {

#[test]
fn test_default_never() {
let conf = ExportTaskConf::default();
let conf = ExportConfig::default();
assert!(!conf.count_words);
assert_eq!(conf.config.mode, ExportMode::Never);
}

#[test]
fn test_substitute_path() {
let root = Path::new("/root");
let path = Path::new("/root/dir1/dir2/file.txt");

assert_eq!(
substitute_path("/substitute/$dir/$name", root, path),
Some(PathBuf::from("/substitute/dir1/dir2/file.txt").into())
);
assert_eq!(
substitute_path("/substitute/$dir/../$name", root, path),
Some(PathBuf::from("/substitute/dir1/file.txt").into())
);
assert_eq!(
substitute_path("/substitute/$name", root, path),
Some(PathBuf::from("/substitute/file.txt").into())
);
assert_eq!(
substitute_path("/substitute/target/$dir/$name", root, path),
Some(PathBuf::from("/substitute/target/dir1/dir2/file.txt").into())
);
}
}
@@ -1,78 +1,59 @@
//! The actor that handles formatting.

use std::{iter::zip, sync::Arc};
use std::iter::zip;

use lsp_types::TextEdit;
use sync_lsp::{just_future, SchedulableResponse};
use tinymist_query::{typst_to_lsp, PositionEncoding};
use typst::syntax::Source;

use crate::{FormatterMode, LspResult};

use super::SyncTaskFactory;
use crate::FormatterMode;

#[derive(Debug, Clone)]
pub struct FormatConfig {
pub struct FormatUserConfig {
pub mode: FormatterMode,
pub width: u32,
pub position_encoding: PositionEncoding,
}

type FmtFn = Arc<dyn Fn(Source) -> LspResult<Option<Vec<TextEdit>>> + Send + Sync>;

#[derive(Clone)]
pub struct FormatTask {
factory: SyncTaskFactory<FormatterTaskData>,
factory: SyncTaskFactory<FormatUserConfig>,
}

impl FormatTask {
pub fn new(c: FormatConfig) -> Self {
let factory = SyncTaskFactory::default();
let this = Self { factory };

this.change_config(c);
this
pub fn new(c: FormatUserConfig) -> Self {
Self {
factory: SyncTaskFactory::new(c),
}
}

pub fn change_config(&self, c: FormatConfig) {
self.factory.mutate(|data| {
data.0 = match c.mode {
pub fn change_config(&self, c: FormatUserConfig) {
self.factory.mutate(|data| *data = c);
}

pub fn run(&self, src: Source) -> SchedulableResponse<Option<Vec<TextEdit>>> {
let c = self.factory.task();
just_future(async move {
match c.mode {
FormatterMode::Typstyle => {
let cw = c.width as usize;
Arc::new(move |e: Source| {
let res =
typstyle_core::Typstyle::new_with_src(e.clone(), cw).pretty_print();
Ok(calc_diff(e, res, c.position_encoding))
})
let res = typstyle_core::Typstyle::new_with_src(src.clone(), cw).pretty_print();
Ok(calc_diff(src, res, c.position_encoding))
}
FormatterMode::Typstfmt => {
let config = typstfmt_lib::Config {
max_line_length: c.width as usize,
..typstfmt_lib::Config::default()
};
Arc::new(move |e: Source| {
let res = typstfmt_lib::format(e.text(), config);
Ok(calc_diff(e, res, c.position_encoding))
let res = typstfmt_lib::format(src.text(), config);
Ok(calc_diff(src, res, c.position_encoding))
}
FormatterMode::Disable => Ok(None),
}
})
}
FormatterMode::Disable => Arc::new(|_| Ok(None)),
}
});
}

pub fn exec(&self, source: Source) -> SchedulableResponse<Option<Vec<TextEdit>>> {
let data = self.factory.task();
just_future(async move { (data.0)(source) })
}
}

#[derive(Clone)]
pub struct FormatterTaskData(FmtFn);

impl Default for FormatterTaskData {
fn default() -> Self {
Self(Arc::new(|_| Ok(None)))
}
}

/// A simple implementation of the diffing algorithm, borrowed from
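The `calc_diff` helper referenced above (its body lies outside this hunk) converts the formatter's full output back into LSP `TextEdit`s. A common way to do that, sketched here as a standalone function rather than tinymist's actual implementation, is to strip the common prefix and suffix of the old and new text and emit a single replacement for the middle; turning the byte range into LSP positions is then a matter of the configured `PositionEncoding`.

```rust
// A standalone sketch (not tinymist's calc_diff): compute one replacement that
// covers only the region that actually changed between old and formatted text.
fn minimal_replacement(old: &str, new: &str) -> Option<(std::ops::Range<usize>, String)> {
    if old == new {
        return None; // nothing to edit
    }
    // Byte length of the common prefix, kept on char boundaries.
    let prefix = old
        .char_indices()
        .zip(new.char_indices())
        .take_while(|((_, a), (_, b))| a == b)
        .last()
        .map(|((i, a), _)| i + a.len_utf8())
        .unwrap_or(0);
    // Byte length of the common suffix of the remainders, on char boundaries.
    let suffix = old[prefix..]
        .chars()
        .rev()
        .zip(new[prefix..].chars().rev())
        .take_while(|(a, b)| a == b)
        .map(|(a, _)| a.len_utf8())
        .sum::<usize>();
    let old_end = old.len() - suffix;
    let new_end = new.len() - suffix;
    Some((prefix..old_end, new[prefix..new_end].to_string()))
}

fn main() {
    let old = "#let x=1\n#let y =2\n";
    let new = "#let x = 1\n#let y = 2\n";
    let (range, text) = minimal_replacement(old, new).unwrap();
    // Applying the single edit reproduces the formatter output.
    let patched = format!("{}{}{}", &old[..range.start], text, &old[range.end..]);
    assert_eq!(patched, new);
    println!("replace {range:?} with {text:?}");
}
```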
@@ -1,3 +1,7 @@
//! Task are stateless actors that staring computing tasks.
//! [`SyncTaskFactory`] can hold *mutable* configuration but the mutations don't
//! blocking the computation, i.e. the mutations are non-blocking.

mod export;
pub use export::*;
mod format;

@@ -15,12 +19,18 @@ use reflexo::TakeAs;
#[derive(Clone, Default)]
struct SyncTaskFactory<T>(Arc<std::sync::RwLock<Arc<T>>>);

impl<T> SyncTaskFactory<T> {
pub fn new(config: T) -> Self {
Self(Arc::new(std::sync::RwLock::new(Arc::new(config))))
}
}

impl<T: Clone> SyncTaskFactory<T> {
fn mutate(&self, f: impl FnOnce(&mut T)) {
let mut w = self.0.write().unwrap();
let mut data = w.clone().take();
f(&mut data);
*w = Arc::new(data);
let mut config = w.clone().take();
f(&mut config);
*w = Arc::new(config);
}

fn task(&self) -> Arc<T> {

@@ -42,21 +52,21 @@ struct FutureFolder {
}

impl FutureFolder {
fn spawn(&self, revision: usize, fut: FoldFuture) {
fn spawn(&self, revision: usize, fut: impl FnOnce() -> FoldFuture) {
let mut state = self.state.lock();
let state = state.deref_mut();

match &mut state.task {
Some((prev_revision, prev)) => {
if *prev_revision < revision {
*prev = fut;
*prev = fut();
*prev_revision = revision;
}

return;
}
next_update => {
*next_update = Some((revision, fut));
*next_update = Some((revision, fut()));
}
}
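The `SyncTaskFactory` shown above keeps the current task configuration behind `Arc<RwLock<Arc<T>>>`: `task()` hands a running task a cheap `Arc<T>` snapshot, and `mutate` clones the current value, applies the change, and swaps in a fresh `Arc`, so the doc comment's "non-blocking" claim means that in-flight tasks keep the snapshot they started with. A minimal standalone version of this copy-on-write pattern (not the tinymist type itself):

```rust
use std::sync::{Arc, RwLock};

// Copy-on-write config holder: readers take Arc snapshots, writers swap in a new Arc.
#[derive(Clone)]
struct Factory<T>(Arc<RwLock<Arc<T>>>);

impl<T: Clone> Factory<T> {
    fn new(config: T) -> Self {
        Self(Arc::new(RwLock::new(Arc::new(config))))
    }

    // Snapshot used by a running task; later mutations do not affect it.
    fn task(&self) -> Arc<T> {
        self.0.read().unwrap().clone()
    }

    // Clone the current value, apply the edit, publish it as a fresh Arc.
    fn mutate(&self, f: impl FnOnce(&mut T)) {
        let mut w = self.0.write().unwrap();
        let mut data = (**w).clone();
        f(&mut data);
        *w = Arc::new(data);
    }
}

#[derive(Clone, Default, Debug, PartialEq)]
struct Config {
    width: u32,
}

fn main() {
    let factory = Factory::new(Config { width: 80 });
    let snapshot = factory.task(); // a task starts with width = 80
    factory.mutate(|c| c.width = 120); // reconfigure without touching that task
    assert_eq!(snapshot.width, 80);
    assert_eq!(factory.task().width, 120);
}
```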