Mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-08-03 01:42:14 +00:00)

feat: execute export and query on the task model (#1214)

* feat: extract id and doc id from config
* dev: merge `TaskWhen` and move `PathPattern`
* g
* dev: let it compile
* dev: rename a bit
* dev: finish cmd conversions
* dev: configure server
* dev: run export
* dev: clean code
* dev: parse gap on export
* fix: when test

parent 86d3b912d4
commit 1b80d8c31d

14 changed files with 608 additions and 592 deletions
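The diff below moves export and query execution onto the project task model: an `ExportTask` says when a task runs and where its output goes, a `ProjectTask` says what to produce (PDF, PNG, SVG, HTML, Markdown, text, or a query result), and an `ApplyProjectTask` binds a task to a document. As a rough sketch of the shape this commit introduces (import paths and field values are illustrative assumptions, not taken verbatim from the commit):

```rust
use tinymist_project::{ApplyProjectTask, ExportPdfTask, ExportTask, Id, ProjectTask, TaskWhen};

// Hypothetical helper mirroring what `TaskCompileArgs::to_task` does below:
// wrap a PDF export for a given document into an ApplyProjectTask.
fn pdf_task_for(doc_id: Id) -> ApplyProjectTask {
    let export = ExportTask {
        when: TaskWhen::OnSave, // run on textDocument/didSave
        output: None,           // fall back to the configured output path pattern
        transform: vec![],      // no page transforms
    };
    ApplyProjectTask {
        id: doc_id.clone(),
        document: doc_id,
        task: ProjectTask::ExportPdf(ExportPdfTask {
            export,
            pdf_standards: vec![],
            creation_timestamp: None,
        }),
    }
}
```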
@@ -148,7 +148,7 @@ pub struct TaskCompileArgs {
impl TaskCompileArgs {
/// Convert the arguments to a project task.
pub fn to_task(self, doc_id: Id) -> Result<ProjectTask> {
pub fn to_task(self, doc_id: Id) -> Result<ApplyProjectTask> {
let new_task_id = self.task_name.map(Id::new);
let task_id = new_task_id.unwrap_or(doc_id.clone());

@@ -182,13 +182,12 @@ impl TaskCompileArgs {
}
let export = ExportTask {
document: doc_id,
id: task_id.clone(),
when,
output: None,
transform: transforms,
};
Ok(match output_format {
let config = match output_format {
OutputFormat::Pdf => ProjectTask::ExportPdf(ExportPdfTask {
export,
pdf_standards: self.pdf_standard.clone(),

@@ -201,6 +200,12 @@ impl TaskCompileArgs {
}),
OutputFormat::Svg => ProjectTask::ExportSvg(ExportSvgTask { export }),
OutputFormat::Html => ProjectTask::ExportSvg(ExportSvgTask { export }),
};
Ok(ApplyProjectTask {
id: task_id.clone(),
document: doc_id,
task: config,
})
}
}
@@ -11,7 +11,7 @@ use tinymist_std::{bail, ImmutPath};
use typst::diag::EcoString;
use typst::World;
use crate::model::{Id, ProjectInput, ProjectRoute, ProjectTask, ResourcePath};
use crate::model::{ApplyProjectTask, Id, ProjectInput, ProjectRoute, ResourcePath};
use crate::{LockFile, LockFileCompat, LspWorld, ProjectPathMaterial, LOCK_VERSION};
pub const LOCK_FILENAME: &str = "tinymist.lock";

@@ -33,7 +33,7 @@ impl LockFile {
}
}
pub fn replace_task(&mut self, task: ProjectTask) {
pub fn replace_task(&mut self, task: ApplyProjectTask) {
let id = task.id().clone();
let index = self.task.iter().position(|i| *i.id() == id);
if let Some(index) = index {

@@ -225,7 +225,7 @@ pub fn update_lock(root: ImmutPath) -> LockFileUpdate {
enum LockUpdate {
Input(ProjectInput),
Task(ProjectTask),
Task(ApplyProjectTask),
Material(ProjectPathMaterial),
Route(ProjectRoute),
}

@@ -278,7 +278,7 @@ impl LockFileUpdate {
Some(id)
}
pub fn task(&mut self, task: ProjectTask) {
pub fn task(&mut self, task: ApplyProjectTask) {
self.updates.push(LockUpdate::Task(task));
}
@@ -9,9 +9,10 @@ use clap::ValueEnum;
use ecow::EcoVec;
use serde::{Deserialize, Serialize};
use tinymist_std::error::prelude::*;
use tinymist_std::path::unix_slash;
use tinymist_std::path::{unix_slash, PathClean};
use tinymist_std::{bail, ImmutPath};
use tinymist_world::EntryReader;
use tinymist_world::vfs::WorkspaceResolver;
use tinymist_world::{EntryReader, EntryState};
use typst::diag::EcoString;
use typst::syntax::FileId;

@@ -24,7 +25,7 @@ use crate::LspWorld;
pub const LOCK_VERSION: &str = "0.1.0-beta0";
/// A scalar that is not NaN.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
pub struct Scalar(f32);
impl TryFrom<f32> for Scalar {

@@ -39,6 +40,13 @@ impl TryFrom<f32> for Scalar {
}
}
impl Scalar {
/// Converts the scalar to an f32.
pub fn to_f32(self) -> f32 {
self.0
}
}
impl PartialEq for Scalar {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0

@@ -118,17 +126,16 @@ macro_rules! display_possible_values {
/// alias typst="tinymist compile --when=onSave"
/// typst compile main.typ
/// ```
#[derive(
Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, ValueEnum, Serialize, Deserialize,
)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[clap(rename_all = "camelCase")]
pub enum TaskWhen {
/// Never watch to run task.
#[default]
Never,
/// Run task on save.
/// Run task on saving the document, i.e. on `textDocument/didSave` events.
OnSave,
/// Run task on type.
/// Run task on typing, i.e. on `textDocument/didChange` events.
OnType,
/// *DEPRECATED* Run task when a document has a title and on saved, which is
/// useful to filter out template files.

@@ -161,6 +168,71 @@ pub enum OutputFormat {
display_possible_values!(OutputFormat);
/// The path pattern that could be substituted.
///
/// # Examples
/// - `$root` is the root of the project.
/// - `$root/$dir` is the parent directory of the input (main) file.
/// - `$root/main` will help store pdf file to `$root/main.pdf` constantly.
/// - (default) `$root/$dir/$name` will help store pdf file along with the input
///   file.
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PathPattern(pub String);
impl PathPattern {
/// Creates a new path pattern.
pub fn new(pattern: &str) -> Self {
Self(pattern.to_owned())
}
/// Substitutes the path pattern with `$root`, and `$dir/$name`.
pub fn substitute(&self, entry: &EntryState) -> Option<ImmutPath> {
self.substitute_impl(entry.root(), entry.main())
}
#[comemo::memoize]
fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
log::info!("Check path {main:?} and root {root:?} with output directory {self:?}");
let (root, main) = root.zip(main)?;
// Files in packages are not exported
if WorkspaceResolver::is_package_file(main) {
return None;
}
// Files without a path are not exported
let path = main.vpath().resolve(&root)?;
// todo: handle untitled path
if let Ok(path) = path.strip_prefix("/untitled") {
let tmp = std::env::temp_dir();
let path = tmp.join("typst").join(path);
return Some(path.as_path().into());
}
if self.0.is_empty() {
return Some(path.to_path_buf().clean().into());
}
let path = path.strip_prefix(&root).ok()?;
let dir = path.parent();
let file_name = path.file_name().unwrap_or_default();
let w = root.to_string_lossy();
let f = file_name.to_string_lossy();
// replace all $root
let mut path = self.0.replace("$root", &w);
if let Some(dir) = dir {
let d = dir.to_string_lossy();
path = path.replace("$dir", &d);
}
path = path.replace("$name", &f);
Some(PathBuf::from(path).clean().into())
}
}
/// A PDF standard that Typst can enforce conformance with.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)]
#[allow(non_camel_case_types)]

@@ -185,6 +257,11 @@ display_possible_values!(PdfStandard);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);
impl Pages {
/// Selects the first page.
pub const FIRST: Pages = Pages(NonZeroUsize::new(1)..=None);
}
impl FromStr for Pages {
type Err = &'static str;

@@ -392,7 +469,7 @@ pub struct LockFile {
pub document: Vec<ProjectInput>,
/// The project's task (output).
#[serde(skip_serializing_if = "Vec::is_empty", default)]
pub task: Vec<ProjectTask>,
pub task: Vec<ApplyProjectTask>,
/// The project's task route.
#[serde(skip_serializing_if = "EcoVec::is_empty", default)]
pub route: EcoVec<ProjectRoute>,

@@ -472,3 +549,33 @@ pub struct ProjectRoute {
/// The priority of the project. (lower numbers are higher priority).
pub priority: u32,
}
#[cfg(test)]
mod tests {
use super::*;
use typst::syntax::VirtualPath;
#[test]
fn test_substitute_path() {
let root = Path::new("/root");
let entry =
EntryState::new_rooted(root.into(), Some(VirtualPath::new("/dir1/dir2/file.txt")));
assert_eq!(
PathPattern::new("/substitute/$dir/$name").substitute(&entry),
Some(PathBuf::from("/substitute/dir1/dir2/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/$dir/../$name").substitute(&entry),
Some(PathBuf::from("/substitute/dir1/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/$name").substitute(&entry),
Some(PathBuf::from("/substitute/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/target/$dir/$name").substitute(&entry),
Some(PathBuf::from("/substitute/target/dir1/dir2/file.txt").into())
);
}
}
@@ -4,11 +4,11 @@ use std::hash::Hash;
use serde::{Deserialize, Serialize};
use super::{Id, Pages, PdfStandard, Scalar, TaskWhen};
use super::{Id, Pages, PathPattern, PdfStandard, Scalar, TaskWhen};
/// A project task specifier. This is used for specifying tasks in a project.
/// When the language service notifies an update event of the project, it will
/// check whether any associated tasks need to be run.
/// A project task application specifier. This is used for specifying tasks to
/// run in a project. When the language service notifies an update event of the
/// project, it will check whether any associated tasks need to be run.
///
/// Each task can have different timing and conditions for running. See
/// [`TaskWhen`] for more information.

@@ -32,6 +32,31 @@ use super::{Id, Pages, PdfStandard, Scalar, TaskWhen};
/// ```
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub struct ApplyProjectTask {
/// The task's ID.
pub id: Id,
/// The document's ID.
pub document: Id,
/// The task to run.
#[serde(flatten)]
pub task: ProjectTask,
}
impl ApplyProjectTask {
/// Returns the document's ID.
pub fn doc_id(&self) -> &Id {
&self.document
}
/// Returns the task's ID.
pub fn id(&self) -> &Id {
&self.id
}
}
/// A project task specifier. This structure specifies the arguments for a task.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub enum ProjectTask {
/// A preview task.
Preview(PreviewTask),

@@ -55,33 +80,48 @@ pub enum ProjectTask {
}
impl ProjectTask {
/// Returns the task's ID.
pub fn doc_id(&self) -> &Id {
match self {
ProjectTask::Preview(task) => &task.document,
ProjectTask::ExportPdf(task) => &task.export.document,
ProjectTask::ExportPng(task) => &task.export.document,
ProjectTask::ExportSvg(task) => &task.export.document,
ProjectTask::ExportHtml(task) => &task.export.document,
ProjectTask::ExportMarkdown(task) => &task.export.document,
ProjectTask::ExportText(task) => &task.export.document,
ProjectTask::Query(task) => &task.export.document,
// ProjectTask::Other(_) => return None,
}
/// Returns the timing of executing the task.
pub fn when(&self) -> Option<TaskWhen> {
Some(match self {
Self::Preview(task) => task.when,
Self::ExportPdf(..)
| Self::ExportPng(..)
| Self::ExportSvg(..)
| Self::ExportHtml(..)
| Self::ExportMarkdown(..)
| Self::ExportText(..)
| Self::Query(..) => self.as_export()?.when,
})
}
/// Returns the document's ID.
pub fn id(&self) -> &Id {
/// Returns the export configuration of a task.
pub fn as_export(&self) -> Option<&ExportTask> {
Some(match self {
Self::Preview(..) => return None,
Self::ExportPdf(task) => &task.export,
Self::ExportPng(task) => &task.export,
Self::ExportSvg(task) => &task.export,
Self::ExportHtml(task) => &task.export,
Self::ExportMarkdown(task) => &task.export,
Self::ExportText(task) => &task.export,
Self::Query(task) => &task.export,
})
}
/// Returns extension of the artifact.
pub fn extension(&self) -> &str {
match self {
ProjectTask::Preview(task) => &task.id,
ProjectTask::ExportPdf(task) => &task.export.id,
ProjectTask::ExportPng(task) => &task.export.id,
ProjectTask::ExportSvg(task) => &task.export.id,
ProjectTask::ExportHtml(task) => &task.export.id,
ProjectTask::ExportMarkdown(task) => &task.export.id,
ProjectTask::ExportText(task) => &task.export.id,
ProjectTask::Query(task) => &task.export.id,
// ProjectTask::Other(_) => return None,
Self::ExportPdf { .. } => "pdf",
Self::Preview(..) | Self::ExportHtml { .. } => "html",
Self::ExportMarkdown { .. } => "md",
Self::ExportText { .. } => "txt",
Self::ExportSvg { .. } => "svg",
Self::ExportPng { .. } => "png",
Self::Query(QueryTask {
format,
output_extension,
..
}) => output_extension.as_deref().unwrap_or(format),
}
}
}

@@ -90,30 +130,55 @@ impl ProjectTask {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct PreviewTask {
/// The task's ID.
pub id: Id,
/// The document's ID.
pub document: Id,
/// When to run the task. See [`TaskWhen`] for more
/// information.
pub when: TaskWhen,
}
/// An export task specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportTask {
/// The task's ID.
pub id: Id,
/// The document's ID.
pub document: Id,
/// When to run the task
pub when: TaskWhen,
/// The output path pattern.
pub output: Option<PathPattern>,
/// The task's transforms.
#[serde(skip_serializing_if = "Vec::is_empty", default)]
pub transform: Vec<ExportTransform>,
}
impl ExportTask {
/// Creates a new unmounted export task.
pub fn new(when: TaskWhen) -> Self {
Self {
when,
output: None,
transform: Vec::new(),
}
}
/// Pretty prints the output whenever possible.
pub fn apply_pretty(&mut self) {
self.transform
.push(ExportTransform::Pretty { script: None });
}
}
/// The legacy page selection specifier.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum PageSelection {
/// Selects the first page.
#[default]
First,
/// Merges all pages into a single page.
Merged {
/// The gap between pages (in pt).
gap: Option<String>,
},
}
/// A project export transform specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]

@@ -125,8 +190,8 @@ pub enum ExportTransform {
},
/// Merge pages into a single page.
Merge {
/// The gap between pages (in pt).
gap: Scalar,
/// The gap between pages (typst code expression, e.g. `1pt`).
gap: Option<String>,
},
/// Execute a transform script.
Script {

@@ -228,7 +293,7 @@ pub struct QueryTask {
pub format: String,
/// Uses a different output extension from the one inferring from the
/// [`Self::format`].
pub output_extension: String,
pub output_extension: Option<String>,
/// Defines which elements to retrieve.
pub selector: String,
/// Extracts just one field from all retrieved elements.
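The accessors above (`when()`, `as_export()`, `extension()`) are what the export scheduler later in this diff keys off. A minimal sketch of how a caller might consult them (the helper names here are hypothetical, not part of the commit):

```rust
use tinymist_project::{ProjectTask, TaskWhen};

// Hypothetical helper: should this task run on a textDocument/didSave event?
fn runs_on_save(task: &ProjectTask) -> bool {
    matches!(
        task.when(),
        Some(TaskWhen::OnSave | TaskWhen::OnDocumentHasTitle)
    )
}

// Hypothetical helper: describe the artifact a task would produce.
fn describe(task: &ProjectTask) -> String {
    let ext = task.extension(); // e.g. "pdf", "svg", "md"
    let output = task.as_export().and_then(|e| e.output.clone());
    format!("artifact: *.{ext}, output pattern: {output:?}")
}
```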
@@ -155,80 +155,18 @@ mod polymorphic {
use completion::CompletionList;
use lsp_types::TextEdit;
use serde::{Deserialize, Serialize};
use tinymist_project::ProjectTask;
use typst::foundations::Dict;
use super::prelude::*;
use super::*;
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum PageSelection {
#[default]
First,
Merged {
gap: Option<String>,
},
}
#[derive(Debug, Clone)]
pub enum ExportKind {
Pdf {
creation_timestamp: Option<chrono::DateTime<chrono::Utc>>,
},
Html {},
Markdown {},
Text {},
Query {
format: String,
output_extension: Option<String>,
strict: bool,
selector: String,
field: Option<String>,
one: bool,
pretty: bool,
},
Svg {
page: PageSelection,
},
Png {
ppi: Option<f64>,
fill: Option<String>,
page: PageSelection,
},
}
impl Default for ExportKind {
fn default() -> Self {
Self::Pdf {
creation_timestamp: None,
}
}
}
impl ExportKind {
pub fn extension(&self) -> &str {
match self {
Self::Pdf { .. } => "pdf",
Self::Html { .. } => "html",
Self::Markdown { .. } => "md",
Self::Text { .. } => "txt",
Self::Svg { .. } => "svg",
Self::Png { .. } => "png",
Self::Query {
format,
output_extension,
..
} => output_extension.as_deref().unwrap_or(format),
}
}
}
#[derive(Debug, Clone)]
pub struct OnExportRequest {
/// The path of the document to export.
pub path: PathBuf,
/// The kind of the export.
pub kind: ExportKind,
/// The export task to run.
pub task: ProjectTask,
/// Whether to open the exported file(s) after the export is done.
pub open: bool,
}
@@ -70,7 +70,7 @@ pub struct CompileOnceArgs {
#[clap(flatten)]
pub package: CompilePackageArgs,
/// The document's creation date formatted as a UNIX timestamp.
/// The document's creation date formatted as a UNIX timestamp (in seconds).
///
/// For more information, see <https://reproducible-builds.org/specs/source-date-epoch/>.
#[clap(

@@ -80,7 +80,7 @@ pub struct CompileOnceArgs {
value_parser = parse_source_date_epoch,
hide(true),
)]
pub creation_timestamp: Option<DateTime<Utc>>,
pub creation_timestamp: Option<i64>,
/// Path to CA certificate file for network access, especially for
/// downloading typst packages.

@@ -105,9 +105,12 @@ fn parse_input_pair(raw: &str) -> Result<(String, String), String> {
}
/// Parses a UNIX timestamp according to <https://reproducible-builds.org/specs/source-date-epoch/>
pub fn parse_source_date_epoch(raw: &str) -> Result<DateTime<Utc>, String> {
let timestamp: i64 = raw
.parse()
.map_err(|err| format!("timestamp must be decimal integer ({err})"))?;
DateTime::from_timestamp(timestamp, 0).ok_or_else(|| "timestamp out of range".to_string())
pub fn parse_source_date_epoch(raw: &str) -> Result<i64, String> {
raw.parse()
.map_err(|err| format!("timestamp must be decimal integer ({err})"))
}
/// Parses a UNIX timestamp according to <https://reproducible-builds.org/specs/source-date-epoch/>
pub fn convert_source_date_epoch(seconds: i64) -> Result<chrono::DateTime<Utc>, String> {
DateTime::from_timestamp(seconds, 0).ok_or_else(|| "timestamp out of range".to_string())
}
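With this change the CLI keeps the creation date as a plain `i64` and only turns it into a `chrono` datetime at export time. A small sketch of the resulting two-step flow (assuming both helpers are re-exported from `tinymist_project`, as the imports later in this diff suggest):

```rust
use chrono::{DateTime, Utc};
// Assumption: both helpers are reachable at this path (see the export task imports below).
use tinymist_project::{convert_source_date_epoch, parse_source_date_epoch};

fn main() -> Result<(), String> {
    // Parse the SOURCE_DATE_EPOCH-style value early; the CLI layer stores an i64.
    let seconds: i64 = parse_source_date_epoch("1700000000")?;
    // Convert to a datetime only where the PDF export actually needs one.
    let timestamp: DateTime<Utc> = convert_source_date_epoch(seconds)?;
    println!("creation timestamp: {timestamp}");
    Ok(())
}
```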
@@ -9,8 +9,12 @@ use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use task::TraceParams;
use tinymist_assets::TYPST_PREVIEW_HTML;
use tinymist_project::{
ExportHtmlTask, ExportMarkdownTask, ExportPdfTask, ExportPngTask, ExportSvgTask, ExportTask,
ExportTextTask, ExportTransform, PageSelection, Pages, ProjectTask, QueryTask,
};
use tinymist_query::package::PackageInfo;
use tinymist_query::{ExportKind, LocalContextGuard, PageSelection};
use tinymist_query::LocalContextGuard;
use tinymist_std::error::prelude::*;
use typst::diag::{eco_format, EcoString, StrResult};
use typst::syntax::package::{PackageSpec, VersionlessPackageSpec};

@@ -21,19 +25,19 @@ use super::*;
use crate::state::query::{run_query, LspClientExt};
use crate::tool::package::InitTask;
/// See [`ExportKind`].
/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
struct ExportOpts {
creation_timestamp: Option<String>,
fill: Option<String>,
ppi: Option<f64>,
ppi: Option<f32>,
#[serde(default)]
page: PageSelection,
/// Whether to open the exported file(s) after the export is done.
open: Option<bool>,
}
/// See [`ExportKind`].
/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct QueryOpts {

@@ -71,7 +75,11 @@ impl ServerState {
self.export(
req_id,
ExportKind::Pdf { creation_timestamp },
ProjectTask::ExportPdf(ExportPdfTask {
export: ExportTask::default(),
pdf_standards: vec![],
creation_timestamp,
}),
opts.open.unwrap_or_default(),
args,
)

@@ -82,7 +90,9 @@ impl ServerState {
let opts = get_arg_or_default!(args[1] as ExportOpts);
self.export(
req_id,
ExportKind::Html {},
ProjectTask::ExportHtml(ExportHtmlTask {
export: ExportTask::default(),
}),
opts.open.unwrap_or_default(),
args,
)

@@ -97,7 +107,9 @@ impl ServerState {
let opts = get_arg_or_default!(args[1] as ExportOpts);
self.export(
req_id,
ExportKind::Markdown {},
ProjectTask::ExportMarkdown(ExportMarkdownTask {
export: ExportTask::default(),
}),
opts.open.unwrap_or_default(),
args,
)

@@ -108,7 +120,9 @@ impl ServerState {
let opts = get_arg_or_default!(args[1] as ExportOpts);
self.export(
req_id,
ExportKind::Text {},
ProjectTask::ExportText(ExportTextTask {
export: ExportTask::default(),
}),
opts.open.unwrap_or_default(),
args,
)

@@ -117,17 +131,24 @@ impl ServerState {
/// Query the current document and export the result as JSON file(s).
pub fn export_query(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
let opts = get_arg_or_default!(args[1] as QueryOpts);
// todo: deprecate it
let _ = opts.strict;
let mut export = ExportTask::default();
if opts.pretty.unwrap_or(true) {
export.apply_pretty();
}
self.export(
req_id,
ExportKind::Query {
ProjectTask::Query(QueryTask {
format: opts.format,
output_extension: opts.output_extension,
strict: opts.strict.unwrap_or(true),
selector: opts.selector,
field: opts.field,
pretty: opts.pretty.unwrap_or(true),
one: opts.one.unwrap_or(false),
},
export,
}),
opts.open.unwrap_or_default(),
args,
)

@@ -136,9 +157,13 @@ impl ServerState {
/// Export the current document as Svg file(s).
pub fn export_svg(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
let opts = get_arg_or_default!(args[1] as ExportOpts);
let mut export = ExportTask::default();
select_page(&mut export, opts.page).map_err(invalid_params)?;
self.export(
req_id,
ExportKind::Svg { page: opts.page },
ProjectTask::ExportSvg(ExportSvgTask { export }),
opts.open.unwrap_or_default(),
args,
)

@@ -147,13 +172,23 @@ impl ServerState {
/// Export the current document as Png file(s).
pub fn export_png(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
let opts = get_arg_or_default!(args[1] as ExportOpts);
let ppi = opts.ppi.unwrap_or(144.);
let ppi = ppi
.try_into()
.context("cannot convert ppi")
.map_err(invalid_params)?;
let mut export = ExportTask::default();
select_page(&mut export, opts.page).map_err(invalid_params)?;
self.export(
req_id,
ExportKind::Png {
ProjectTask::ExportPng(ExportPngTask {
fill: opts.fill,
ppi: opts.ppi,
page: opts.page,
},
ppi,
export,
}),
opts.open.unwrap_or_default(),
args,
)

@@ -164,13 +199,13 @@ impl ServerState {
pub fn export(
&mut self,
req_id: RequestId,
kind: ExportKind,
task: ProjectTask,
open: bool,
mut args: Vec<JsonValue>,
) -> ScheduledResult {
let path = get_arg!(args[0] as PathBuf);
run_query!(req_id, self.OnExport(path, open, kind))
run_query!(req_id, self.OnExport(path, open, task))
}
/// Export a range of the current document as Ansi highlighted text.

@@ -679,3 +714,17 @@ impl ServerState {
})
}
}
/// Applies page selection to the export task.
fn select_page(task: &mut ExportTask, selection: PageSelection) -> Result<()> {
match selection {
PageSelection::First => task.transform.push(ExportTransform::Pages {
ranges: vec![Pages::FIRST],
}),
PageSelection::Merged { gap } => {
task.transform.push(ExportTransform::Merge { gap });
}
}
Ok(())
}
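These handlers receive the document path as `args[0]` and an options object matching `ExportOpts` or `QueryOpts` as `args[1]`. A hypothetical client-side payload for the PNG export command could look like the following sketch; the path and field values are illustrative, and the JSON shape of `page` assumes serde's default externally tagged representation of `PageSelection`:

```rust
use serde_json::{json, Value};

// Hypothetical payload: not taken from the commit, only shaped after ExportOpts above.
fn png_export_args() -> Vec<Value> {
    vec![
        // args[0]: path of the document to export.
        json!("/path/to/main.typ"),
        // args[1]: matches ExportOpts (ppi, fill, page, open).
        json!({
            "ppi": 192.0,
            "fill": "#ffffff",
            "page": { "merged": { "gap": "4pt" } },
            "open": false
        }),
    ]
}
```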
@@ -2,26 +2,25 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use crate::project::font::TinymistFontResolver;
use crate::world::EntryState;
use anyhow::bail;
use clap::Parser;
use itertools::Itertools;
use lsp_types::*;
use once_cell::sync::{Lazy, OnceCell};
use reflexo::path::PathClean;
use reflexo_typst::{ImmutPath, TypstDict};
use serde::{Deserialize, Serialize};
use serde_json::{json, Map, Value as JsonValue};
use strum::IntoEnumIterator;
use task::{FormatUserConfig, FormatterConfig};
use tinymist_project::{EntryResolver, ProjectResolutionKind};
use task::{ExportUserConfig, FormatUserConfig, FormatterConfig};
use tinymist_project::{
EntryResolver, ExportPdfTask, ExportTask, PathPattern, ProjectResolutionKind, ProjectTask,
TaskWhen,
};
use tinymist_query::analysis::{Modifier, TokenType};
use tinymist_query::{CompletionFeat, PositionEncoding};
use tinymist_render::PeriscopeArgs;
use typst::foundations::IntoValue;
use typst::syntax::FileId;
use typst_shim::utils::{Deferred, LazyHash};
use vfs::WorkspaceResolver;
// todo: svelte-language-server responds to a Goto Definition request with
// LocationLink[] even if the client does not report the

@@ -400,7 +399,7 @@ impl Config {
self.compile.validate()
}
/// Get the formatter configuration.
/// Gets the formatter configuration.
pub fn formatter(&self) -> FormatUserConfig {
let formatter_print_width = self.formatter_print_width.unwrap_or(120) as usize;

@@ -418,6 +417,24 @@ impl Config {
position_encoding: self.const_config.position_encoding,
}
}
/// Gets the export configuration.
pub(crate) fn export(&self) -> ExportUserConfig {
let compile_config = &self.compile;
ExportUserConfig {
task: ProjectTask::ExportPdf(ExportPdfTask {
export: ExportTask {
output: Some(compile_config.output_path.clone()),
when: compile_config.export_pdf,
transform: vec![],
},
pdf_standards: vec![],
creation_timestamp: compile_config.determine_creation_timestamp(),
}),
count_words: self.compile.notify_status,
}
}
}
/// Configuration set at initialization that won't change within a single

@@ -491,7 +508,7 @@ pub struct CompileConfig {
/// The output directory for PDF export.
pub output_path: PathPattern,
/// The mode of PDF export.
pub export_pdf: ExportMode,
pub export_pdf: TaskWhen,
/// Specifies the cli font options
pub font_opts: CompileFontArgs,
/// Whether to ignore system fonts

@@ -536,7 +553,7 @@ impl CompileConfig {
let project_resolution = deser_or_default!("projectResolution", ProjectResolutionKind);
self.output_path = deser_or_default!("outputPath", PathPattern);
self.export_pdf = deser_or_default!("exportPdf", ExportMode);
self.export_pdf = deser_or_default!("exportPdf", TaskWhen);
self.notify_status = match try_(|| update.get("compileStatus")?.as_str()) {
Some("enable") => true,
Some("disable") | None => false,

@@ -712,7 +729,7 @@ impl CompileConfig {
}
/// Determines the creation timestamp.
pub fn determine_creation_timestamp(&self) -> Option<chrono::DateTime<chrono::Utc>> {
pub fn determine_creation_timestamp(&self) -> Option<i64> {
self.typst_extra_args.as_ref()?.creation_timestamp
}

@@ -772,22 +789,6 @@ pub enum FormatterMode {
Typstfmt,
}
/// The mode of PDF/SVG/PNG export.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExportMode {
/// Never export.
#[default]
Never,
/// Export on saving the document, i.e. on `textDocument/didSave` events.
OnSave,
/// Export on typing, i.e. on `textDocument/didChange` events.
OnType,
/// Export when a document has a title and on saved, which is useful to
/// filter out template files.
OnDocumentHasTitle,
}
/// The mode of semantic tokens.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]

@@ -826,82 +827,17 @@ pub struct CompileExtraOpts {
pub font: CompileFontArgs,
/// Package related arguments.
pub package: CompilePackageArgs,
/// The creation timestamp for various output.
pub creation_timestamp: Option<chrono::DateTime<chrono::Utc>>,
/// The creation timestamp for various output (in seconds).
pub creation_timestamp: Option<i64>,
/// Path to certification file
pub cert: Option<ImmutPath>,
}
/// The path pattern that could be substituted.
///
/// # Examples
/// - `$root` is the root of the project.
/// - `$root/$dir` is the parent directory of the input (main) file.
/// - `$root/main` will help store pdf file to `$root/main.pdf` constantly.
/// - (default) `$root/$dir/$name` will help store pdf file along with the input
///   file.
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PathPattern(pub String);
impl PathPattern {
/// Creates a new path pattern.
pub fn new(pattern: &str) -> Self {
Self(pattern.to_owned())
}
/// Substitutes the path pattern with `$root`, and `$dir/$name`.
pub fn substitute(&self, entry: &EntryState) -> Option<ImmutPath> {
self.substitute_impl(entry.root(), entry.main())
}
#[comemo::memoize]
fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
log::info!("Check path {main:?} and root {root:?} with output directory {self:?}");
let (root, main) = root.zip(main)?;
// Files in packages are not exported
if WorkspaceResolver::is_package_file(main) {
return None;
}
// Files without a path are not exported
let path = main.vpath().resolve(&root)?;
// todo: handle untitled path
if let Ok(path) = path.strip_prefix("/untitled") {
let tmp = std::env::temp_dir();
let path = tmp.join("typst").join(path);
return Some(path.as_path().into());
}
if self.0.is_empty() {
return Some(path.to_path_buf().clean().into());
}
let path = path.strip_prefix(&root).ok()?;
let dir = path.parent();
let file_name = path.file_name().unwrap_or_default();
let w = root.to_string_lossy();
let f = file_name.to_string_lossy();
// replace all $root
let mut path = self.0.replace("$root", &w);
if let Some(dir) = dir {
let d = dir.to_string_lossy();
path = path.replace("$dir", &d);
}
path = path.replace("$name", &f);
Some(PathBuf::from(path).clean().into())
}
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
use typst::syntax::VirtualPath;
use tinymist_project::PathPattern;
#[test]
fn test_default_encoding() {

@@ -935,7 +871,7 @@ mod tests {
}
assert_eq!(config.compile.output_path, PathPattern::new("out"));
assert_eq!(config.compile.export_pdf, ExportMode::OnSave);
assert_eq!(config.compile.export_pdf, TaskWhen::OnSave);
assert_eq!(
config.compile.entry_resolver.root_path,
Some(ImmutPath::from(root_path))

@@ -965,12 +901,12 @@ mod tests {
config.update(&update).unwrap();
assert_eq!(config.compile.export_pdf, ExportMode::OnType);
assert_eq!(config.compile.export_pdf, TaskWhen::OnType);
}
#[test]
fn test_config_creation_timestamp() {
type Timestamp = Option<chrono::DateTime<chrono::Utc>>;
type Timestamp = Option<i64>;
fn timestamp(f: impl FnOnce(&mut Config)) -> Timestamp {
let mut config = Config::default();

@@ -1124,30 +1060,6 @@ mod tests {
}
}
#[test]
fn test_substitute_path() {
let root = Path::new("/root");
let entry =
EntryState::new_rooted(root.into(), Some(VirtualPath::new("/dir1/dir2/file.txt")));
assert_eq!(
PathPattern::new("/substitute/$dir/$name").substitute(&entry),
Some(PathBuf::from("/substitute/dir1/dir2/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/$dir/../$name").substitute(&entry),
Some(PathBuf::from("/substitute/dir1/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/$name").substitute(&entry),
Some(PathBuf::from("/substitute/file.txt").into())
);
assert_eq!(
PathPattern::new("/substitute/target/$dir/$name").substitute(&entry),
Some(PathBuf::from("/substitute/target/dir1/dir2/file.txt").into())
);
}
#[test]
fn test_default_formatting_config() {
let config = Config::default().formatter();
@@ -16,7 +16,7 @@ use tinymist_std::ImmutPath;
use typst::{diag::FileResult, syntax::Source};
use crate::route::ProjectResolution;
use crate::task::{ExportUserConfig, FormatterConfig};
use crate::task::FormatterConfig;
use crate::world::vfs::{notify::MemoryEvent, FileChangeSet};
use crate::world::TaskInputs;
use crate::{init::*, *};

@@ -61,11 +61,11 @@ impl ServerState {
&mut self,
values: Map<String, JsonValue>,
) -> LspResult<()> {
let config = self.config.clone();
let old_config = self.config.clone();
match self.config.update_by_map(&values) {
Ok(()) => {}
Err(err) => {
self.config = config;
self.config = old_config;
error!("error applying new settings: {err}");
return Err(invalid_params(format!(
"error applying new settings: {err}"

@@ -73,18 +73,12 @@ impl ServerState {
}
}
if config.compile.output_path != self.config.compile.output_path
|| config.compile.export_pdf != self.config.compile.export_pdf
{
let config = ExportUserConfig {
output: self.config.compile.output_path.clone(),
mode: self.config.compile.export_pdf,
};
self.change_export_config(config.clone());
let new_export_config = self.config.export();
if old_config.export() != new_export_config {
self.change_export_config(new_export_config);
}
if config.compile.primary_opts() != self.config.compile.primary_opts() {
if old_config.compile.primary_opts() != self.config.compile.primary_opts() {
self.config.compile.fonts = OnceCell::new(); // todo: don't reload fonts if not changed
let err = self.restart_primary();
if let Err(err) = err {

@@ -92,7 +86,7 @@ impl ServerState {
}
}
if config.semantic_tokens != self.config.semantic_tokens {
if old_config.semantic_tokens != self.config.semantic_tokens {
let err = self
.enable_sema_token_caps(self.config.semantic_tokens == SemanticTokensMode::Enable);
if let Err(err) = err {

@@ -101,7 +95,7 @@ impl ServerState {
}
let new_formatter_config = self.config.formatter();
if !config.formatter().eq(&new_formatter_config) {
if !old_config.formatter().eq(&new_formatter_config) {
let enabled = !matches!(new_formatter_config.config, FormatterConfig::Disable);
let err = self.enable_formatter_caps(enabled);
if let Err(err) = err {
@@ -316,7 +316,7 @@ impl CompileHandler<LspCompilerFeat, ProjectInsStateExt> for CompileHandlerImpl
);
self.client.send_event(LspInterrupt::Compiled(snap.clone()));
self.export.signal(snap, snap.signal);
self.export.signal(snap);
self.editor_tx
.send(EditorRequest::Status(CompileStatus {
@@ -7,13 +7,13 @@ use std::sync::Arc;
use log::{error, info};
use lsp_types::*;
use serde::{Deserialize, Serialize};
use sync_lsp::*;
use task::ExportUserConfig;
use tinymist_project::{EntryResolver, LspCompileSnapshot, ProjectInsId};
use tinymist_query::analysis::{Analysis, PeriscopeProvider};
use tinymist_query::{
CompilerQueryRequest, ExportKind, LocalContext, LspWorldExt, OnExportRequest, PageSelection,
ServerInfoResponse, VersionedDocument,
CompilerQueryRequest, LocalContext, LspWorldExt, OnExportRequest, ServerInfoResponse,
VersionedDocument,
};
use tinymist_render::PeriscopeRenderer;
use tinymist_std::error::prelude::*;

@@ -32,7 +32,7 @@ use crate::project::{
use crate::route::ProjectRouteState;
use crate::state::query::OnEnter;
use crate::stats::CompilerQueryStats;
use crate::task::{ExportConfig, ExportTask, ExportUserConfig, FormatTask, UserActionTask};
use crate::task::{ExportTask, FormatTask, UserActionTask};
use crate::world::{ImmutDict, LspUniverseBuilder, TaskInputs};
use crate::{init::*, *};
@@ -406,22 +406,13 @@ impl ServerState {
) -> ProjectState {
let compile_config = &config.compile;
let const_config = &config.const_config;
// use codespan_reporting::term::Config;
// Run Export actors before preparing cluster to avoid loss of events
let export_config = ExportConfig {
group: diag_group.clone(),
editor_tx: Some(editor_tx.clone()),
config: ExportUserConfig {
output: compile_config.output_path.clone(),
mode: compile_config.export_pdf,
},
kind: ExportKind::Pdf {
creation_timestamp: config.compile.determine_creation_timestamp(),
},
count_words: config.compile.notify_status,
};
let export = ExportTask::new(client.handle.clone(), export_config);
let export = ExportTask::new(
client.handle.clone(),
diag_group.clone(),
Some(editor_tx.clone()),
config.export(),
);
log::info!(
"TypstActor: creating server for {diag_group}, entry: {entry:?}, inputs: {inputs:?}"

@@ -522,7 +513,7 @@ impl ServerState {
/// Export the current document.
pub fn on_export(&mut self, req: OnExportRequest) -> QueryFuture {
let OnExportRequest { path, kind, open } = req;
let OnExportRequest { path, task, open } = req;
let entry = self.entry_resolver().resolve(Some(path.as_path().into()));
let lock_dir = self.compile_config().entry_resolver.resolve_lock(&entry);

@@ -542,13 +533,14 @@ impl ServerState {
});
let snap = self.snapshot()?;
let task = self.project.export.factory.task();
just_future(async move {
let snap = snap.task(TaskInputs {
entry: Some(entry),
..Default::default()
});
let res = task.oneshot(snap.clone(), kind, lock_dir).await?;
let artifact = snap.clone().compile();
let res = ExportTask::do_export(task, artifact, lock_dir).await?;
if let Some(update_dep) = update_dep {
tokio::spawn(update_dep(snap));
}

@@ -589,11 +581,6 @@ impl PeriscopeProvider for TypstPeriscopeProvider {
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ExportOpts {
page: PageSelection,
}
#[test]
fn test_as_path() {
use reflexo::path::PathClean;
@@ -4,50 +4,50 @@ use std::str::FromStr;
use std::{path::PathBuf, sync::Arc};
use crate::project::{
CompiledArtifact, ExportHtmlTask, ExportMarkdownTask, ExportPdfTask, ExportPngTask,
ExportSignal, ExportTextTask, ProjectTask, TaskWhen,
ApplyProjectTask, CompiledArtifact, ExportHtmlTask, ExportMarkdownTask, ExportPdfTask,
ExportPngTask, ExportTextTask, TaskWhen,
};
use anyhow::{bail, Context};
use anyhow::bail;
use reflexo::ImmutPath;
use reflexo_typst::TypstDatetime;
use tinymist_project::{EntryReader, LspCompileSnapshot, LspCompiledArtifact};
use tinymist_query::{ExportKind, PageSelection};
use reflexo_typst::{TypstAbs as Abs, TypstDatetime};
use tinymist_project::{
convert_source_date_epoch, EntryReader, ExportSvgTask, ExportTask as ProjectExportTask,
ExportTransform, LspCompiledArtifact, Pages, ProjectTask, QueryTask,
};
use tinymist_std::error::prelude::*;
use tokio::sync::mpsc;
use typlite::Typlite;
use typst::foundations::IntoValue;
use typst::{
layout::Abs,
syntax::{ast, SyntaxNode},
visualize::Color,
};
use typst::syntax::{ast, SyntaxNode};
use typst::visualize::Color;
use typst_pdf::PdfOptions;
use crate::tool::text::FullTextDigest;
use crate::{actor::editor::EditorRequest, tool::word_count, ExportMode, PathPattern};
use crate::{actor::editor::EditorRequest, tool::word_count};
use super::*;
/// User configuration for export.
#[derive(Debug, Clone, Default)]
pub struct ExportUserConfig {
/// The output path pattern.
pub output: PathPattern,
/// The export mode.
pub mode: ExportMode,
}
#[derive(Clone)]
pub struct ExportTask {
pub handle: tokio::runtime::Handle,
pub factory: SyncTaskFactory<ExportConfig>,
pub group: String,
pub editor_tx: Option<mpsc::UnboundedSender<EditorRequest>>,
pub factory: SyncTaskFactory<ExportUserConfig>,
export_folder: FutureFolder,
count_word_folder: FutureFolder,
}
impl ExportTask {
pub fn new(handle: tokio::runtime::Handle, data: ExportConfig) -> Self {
pub fn new(
handle: tokio::runtime::Handle,
group: String,
editor_tx: Option<mpsc::UnboundedSender<EditorRequest>>,
data: ExportUserConfig,
) -> Self {
Self {
handle,
group,
editor_tx,
factory: SyncTaskFactory::new(data),
export_folder: FutureFolder::default(),
count_word_folder: FutureFolder::default(),

@@ -55,94 +55,71 @@ impl ExportTask {
}
pub fn change_config(&self, config: ExportUserConfig) {
self.factory.mutate(|data| data.config = config);
self.factory.mutate(|data| *data = config);
}
pub fn signal(&self, snap: &LspCompiledArtifact, s: ExportSignal) {
let task = self.factory.task();
task.signal(snap, s, self);
}
}
pub fn signal(&self, snap: &LspCompiledArtifact) {
let config = self.factory.task();
pub struct ExportOnceTask<'a> {
pub kind: &'a ExportKind,
pub artifact: LspCompiledArtifact,
pub lock_path: Option<ImmutPath>,
}
#[derive(Clone, Default)]
pub struct ExportConfig {
pub group: String,
pub editor_tx: Option<mpsc::UnboundedSender<EditorRequest>>,
pub config: ExportUserConfig,
pub kind: ExportKind,
pub count_words: bool,
}
impl ExportConfig {
fn signal(self: Arc<Self>, snap: &LspCompiledArtifact, s: ExportSignal, t: &ExportTask) {
self.signal_export(snap, s, t);
self.signal_count_word(snap, t);
self.signal_export(snap, &config);
self.signal_count_word(snap, &config);
}
fn signal_export(
self: &Arc<Self>,
&self,
artifact: &LspCompiledArtifact,
s: ExportSignal,
t: &ExportTask,
config: &Arc<ExportUserConfig>,
) -> Option<()> {
let doc = artifact.doc.as_ref().ok()?;
let s = artifact.signal;
let mode = self.config.mode;
let need_export = (!matches!(mode, ExportMode::Never) && s.by_entry_update)
|| match mode {
ExportMode::Never => false,
ExportMode::OnType => s.by_mem_events,
ExportMode::OnSave => s.by_fs_events,
ExportMode::OnDocumentHasTitle => s.by_fs_events && doc.info.title.is_some(),
let when = config.task.when().unwrap_or_default();
let need_export = (!matches!(when, TaskWhen::Never) && s.by_entry_update)
|| match when {
TaskWhen::Never => false,
TaskWhen::OnType => s.by_mem_events,
TaskWhen::OnSave => s.by_fs_events,
TaskWhen::OnDocumentHasTitle => s.by_fs_events && doc.info.title.is_some(),
};
if !need_export {
return None;
}
let fut = t.export_folder.spawn(artifact.world.revision().get(), || {
let this = self.clone();
let rev = artifact.world.revision().get();
let fut = self.export_folder.spawn(rev, || {
let task = config.task.clone();
let artifact = artifact.clone();
Box::pin(async move {
log_err(
this.do_export(ExportOnceTask {
kind: &this.kind,
artifact,
lock_path: None,
})
.await,
);
log_err(Self::do_export(task, artifact, None).await);
Some(())
})
})?;
t.handle.spawn(fut);
self.handle.spawn(fut);
Some(())
}
fn signal_count_word(&self, artifact: &LspCompiledArtifact, t: &ExportTask) -> Option<()> {
if !self.count_words {
fn signal_count_word(
&self,
artifact: &LspCompiledArtifact,
config: &Arc<ExportUserConfig>,
) -> Option<()> {
if !config.count_words {
return None;
}
let editor_tx = self.editor_tx.clone()?;
let revision = artifact.world.revision().get();
let fut = t.count_word_folder.spawn(revision, || {
let rev = artifact.world.revision().get();
let fut = self.count_word_folder.spawn(rev, || {
let artifact = artifact.clone();
let group = self.group.clone();
Box::pin(async move {
let doc = artifact.doc.ok()?;
let wc =
log_err(FutureFolder::compute(move |_| word_count::word_count(&doc)).await);
log::debug!("WordCount({group}:{revision}): {wc:?}");
log::debug!("WordCount({group}:{rev}): {wc:?}");
if let Some(wc) = wc {
let _ = editor_tx.send(EditorRequest::WordCount(group, wc));
@@ -152,40 +129,39 @@ impl ExportConfig {
})
})?;
t.handle.spawn(fut);
self.handle.spawn(fut);
Some(())
}
async fn do_export(&self, task: ExportOnceTask<'_>) -> anyhow::Result<Option<PathBuf>> {
pub async fn do_export(
task: ProjectTask,
artifact: LspCompiledArtifact,
lock_dir: Option<ImmutPath>,
) -> anyhow::Result<Option<PathBuf>> {
use reflexo_vec2svg::DefaultExportFeature;
use ExportKind::*;
use PageSelection::*;
use ProjectTask::*;
let ExportOnceTask {
kind,
artifact: CompiledArtifact { snap, doc, .. },
lock_path: lock_dir,
} = task;
let CompiledArtifact { snap, doc, .. } = artifact;
// Prepare the output path.
let entry = snap.world.entry_state();
let Some(to) = self.config.output.substitute(&entry) else {
let config = task.as_export().unwrap();
let output = config.output.clone().unwrap_or_default();
let Some(to) = output.substitute(&entry) else {
return Ok(None);
};
if to.is_relative() {
bail!("RenderActor({kind:?}): path is relative: {to:?}");
bail!("RenderActor({task:?}): output path is relative: {to:?}");
}
if to.is_dir() {
bail!("RenderActor({kind:?}): path is a directory: {to:?}");
bail!("RenderActor({task:?}): output path is a directory: {to:?}");
}
let to = to.with_extension(kind.extension());
log::info!("RenderActor({kind:?}): exporting {entry:?} to {to:?}");
let to = to.with_extension(task.extension());
log::info!("RenderActor({task:?}): exporting {entry:?} to {to:?}");
if let Some(e) = to.parent() {
if !e.exists() {
std::fs::create_dir_all(e).with_context(|| {
format!("RenderActor({kind:?}): failed to create directory")
})?;
std::fs::create_dir_all(e).context("failed to create directory")?;
}
}

@@ -193,65 +169,12 @@ impl ExportConfig {
let mut updater = crate::project::update_lock(lock_dir);
let doc_id = updater.compiled(&snap.world)?;
let task_id = doc_id.clone();
let when = match self.config.mode {
ExportMode::Never => TaskWhen::Never,
ExportMode::OnType => TaskWhen::OnType,
ExportMode::OnSave => TaskWhen::OnSave,
ExportMode::OnDocumentHasTitle => TaskWhen::OnSave,
};
// todo: page transforms
let transforms = vec![];
use tinymist_project::ExportTask as ProjectExportTask;
let export = ProjectExportTask {
updater.task(ApplyProjectTask {
id: doc_id.clone(),
document: doc_id,
id: task_id,
when,
transform: transforms,
};
let task = match kind {
Pdf { creation_timestamp } => {
let _ = creation_timestamp;
ProjectTask::ExportPdf(ExportPdfTask {
export,
pdf_standards: Default::default(),
creation_timestamp: None,
})
}
Html {} => ProjectTask::ExportHtml(ExportHtmlTask { export }),
Markdown {} => ProjectTask::ExportMarkdown(ExportMarkdownTask { export }),
Text {} => ProjectTask::ExportText(ExportTextTask { export }),
Query { .. } => {
// todo: ignoring query task.
return None;
}
Svg { page } => {
// todo: ignoring page selection.
let _ = page;
return None;
}
Png { ppi, fill, page } => {
// todo: ignoring page fill.
let _ = fill;
// todo: ignoring page selection.
let _ = page;
let ppi = ppi.unwrap_or(144.) as f32;
let ppi = ppi.try_into().unwrap();
ProjectTask::ExportPng(ExportPngTask {
export,
ppi,
fill: None,
})
}
};
updater.task(task);
task: task.clone(),
});
updater.commit();
Some(())

@@ -261,36 +184,44 @@ impl ExportConfig {
let doc = doc.map_err(|_| anyhow::anyhow!("no document"))?;
// Prepare data.
let kind2 = kind.clone();
let kind2 = task.clone();
let data = FutureFolder::compute(move |_| -> anyhow::Result<Vec<u8>> {
let doc = &doc;
// static BLANK: Lazy<Page> = Lazy::new(Page::default);
let first_page = doc.pages.first().unwrap();
Ok(match kind2 {
Pdf { creation_timestamp } => {
let timestamp =
convert_datetime(creation_timestamp.unwrap_or_else(chrono::Utc::now));
// todo: Some(pdf_uri.as_str())
Preview(..) => vec![],
// todo: more pdf flags
ExportPdf(ExportPdfTask {
creation_timestamp, ..
}) => {
// todo: timestamp world.now()
let creation_timestamp = creation_timestamp
.map(convert_source_date_epoch)
.transpose()
.context_ut("parse pdf creation timestamp")?
.unwrap_or_else(chrono::Utc::now);
// todo: Some(pdf_uri.as_str())
typst_pdf::pdf(
doc,
&PdfOptions {
timestamp,
timestamp: convert_datetime(creation_timestamp),
..Default::default()
},
)
.map_err(|e| anyhow::anyhow!("failed to convert to pdf: {e:?}"))?
}
Query {
format,
Query(QueryTask {
export: _,
output_extension: _,
strict,
format,
selector,
field,
one,
pretty,
} => {
}) => {
let pretty = false;
let elements = reflexo_typst::query::retrieve(&snap.world, &selector, doc)
.map_err(|e| anyhow::anyhow!("failed to retrieve: {e}"))?;
if one && elements.len() != 1 {

@@ -309,46 +240,35 @@ impl ExportConfig {
let Some(value) = mapped.first() else {
bail!("no such field found for element");
};
serialize(value, &format, strict, pretty).map(String::into_bytes)?
serialize(value, &format, pretty).map(String::into_bytes)?
} else {
serialize(&mapped, &format, strict, pretty).map(String::into_bytes)?
serialize(&mapped, &format, pretty).map(String::into_bytes)?
}
}
Html {} => {
ExportHtml(ExportHtmlTask { export: _ }) => {
reflexo_vec2svg::render_svg_html::<DefaultExportFeature>(doc).into_bytes()
}
Text {} => format!("{}", FullTextDigest(doc.clone())).into_bytes(),
Markdown {} => {
ExportText(ExportTextTask { export: _ }) => {
format!("{}", FullTextDigest(doc.clone())).into_bytes()
}
ExportMarkdown(ExportMarkdownTask { export: _ }) => {
let conv = Typlite::new(Arc::new(snap.world))
.convert()
.map_err(|e| anyhow::anyhow!("failed to convert to markdown: {e}"))?;
conv.as_bytes().to_owned()
}
Svg { page: First } => typst_svg::svg(first_page).into_bytes(),
Svg {
page: Merged { .. },
} => typst_svg::svg_merged(doc, Abs::zero()).into_bytes(),
Png {
ppi,
fill: _,
page: First,
} => {
let ppi = ppi.unwrap_or(144.) as f32;
if ppi <= 1e-6 {
bail!("invalid ppi: {ppi}");
}
ExportSvg(ExportSvgTask { export }) => {
let (is_first, merged_gap) = get_page_selection(&export)?;
typst_render::render(first_page, ppi / 72.)
.encode_png()
.map_err(|err| anyhow::anyhow!("failed to encode PNG ({err})"))?
if is_first {
typst_svg::svg(first_page).into_bytes()
} else {
typst_svg::svg_merged(doc, merged_gap).into_bytes()
}
}
Png {
ppi,
fill,
page: Merged { gap },
} => {
let ppi = ppi.unwrap_or(144.) as f32;
ExportPng(ExportPngTask { export, ppi, fill }) => {
let ppi = ppi.to_f32();
if ppi <= 1e-6 {
bail!("invalid ppi: {ppi}");
}

@@ -359,13 +279,15 @@ impl ExportConfig {
Color::WHITE
};
let gap = if let Some(gap) = gap {
parse_length(gap).map_err(|err| anyhow::anyhow!("invalid gap ({err})"))?
let (is_first, merged_gap) = get_page_selection(&export)?;
let pixmap = if is_first {
typst_render::render(first_page, ppi / 72.)
} else {
Abs::zero()
typst_render::render_merged(doc, ppi / 72., merged_gap, Some(fill))
};
typst_render::render_merged(doc, ppi / 72., gap, Some(fill))
pixmap
.encode_png()
.map_err(|err| anyhow::anyhow!("failed to encode PNG ({err})"))?
}

@@ -374,25 +296,34 @@ impl ExportConfig {
tokio::fs::write(&to, data.await??)
.await
.with_context(|| format!("RenderActor({kind:?}): failed to export"))?;
.context("failed to export")?;
log::info!("RenderActor({kind:?}): export complete");
log::info!("RenderActor({task:?}): export complete");
Ok(Some(to))
}
}
pub async fn oneshot(
&self,
snap: LspCompileSnapshot,
kind: ExportKind,
lock_path: Option<ImmutPath>,
) -> anyhow::Result<Option<PathBuf>> {
let artifact = snap.compile();
self.do_export(ExportOnceTask {
kind: &kind,
artifact,
lock_path,
})
.await
/// User configuration for export.
#[derive(Clone, PartialEq, Eq)]
pub struct ExportUserConfig {
pub task: ProjectTask,
pub count_words: bool,
}
impl Default for ExportUserConfig {
fn default() -> Self {
Self {
task: ProjectTask::ExportPdf(ExportPdfTask {
export: ProjectExportTask {
when: TaskWhen::Never,
output: None,
transform: vec![],
},
pdf_standards: vec![],
|
||||
creation_timestamp: None,
|
||||
}),
|
||||
count_words: false,
|
||||
}
|
||||
}
|
||||
}
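
Note (reviewer sketch, not part of this change): a minimal example of overriding this default so that the PDF export runs while typing instead of never. It only uses names that appear in this diff and assumes `ProjectTask::when()` behaves as in the `test_default_never` test below.

let config = ExportUserConfig {
    task: ProjectTask::ExportPdf(ExportPdfTask {
        export: ProjectExportTask {
            // switch from `TaskWhen::Never` to export on typing
            when: TaskWhen::OnType,
            output: None,
            transform: vec![],
        },
        pdf_standards: vec![],
        creation_timestamp: None,
    }),
    count_words: true,
};
assert_eq!(config.task.when(), Some(TaskWhen::OnType));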

@ -410,8 +341,83 @@ fn parse_color(fill: String) -> anyhow::Result<Color> {
}
}

fn parse_length(gap: String) -> anyhow::Result<Abs> {
let length = typst::syntax::parse_code(&gap);
fn log_err<T>(artifact: anyhow::Result<T>) -> Option<T> {
match artifact {
Ok(v) => Some(v),
Err(err) => {
log::error!("{err}");
None
}
}
}

/// Convert [`chrono::DateTime`] to [`TypstDatetime`]
fn convert_datetime(date_time: chrono::DateTime<chrono::Utc>) -> Option<TypstDatetime> {
use chrono::{Datelike, Timelike};
TypstDatetime::from_ymd_hms(
date_time.year(),
date_time.month().try_into().ok()?,
date_time.day().try_into().ok()?,
date_time.hour().try_into().ok()?,
date_time.minute().try_into().ok()?,
date_time.second().try_into().ok()?,
)
}
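
Note (reviewer sketch, not part of this change): feeding a real UTC timestamp through the helper above. `TypstDatetime::from_ymd_hms` returns `None` only when a component is out of range, so converting the current time is expected to succeed.

let now = chrono::Utc::now();
let converted: Option<TypstDatetime> = convert_datetime(now);
assert!(converted.is_some());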

/// Serialize data to the output format.
fn serialize(data: &impl serde::Serialize, format: &str, pretty: bool) -> anyhow::Result<String> {
Ok(match format {
"json" if pretty => serde_json::to_string_pretty(data)?,
"json" => serde_json::to_string(data)?,
"yaml" => serde_yaml::to_string(&data)?,
"txt" => {
use serde_json::Value::*;
let value = serde_json::to_value(data)?;
match value {
String(s) => s,
_ => {
let kind = match value {
Null => "null",
Bool(_) => "boolean",
Number(_) => "number",
String(_) => "string",
Array(_) => "array",
Object(_) => "object",
};
bail!("expected a string value for format: {format}, got {kind}")
}
}
}
_ => bail!("unsupported format for query: {format}"),
})
}
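
Note (reviewer sketch, not part of this change; the sample value is made up): the three-argument `serialize` above handles `json`, `yaml`, and `txt`, where `txt` only accepts plain string values.

let record = serde_json::json!({ "kind": "heading", "depth": 1 });
assert!(serialize(&record, "json", true).unwrap().contains('\n')); // pretty-printed
assert!(serialize(&record, "yaml", false).is_ok());
// `txt` refuses non-string values and reports the offending kind...
assert!(serialize(&record, "txt", false).is_err());
// ...but passes plain strings through unchanged.
assert_eq!(serialize(&serde_json::json!("ok"), "txt", false).unwrap(), "ok");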

/// Gets legacy page selection
pub fn get_page_selection(task: &tinymist_project::ExportTask) -> Result<(bool, Abs)> {
let is_first = task
.transform
.iter()
.any(|t| matches!(t, ExportTransform::Pages { ranges, .. } if ranges == &[Pages::FIRST]));

let mut gap_res = Abs::default();
if !is_first {
for trans in &task.transform {
if let ExportTransform::Merge { gap } = trans {
let gap = gap
.as_deref()
.map(parse_length)
.transpose()
.context_ut("failed to parse gap")?;
gap_res = gap.unwrap_or_default();
}
}
}

Ok((is_first, gap_res))
}
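
Note (reviewer sketch, not part of this change): how the returned pair is consumed, mirroring the SVG export arm earlier in this diff. The boolean selects a first-page-only export; otherwise the `Abs` value is the merge gap parsed from a `Merge { gap }` transform (e.g. `Some("1cm")` parses to `Abs::cm(1.)`), falling back to `Abs::default()`.

let (is_first, merged_gap) = get_page_selection(&export)?;
let svg = if is_first {
    // a `Pages { ranges: [Pages::FIRST] }` transform selects only the first page
    typst_svg::svg(first_page)
} else {
    // otherwise all pages are merged, separated by `merged_gap`
    typst_svg::svg_merged(doc, merged_gap)
};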

fn parse_length(gap: &str) -> anyhow::Result<Abs> {
let length = typst::syntax::parse_code(gap);
if length.erroneous() {
bail!("invalid length: {gap}, errors: {:?}", length.errors());
}

@ -443,81 +449,15 @@ fn descendants(node: &SyntaxNode) -> impl IntoIterator<Item = &SyntaxNode> + '_
res
}

fn log_err<T>(artifact: anyhow::Result<T>) -> Option<T> {
match artifact {
Ok(v) => Some(v),
Err(err) => {
log::error!("{err}");
None
}
}
}

/// Convert [`chrono::DateTime`] to [`TypstDatetime`]
fn convert_datetime(date_time: chrono::DateTime<chrono::Utc>) -> Option<TypstDatetime> {
use chrono::{Datelike, Timelike};
TypstDatetime::from_ymd_hms(
date_time.year(),
date_time.month().try_into().ok()?,
date_time.day().try_into().ok()?,
date_time.hour().try_into().ok()?,
date_time.minute().try_into().ok()?,
date_time.second().try_into().ok()?,
)
}

/// Serialize data to the output format.
fn serialize(
data: &impl serde::Serialize,
format: &str,
strict: bool,
pretty: bool,
) -> anyhow::Result<String> {
Ok(match format {
"json" if pretty => serde_json::to_string_pretty(data)?,
"json" => serde_json::to_string(data)?,
"yaml" => serde_yaml::to_string(&data)?,
format if format == "txt" || !strict => {
use serde_json::Value::*;
let value = serde_json::to_value(data)?;
match value {
String(s) => s,
_ => {
let kind = match value {
Null => "null",
Bool(_) => "boolean",
Number(_) => "number",
String(_) => "string",
Array(_) => "array",
Object(_) => "object",
};
bail!("expected a string value for format: {format}, got {kind}")
}
}
}
_ => bail!("unsupported format for query: {format}"),
})
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn test_default_never() {
let conf = ExportConfig::default();
let conf = ExportUserConfig::default();
assert!(!conf.count_words);
assert_eq!(conf.config.mode, ExportMode::Never);
}

#[test]
fn test_parse_length() {
assert_eq!(parse_length("1pt".to_owned()).unwrap(), Abs::pt(1.));
assert_eq!(parse_length("1mm".to_owned()).unwrap(), Abs::mm(1.));
assert_eq!(parse_length("1cm".to_owned()).unwrap(), Abs::cm(1.));
assert_eq!(parse_length("1in".to_owned()).unwrap(), Abs::inches(1.));
assert!(parse_length("1".to_owned()).is_err());
assert!(parse_length("1px".to_owned()).is_err());
assert_eq!(conf.task.when(), Some(TaskWhen::Never));
}

#[test]

@ -541,4 +481,14 @@ mod tests {
);
assert!(parse_color("invalid".to_owned()).is_err());
}

#[test]
fn test_parse_length() {
assert_eq!(parse_length("1pt").unwrap(), Abs::pt(1.));
assert_eq!(parse_length("1mm").unwrap(), Abs::mm(1.));
assert_eq!(parse_length("1cm").unwrap(), Abs::cm(1.));
assert_eq!(parse_length("1in").unwrap(), Abs::inches(1.));
assert!(parse_length("1").is_err());
assert!(parse_length("1px").is_err());
}
}

@ -595,7 +595,12 @@ pub async fn preview_main(args: PreviewCliArgs) -> Result<()> {
let compile_handle = Arc::new(CompileHandlerImpl {
preview: preview_state.clone(),
diag_group: "main".to_owned(),
export: crate::task::ExportTask::new(handle, Default::default()),
export: crate::task::ExportTask::new(
handle,
String::default(),
None,
Default::default(),
),
editor_tx,
client: Box::new(intr_tx.clone()),
analysis: Arc::default(),

@ -79,11 +79,12 @@ impl LockFileExt for LockFile {
.unwrap_or(doc_id.clone());

let when = args.when.unwrap_or(TaskWhen::OnType);
let task = ProjectTask::Preview(PreviewTask {
let task = ProjectTask::Preview(PreviewTask { when });
let task = ApplyProjectTask {
id: task_id.clone(),
document: doc_id,
when,
});
task,
};

self.replace_task(task);