mirror of https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-08-04 02:08:17 +00:00

feat: split tinymist-task (#1277)

* feat: split tinymist-task
* cargo lock
* revert html changes
* Revert "revert html changes" (this reverts commit b82662e441)
* Revert "Revert "revert html changes"" (this reverts commit b42643399c)
* fix: examples

This commit is contained in:
parent 2c335b25d1
commit 3799db6dd4
24 changed files with 1538 additions and 696 deletions
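The diff below moves the task model (TaskWhen, PathPattern, Pages, the export task specifiers, and the export computations) out of the project/LSP crates into a new tinymist-task crate, which the existing crates then re-export (`pub use tinymist_task as task;`). As a rough sketch of what consuming the split crate might look like, assuming only the items visible in this diff (not part of the commit):

// Sketch only: uses `PathPattern` and `TaskWhen` as re-exported by the new crate.
use tinymist_task::{PathPattern, TaskWhen};

fn default_export_policy() -> (TaskWhen, PathPattern) {
    // `TaskWhen::Never` is the enum's `#[default]`, and `$root/$dir/$name`
    // is the documented default output pattern.
    (TaskWhen::Never, PathPattern::new("$root/$dir/$name"))
}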
@@ -27,6 +27,7 @@ rpds.workspace = true
semver.workspace = true
serde.workspace = true
serde_json.workspace = true
tinymist-task.workspace = true
tinymist-world = { workspace = true }
tinymist-std = { workspace = true, features = ["typst"] }
tinymist-derive.workspace = true
@@ -104,12 +104,6 @@ pub struct DocIdArgs {
    pub input: String,
}

impl From<&ResourcePath> for Id {
    fn from(value: &ResourcePath) -> Self {
        Id::new(value.to_string())
    }
}

impl From<&DocIdArgs> for Id {
    fn from(args: &DocIdArgs) -> Self {
        if let Some(id) = &args.name {
@ -1,448 +1,17 @@
|
|||
use core::fmt;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::num::NonZeroUsize;
|
||||
use std::ops::RangeInclusive;
|
||||
use std::hash::Hash;
|
||||
use std::path::PathBuf;
|
||||
use std::{path::Path, str::FromStr};
|
||||
|
||||
use clap::ValueEnum;
|
||||
use ecow::EcoVec;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::path::{unix_slash, PathClean};
|
||||
use tinymist_std::{bail, ImmutPath};
|
||||
use tinymist_world::vfs::WorkspaceResolver;
|
||||
use tinymist_world::{EntryReader, EntryState};
|
||||
use typst::diag::EcoString;
|
||||
use typst::syntax::FileId;
|
||||
|
||||
pub mod task;
|
||||
pub use task::*;
|
||||
|
||||
use crate::LspWorld;
|
||||
pub use tinymist_task as task;
|
||||
|
||||
/// The currently using lock file version.
|
||||
pub const LOCK_VERSION: &str = "0.1.0-beta0";
|
||||
|
||||
/// A scalar that is not NaN.
|
||||
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Scalar(f32);
|
||||
|
||||
impl TryFrom<f32> for Scalar {
|
||||
type Error = &'static str;
|
||||
|
||||
fn try_from(value: f32) -> Result<Self, Self::Error> {
|
||||
if value.is_nan() {
|
||||
Err("NaN is not a valid scalar value")
|
||||
} else {
|
||||
Ok(Scalar(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Scalar {
|
||||
/// Converts the scalar to an f32.
|
||||
pub fn to_f32(self) -> f32 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Scalar {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.0 == other.0
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Scalar {}
|
||||
|
||||
impl Hash for Scalar {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.0.to_bits().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Scalar {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Scalar {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.0.partial_cmp(&other.0).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// A project ID.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct Id(String);
|
||||
|
||||
impl Id {
|
||||
/// Creates a new project Id.
|
||||
pub fn new(s: String) -> Self {
|
||||
Id(s)
|
||||
}
|
||||
|
||||
/// Creates a new project Id from a world.
|
||||
pub fn from_world(world: &LspWorld) -> Option<Self> {
|
||||
let entry = world.entry_state();
|
||||
let id = unix_slash(entry.main()?.vpath().as_rootless_path());
|
||||
|
||||
let path = &ResourcePath::from_user_sys(Path::new(&id));
|
||||
Some(path.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Id {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! display_possible_values {
|
||||
($ty:ty) => {
|
||||
impl fmt::Display for $ty {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.to_possible_value()
|
||||
.expect("no values are skipped")
|
||||
.get_name()
|
||||
.fmt(f)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// When to export an output file.
|
||||
///
|
||||
/// By default, a `tinymist compile` only provides input information and
|
||||
/// doesn't change the `when` field. However, you can still specify a `when`
|
||||
/// argument to override the default behavior for specific tasks.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ```bash
|
||||
/// tinymist compile --when onSave main.typ
|
||||
/// alias typst="tinymist compile --when=onSave"
|
||||
/// typst compile main.typ
|
||||
/// ```
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[clap(rename_all = "camelCase")]
|
||||
pub enum TaskWhen {
|
||||
/// Never watch to run task.
|
||||
#[default]
|
||||
Never,
|
||||
/// Run task on saving the document, i.e. on `textDocument/didSave` events.
|
||||
OnSave,
|
||||
/// Run task on typing, i.e. on `textDocument/didChange` events.
|
||||
OnType,
|
||||
/// *DEPRECATED* Run task when a document has a title and on saved, which is
|
||||
/// useful to filter out template files.
|
||||
///
|
||||
/// Note: this is deprecating.
|
||||
OnDocumentHasTitle,
|
||||
}
|
||||
|
||||
impl TaskWhen {
|
||||
/// Returns `true` if the task should never be run automatically.
|
||||
pub fn is_never(&self) -> bool {
|
||||
matches!(self, TaskWhen::Never)
|
||||
}
|
||||
}
|
||||
|
||||
display_possible_values!(TaskWhen);
|
||||
|
||||
/// Which format to use for the generated output file.
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
|
||||
pub enum OutputFormat {
|
||||
/// Export to PDF.
|
||||
Pdf,
|
||||
/// Export to PNG.
|
||||
Png,
|
||||
/// Export to SVG.
|
||||
Svg,
|
||||
/// Export to HTML.
|
||||
Html,
|
||||
}
|
||||
|
||||
display_possible_values!(OutputFormat);
|
||||
|
||||
/// The path pattern that could be substituted.
|
||||
///
|
||||
/// # Examples
|
||||
/// - `$root` is the root of the project.
|
||||
/// - `$root/$dir` is the parent directory of the input (main) file.
|
||||
/// - `$root/main` will help store pdf file to `$root/main.pdf` constantly.
|
||||
/// - (default) `$root/$dir/$name` will help store pdf file along with the input
|
||||
/// file.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct PathPattern(pub String);
|
||||
|
||||
impl fmt::Display for PathPattern {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl PathPattern {
|
||||
/// Creates a new path pattern.
|
||||
pub fn new(pattern: &str) -> Self {
|
||||
Self(pattern.to_owned())
|
||||
}
|
||||
|
||||
/// Substitutes the path pattern with `$root`, and `$dir/$name`.
|
||||
pub fn substitute(&self, entry: &EntryState) -> Option<ImmutPath> {
|
||||
self.substitute_impl(entry.root(), entry.main())
|
||||
}
|
||||
|
||||
#[comemo::memoize]
|
||||
fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
|
||||
log::info!("Check path {main:?} and root {root:?} with output directory {self:?}");
|
||||
|
||||
let (root, main) = root.zip(main)?;
|
||||
|
||||
// Files in packages are not exported
|
||||
if WorkspaceResolver::is_package_file(main) {
|
||||
return None;
|
||||
}
|
||||
// Files without a path are not exported
|
||||
let path = main.vpath().resolve(&root)?;
|
||||
|
||||
// todo: handle untitled path
|
||||
if let Ok(path) = path.strip_prefix("/untitled") {
|
||||
let tmp = std::env::temp_dir();
|
||||
let path = tmp.join("typst").join(path);
|
||||
return Some(path.as_path().into());
|
||||
}
|
||||
|
||||
if self.0.is_empty() {
|
||||
return Some(path.to_path_buf().clean().into());
|
||||
}
|
||||
|
||||
let path = path.strip_prefix(&root).ok()?;
|
||||
let dir = path.parent();
|
||||
let file_name = path.file_name().unwrap_or_default();
|
||||
|
||||
let w = root.to_string_lossy();
|
||||
let f = file_name.to_string_lossy();
|
||||
|
||||
// replace all $root
|
||||
let mut path = self.0.replace("$root", &w);
|
||||
if let Some(dir) = dir {
|
||||
let d = dir.to_string_lossy();
|
||||
path = path.replace("$dir", &d);
|
||||
}
|
||||
path = path.replace("$name", &f);
|
||||
|
||||
Some(PathBuf::from(path).clean().into())
|
||||
}
|
||||
}
|
||||
|
||||
/// A PDF standard that Typst can enforce conformance with.
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)]
|
||||
#[allow(non_camel_case_types)]
|
||||
pub enum PdfStandard {
|
||||
/// PDF 1.7.
|
||||
#[value(name = "1.7")]
|
||||
#[serde(rename = "1.7")]
|
||||
V_1_7,
|
||||
/// PDF/A-2b.
|
||||
#[value(name = "a-2b")]
|
||||
#[serde(rename = "a-2b")]
|
||||
A_2b,
|
||||
}
|
||||
|
||||
display_possible_values!(PdfStandard);
|
||||
|
||||
/// Implements parsing of page ranges (`1-3`, `4`, `5-`, `-2`), used by the
|
||||
/// `CompileCommand.pages` argument, through the `FromStr` trait instead of a
|
||||
/// value parser, in order to generate better errors.
|
||||
///
|
||||
/// See also: <https://github.com/clap-rs/clap/issues/5065>
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);
|
||||
|
||||
impl Pages {
|
||||
/// Selects the first page.
|
||||
pub const FIRST: Pages = Pages(NonZeroUsize::new(1)..=None);
|
||||
}
|
||||
|
||||
impl FromStr for Pages {
|
||||
type Err = &'static str;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
match value
|
||||
.split('-')
|
||||
.map(str::trim)
|
||||
.collect::<Vec<_>>()
|
||||
.as_slice()
|
||||
{
|
||||
[] | [""] => Err("page export range must not be empty"),
|
||||
[single_page] => {
|
||||
let page_number = parse_page_number(single_page)?;
|
||||
Ok(Pages(Some(page_number)..=Some(page_number)))
|
||||
}
|
||||
["", ""] => Err("page export range must have start or end"),
|
||||
[start, ""] => Ok(Pages(Some(parse_page_number(start)?)..=None)),
|
||||
["", end] => Ok(Pages(None..=Some(parse_page_number(end)?))),
|
||||
[start, end] => {
|
||||
let start = parse_page_number(start)?;
|
||||
let end = parse_page_number(end)?;
|
||||
if start > end {
|
||||
Err("page export range must end at a page after the start")
|
||||
} else {
|
||||
Ok(Pages(Some(start)..=Some(end)))
|
||||
}
|
||||
}
|
||||
[_, _, _, ..] => Err("page export range must have a single hyphen"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Pages {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let start = match self.0.start() {
|
||||
Some(start) => start.to_string(),
|
||||
None => String::from(""),
|
||||
};
|
||||
let end = match self.0.end() {
|
||||
Some(end) => end.to_string(),
|
||||
None => String::from(""),
|
||||
};
|
||||
write!(f, "{start}-{end}")
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for Pages {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.serialize_str(&self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for Pages {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let value = String::deserialize(deserializer)?;
|
||||
value.parse().map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a single page number.
|
||||
fn parse_page_number(value: &str) -> Result<NonZeroUsize, &'static str> {
|
||||
if value == "0" {
|
||||
Err("page numbers start at one")
|
||||
} else {
|
||||
NonZeroUsize::from_str(value).map_err(|_| "not a valid page number")
|
||||
}
|
||||
}
|
||||
|
||||
/// A resource path.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct ResourcePath(EcoString, String);
|
||||
|
||||
impl fmt::Display for ResourcePath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}:{}", self.0, self.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ResourcePath {
|
||||
type Err = &'static str;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = value.split(':');
|
||||
let scheme = parts.next().ok_or("missing scheme")?;
|
||||
let path = parts.next().ok_or("missing path")?;
|
||||
if parts.next().is_some() {
|
||||
Err("too many colons")
|
||||
} else {
|
||||
Ok(ResourcePath(scheme.into(), path.to_string()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for ResourcePath {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.serialize_str(&self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for ResourcePath {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let value = String::deserialize(deserializer)?;
|
||||
value.parse().map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
impl ResourcePath {
|
||||
/// Creates a new resource path from a user passing system path.
|
||||
pub fn from_user_sys(inp: &Path) -> Self {
|
||||
let rel = if inp.is_relative() {
|
||||
inp.to_path_buf()
|
||||
} else {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
pathdiff::diff_paths(inp, &cwd).unwrap()
|
||||
};
|
||||
let rel = unix_slash(&rel);
|
||||
ResourcePath("file".into(), rel.to_string())
|
||||
}
|
||||
/// Creates a new resource path from a file id.
|
||||
pub fn from_file_id(id: FileId) -> Self {
|
||||
let package = id.package();
|
||||
match package {
|
||||
Some(package) => ResourcePath(
|
||||
"file_id".into(),
|
||||
format!("{package}{}", unix_slash(id.vpath().as_rooted_path())),
|
||||
),
|
||||
None => ResourcePath(
|
||||
"file_id".into(),
|
||||
format!("$root{}", unix_slash(id.vpath().as_rooted_path())),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the resource path to a path relative to the `base` (usually the
|
||||
/// directory storing the lockfile).
|
||||
pub fn to_rel_path(&self, base: &Path) -> Option<PathBuf> {
|
||||
if self.0 == "file" {
|
||||
let path = Path::new(&self.1);
|
||||
if path.is_absolute() {
|
||||
Some(pathdiff::diff_paths(path, base).unwrap_or_else(|| path.to_owned()))
|
||||
} else {
|
||||
Some(path.to_owned())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the resource path to an absolute file system path.
|
||||
pub fn to_abs_path(&self, base: &Path) -> Option<PathBuf> {
|
||||
if self.0 == "file" {
|
||||
let path = Path::new(&self.1);
|
||||
if path.is_absolute() {
|
||||
Some(path.to_owned())
|
||||
} else {
|
||||
Some(base.join(path))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A lock file compatibility wrapper.
|
||||
#[derive(Debug, serde::Serialize, serde::Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", tag = "version")]
|
||||
|
@@ -575,9 +144,14 @@ pub struct ProjectRoute {

#[cfg(test)]
mod tests {
    use super::*;
    use std::path::Path;

    use tinymist_task::PathPattern;
    use tinymist_world::EntryState;
    use typst::syntax::VirtualPath;

    use super::*;

    #[test]
    fn test_substitute_path() {
        let root = Path::new("/root");
crates/tinymist-task/Cargo.toml (new file, 52 lines)
@@ -0,0 +1,52 @@
[package]
name = "tinymist-task"
description = "Task model of typst for tinymist."
categories = ["compilers"]
keywords = ["language", "typst"]
authors.workspace = true
version.workspace = true
license.workspace = true
edition.workspace = true
homepage.workspace = true
repository.workspace = true
rust-version.workspace = true

[dependencies]
anyhow.workspace = true
chrono.workspace = true
clap.workspace = true
comemo.workspace = true
dirs.workspace = true
ecow.workspace = true
log.workspace = true
parking_lot.workspace = true
pathdiff.workspace = true
tokio = { workspace = true, features = ["sync"] }
rayon.workspace = true
rpds.workspace = true
semver.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true
tinymist-world = { workspace = true }
tinymist-std = { workspace = true }
tinymist-derive.workspace = true
toml.workspace = true
typst.workspace = true
typst-assets.workspace = true
typst-pdf.workspace = true
typst-shim.workspace = true
typst-svg.workspace = true
typst-render.workspace = true
notify.workspace = true

[features]

default = ["pdf", "text"]
no-content-hint = []

pdf = []
text = []

[lints]
workspace = true
crates/tinymist-task/src/compute.rs (new file, 408 lines)
@@ -0,0 +1,408 @@
|
|||
#![allow(missing_docs)]
|
||||
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use comemo::Track;
|
||||
use ecow::EcoString;
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::typst::TypstPagedDocument;
|
||||
use tinymist_world::{
|
||||
args::convert_source_date_epoch, CompileSnapshot, CompilerFeat, ExportComputation,
|
||||
WorldComputeGraph,
|
||||
};
|
||||
use typst::diag::{SourceResult, StrResult};
|
||||
use typst::foundations::{Bytes, Content, Datetime, IntoValue, LocatableSelector, Scope, Value};
|
||||
use typst::layout::Abs;
|
||||
use typst::syntax::{ast, Span, SyntaxNode};
|
||||
use typst::visualize::Color;
|
||||
use typst::World;
|
||||
use typst_pdf::PdfOptions;
|
||||
use typst_shim::eval::EvalMode;
|
||||
|
||||
use crate::model::{ExportPdfTask, ExportPngTask, ExportSvgTask};
|
||||
use crate::primitives::TaskWhen;
|
||||
use crate::{ExportTransform, Pages, QueryTask};
|
||||
|
||||
#[cfg(feature = "pdf")]
|
||||
pub mod pdf;
|
||||
#[cfg(feature = "pdf")]
|
||||
pub use pdf::*;
|
||||
#[cfg(feature = "text")]
|
||||
pub mod text;
|
||||
#[cfg(feature = "text")]
|
||||
pub use text::*;
|
||||
|
||||
pub struct SvgFlag;
|
||||
pub struct PngFlag;
|
||||
pub struct HtmlFlag;
|
||||
|
||||
pub struct ExportTimings;
|
||||
|
||||
impl ExportTimings {
|
||||
pub fn needs_run<F: CompilerFeat>(
|
||||
snap: &CompileSnapshot<F>,
|
||||
timing: Option<TaskWhen>,
|
||||
docs: Option<&TypstPagedDocument>,
|
||||
) -> Option<bool> {
|
||||
let s = snap.signal;
|
||||
let when = timing.unwrap_or(TaskWhen::Never);
|
||||
if !matches!(when, TaskWhen::Never) && s.by_entry_update {
|
||||
return Some(true);
|
||||
}
|
||||
|
||||
match when {
|
||||
TaskWhen::Never => Some(false),
|
||||
TaskWhen::OnType => Some(s.by_mem_events),
|
||||
TaskWhen::OnSave => Some(s.by_fs_events),
|
||||
TaskWhen::OnDocumentHasTitle if s.by_fs_events => {
|
||||
docs.map(|doc| doc.info.title.is_some())
|
||||
}
|
||||
TaskWhen::OnDocumentHasTitle => Some(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SvgExport;
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for SvgExport {
|
||||
type Output = String;
|
||||
type Config = ExportSvgTask;
|
||||
|
||||
fn run(
|
||||
_graph: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &ExportSvgTask,
|
||||
) -> Result<String> {
|
||||
let (is_first, merged_gap) = get_page_selection(&config.export)?;
|
||||
|
||||
let first_page = doc.pages.first();
|
||||
|
||||
Ok(if is_first {
|
||||
if let Some(first_page) = first_page {
|
||||
typst_svg::svg(first_page)
|
||||
} else {
|
||||
typst_svg::svg_merged(doc, merged_gap)
|
||||
}
|
||||
} else {
|
||||
typst_svg::svg_merged(doc, merged_gap)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for SvgExport {
|
||||
// type Output = Option<String>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
pub struct PngExport;
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PngExport {
|
||||
type Output = Bytes;
|
||||
type Config = ExportPngTask;
|
||||
|
||||
fn run(
|
||||
_graph: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &ExportPngTask,
|
||||
) -> Result<Bytes> {
|
||||
let ppi = config.ppi.to_f32();
|
||||
if ppi <= 1e-6 {
|
||||
tinymist_std::bail!("invalid ppi: {ppi}");
|
||||
}
|
||||
|
||||
let fill = if let Some(fill) = &config.fill {
|
||||
parse_color(fill.clone()).map_err(|err| anyhow::anyhow!("invalid fill ({err})"))?
|
||||
} else {
|
||||
Color::WHITE
|
||||
};
|
||||
|
||||
let (is_first, merged_gap) = get_page_selection(&config.export)?;
|
||||
|
||||
let ppp = ppi / 72.;
|
||||
let pixmap = if is_first {
|
||||
if let Some(first_page) = doc.pages.first() {
|
||||
typst_render::render(first_page, ppp)
|
||||
} else {
|
||||
typst_render::render_merged(doc, ppp, merged_gap, Some(fill))
|
||||
}
|
||||
} else {
|
||||
typst_render::render_merged(doc, ppp, merged_gap, Some(fill))
|
||||
};
|
||||
|
||||
pixmap
|
||||
.encode_png()
|
||||
.map(Bytes::from)
|
||||
.context_ut("failed to encode PNG")
|
||||
}
|
||||
}
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for PngExport {
|
||||
// type Output = Option<Bytes>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for HtmlExport {
|
||||
// type Output = Option<String>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
pub struct DocumentQuery;
|
||||
|
||||
impl DocumentQuery {
|
||||
// todo: query exporter
|
||||
/// Retrieve the matches for the selector.
|
||||
pub fn retrieve(
|
||||
world: &dyn World,
|
||||
selector: &str,
|
||||
document: &TypstPagedDocument,
|
||||
) -> StrResult<Vec<Content>> {
|
||||
let selector = typst_shim::eval::eval_string(
|
||||
world.track(),
|
||||
selector,
|
||||
Span::detached(),
|
||||
EvalMode::Code,
|
||||
Scope::default(),
|
||||
)
|
||||
.map_err(|errors| {
|
||||
let mut message = EcoString::from("failed to evaluate selector");
|
||||
for (i, error) in errors.into_iter().enumerate() {
|
||||
message.push_str(if i == 0 { ": " } else { ", " });
|
||||
message.push_str(&error.message);
|
||||
}
|
||||
message
|
||||
})?
|
||||
.cast::<LocatableSelector>()
|
||||
.map_err(|e| EcoString::from(format!("failed to cast: {}", e.message())))?;
|
||||
|
||||
Ok(document
|
||||
.introspector
|
||||
.query(&selector.0)
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>())
|
||||
}
|
||||
|
||||
fn run_inner<F: CompilerFeat>(
|
||||
g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &QueryTask,
|
||||
) -> Result<Vec<Value>> {
|
||||
let selector = &config.selector;
|
||||
let elements = Self::retrieve(&g.snap.world, selector, doc.as_ref())
|
||||
.map_err(|e| anyhow::anyhow!("failed to retrieve: {e}"))?;
|
||||
if config.one && elements.len() != 1 {
|
||||
bail!("expected exactly one element, found {}", elements.len());
|
||||
}
|
||||
|
||||
Ok(elements
|
||||
.into_iter()
|
||||
.filter_map(|c| match &config.field {
|
||||
Some(field) => c.get_by_name(field).ok(),
|
||||
_ => Some(c.into_value()),
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub fn get_as_value<F: CompilerFeat>(
|
||||
g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &QueryTask,
|
||||
) -> Result<serde_json::Value> {
|
||||
let mapped = Self::run_inner(g, doc, config)?;
|
||||
|
||||
let res = if config.one {
|
||||
let Some(value) = mapped.first() else {
|
||||
bail!("no such field found for element");
|
||||
};
|
||||
serde_json::to_value(value)
|
||||
} else {
|
||||
serde_json::to_value(&mapped)
|
||||
};
|
||||
|
||||
res.context("failed to serialize")
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for DocumentQuery {
|
||||
type Output = SourceResult<String>;
|
||||
type Config = QueryTask;
|
||||
|
||||
fn run(
|
||||
g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &QueryTask,
|
||||
) -> Result<SourceResult<String>> {
|
||||
let pretty = false;
|
||||
let mapped = Self::run_inner(g, doc, config)?;
|
||||
|
||||
let res = if config.one {
|
||||
let Some(value) = mapped.first() else {
|
||||
bail!("no such field found for element");
|
||||
};
|
||||
serialize(value, &config.format, pretty)
|
||||
} else {
|
||||
serialize(&mapped, &config.format, pretty)
|
||||
};
|
||||
|
||||
res.map(Ok)
|
||||
}
|
||||
}
|
||||
|
||||
/// Serialize data to the output format.
|
||||
fn serialize(data: &impl serde::Serialize, format: &str, pretty: bool) -> Result<String> {
|
||||
Ok(match format {
|
||||
"json" if pretty => serde_json::to_string_pretty(data).context("serialize query")?,
|
||||
"json" => serde_json::to_string(data).context("serialize query")?,
|
||||
"yaml" => serde_yaml::to_string(&data).context_ut("serialize query")?,
|
||||
"txt" => {
|
||||
use serde_json::Value::*;
|
||||
let value = serde_json::to_value(data).context("serialize query")?;
|
||||
match value {
|
||||
String(s) => s,
|
||||
_ => {
|
||||
let kind = match value {
|
||||
Null => "null",
|
||||
Bool(_) => "boolean",
|
||||
Number(_) => "number",
|
||||
String(_) => "string",
|
||||
Array(_) => "array",
|
||||
Object(_) => "object",
|
||||
};
|
||||
bail!("expected a string value for format: {format}, got {kind}")
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => bail!("unsupported format for query: {format}"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Gets legacy page selection
|
||||
pub fn get_page_selection(task: &crate::ExportTask) -> Result<(bool, Abs)> {
|
||||
let is_first = task
|
||||
.transform
|
||||
.iter()
|
||||
.any(|t| matches!(t, ExportTransform::Pages { ranges, .. } if ranges == &[Pages::FIRST]));
|
||||
|
||||
let mut gap_res = Abs::default();
|
||||
if !is_first {
|
||||
for trans in &task.transform {
|
||||
if let ExportTransform::Merge { gap } = trans {
|
||||
let gap = gap
|
||||
.as_deref()
|
||||
.map(parse_length)
|
||||
.transpose()
|
||||
.context_ut("failed to parse gap")?;
|
||||
gap_res = gap.unwrap_or_default();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((is_first, gap_res))
|
||||
}
|
||||
|
||||
fn parse_length(gap: &str) -> Result<Abs> {
|
||||
let length = typst::syntax::parse_code(gap);
|
||||
if length.erroneous() {
|
||||
bail!("invalid length: {gap}, errors: {:?}", length.errors());
|
||||
}
|
||||
|
||||
let length: Option<ast::Numeric> = descendants(&length).into_iter().find_map(SyntaxNode::cast);
|
||||
|
||||
let Some(length) = length else {
|
||||
bail!("not a length: {gap}");
|
||||
};
|
||||
|
||||
let (value, unit) = length.get();
|
||||
match unit {
|
||||
ast::Unit::Pt => Ok(Abs::pt(value)),
|
||||
ast::Unit::Mm => Ok(Abs::mm(value)),
|
||||
ast::Unit::Cm => Ok(Abs::cm(value)),
|
||||
ast::Unit::In => Ok(Abs::inches(value)),
|
||||
_ => bail!("invalid unit: {unit:?} in {gap}"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Low performance but simple recursive iterator.
|
||||
fn descendants(node: &SyntaxNode) -> impl IntoIterator<Item = &SyntaxNode> + '_ {
|
||||
let mut res = vec![];
|
||||
for child in node.children() {
|
||||
res.push(child);
|
||||
res.extend(descendants(child));
|
||||
}
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
fn parse_color(fill: String) -> anyhow::Result<Color> {
|
||||
match fill.as_str() {
|
||||
"black" => Ok(Color::BLACK),
|
||||
"white" => Ok(Color::WHITE),
|
||||
"red" => Ok(Color::RED),
|
||||
"green" => Ok(Color::GREEN),
|
||||
"blue" => Ok(Color::BLUE),
|
||||
hex if hex.starts_with('#') => {
|
||||
Color::from_str(&hex[1..]).map_err(|e| anyhow::anyhow!("failed to parse color: {e}"))
|
||||
}
|
||||
_ => anyhow::bail!("invalid color: {fill}"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert [`chrono::DateTime`] to [`Datetime`]
|
||||
fn convert_datetime(date_time: chrono::DateTime<chrono::Utc>) -> Option<Datetime> {
|
||||
use chrono::{Datelike, Timelike};
|
||||
Datetime::from_ymd_hms(
|
||||
date_time.year(),
|
||||
date_time.month().try_into().ok()?,
|
||||
date_time.day().try_into().ok()?,
|
||||
date_time.hour().try_into().ok()?,
|
||||
date_time.minute().try_into().ok()?,
|
||||
date_time.second().try_into().ok()?,
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_color() {
|
||||
assert_eq!(parse_color("black".to_owned()).unwrap(), Color::BLACK);
|
||||
assert_eq!(parse_color("white".to_owned()).unwrap(), Color::WHITE);
|
||||
assert_eq!(parse_color("red".to_owned()).unwrap(), Color::RED);
|
||||
assert_eq!(parse_color("green".to_owned()).unwrap(), Color::GREEN);
|
||||
assert_eq!(parse_color("blue".to_owned()).unwrap(), Color::BLUE);
|
||||
assert_eq!(
|
||||
parse_color("#000000".to_owned()).unwrap().to_hex(),
|
||||
"#000000"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_color("#ffffff".to_owned()).unwrap().to_hex(),
|
||||
"#ffffff"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_color("#000000cc".to_owned()).unwrap().to_hex(),
|
||||
"#000000cc"
|
||||
);
|
||||
assert!(parse_color("invalid".to_owned()).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_length() {
|
||||
assert_eq!(parse_length("1pt").unwrap(), Abs::pt(1.));
|
||||
assert_eq!(parse_length("1mm").unwrap(), Abs::mm(1.));
|
||||
assert_eq!(parse_length("1cm").unwrap(), Abs::cm(1.));
|
||||
assert_eq!(parse_length("1in").unwrap(), Abs::inches(1.));
|
||||
assert!(parse_length("1").is_err());
|
||||
assert!(parse_length("1px").is_err());
|
||||
}
|
||||
}
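The query path above serializes results as json, yaml, or txt, and the txt branch only accepts string-valued results. A minimal standalone sketch of that rule using plain serde_json (illustrative only; the crate's own `serialize` helper shown above is private to this module and is not being redefined here):

use serde_json::{json, Value};

// Mirrors the `txt` branch of `serialize` above: strings pass through,
// any other JSON kind is rejected with its kind name.
fn txt_format(value: &Value) -> Result<String, String> {
    match value {
        Value::String(s) => Ok(s.clone()),
        other => Err(format!(
            "expected a string value for format: txt, got {}",
            match other {
                Value::Null => "null",
                Value::Bool(_) => "boolean",
                Value::Number(_) => "number",
                Value::Array(_) => "array",
                Value::Object(_) => "object",
                Value::String(_) => unreachable!(),
            }
        )),
    }
}

fn main() {
    assert_eq!(txt_format(&json!("hello")).unwrap(), "hello");
    assert!(txt_format(&json!(42)).is_err());
}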
|
crates/tinymist-task/src/compute/pdf.rs (new file, 81 lines)
@@ -0,0 +1,81 @@
|
|||
use super::*;
|
||||
|
||||
pub use typst_pdf::pdf;
|
||||
pub use typst_pdf::PdfStandard as TypstPdfStandard;
|
||||
pub struct PdfExport;
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PdfExport {
|
||||
type Output = Bytes;
|
||||
type Config = ExportPdfTask;
|
||||
|
||||
fn run(
|
||||
_graph: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &ExportPdfTask,
|
||||
) -> Result<Bytes> {
|
||||
// todo: timestamp world.now()
|
||||
let creation_timestamp = config
|
||||
.creation_timestamp
|
||||
.map(convert_source_date_epoch)
|
||||
.transpose()
|
||||
.context_ut("parse pdf creation timestamp")?
|
||||
.unwrap_or_else(chrono::Utc::now);
|
||||
|
||||
// todo: Some(pdf_uri.as_str())
|
||||
|
||||
Ok(Bytes::from(typst_pdf::pdf(
|
||||
doc,
|
||||
&PdfOptions {
|
||||
timestamp: convert_datetime(creation_timestamp),
|
||||
..Default::default()
|
||||
},
|
||||
)?))
|
||||
}
|
||||
}
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for PdfExport {
|
||||
// type Output = Option<Bytes>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
// use std::sync::Arc;
|
||||
|
||||
// use reflexo::typst::TypstPagedDocument;
|
||||
// use typst::{diag:: World;
|
||||
// use typst_pdf::{PdfOptions, PdfStandard, PdfStandards, Timestamp};
|
||||
|
||||
// #[derive(Debug, Clone, Default)]
|
||||
// pub struct PdfDocExporter {
|
||||
// ctime: Option<Timestamp>,
|
||||
// standards: Option<PdfStandards>,
|
||||
// }
|
||||
|
||||
// impl PdfDocExporter {
|
||||
// pub fn with_ctime(mut self, v: Option<Timestamp>) -> Self {
|
||||
// self.ctime = v;
|
||||
// self
|
||||
// }
|
||||
|
||||
// pub fn with_standard(mut self, v: Option<PdfStandard>) -> Self {
|
||||
// self.standards = v.map(|v| PdfStandards::new(&[v]).unwrap());
|
||||
// self
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl Exporter<TypstPagedDocument, Vec<u8>> for PdfDocExporter {
|
||||
// fn export(&self, _world: &dyn World, output: Arc<TypstPagedDocument>) ->
|
||||
// Vec<u8>> { // todo: ident option
|
||||
|
||||
// typst_pdf::pdf(
|
||||
// output.as_ref(),
|
||||
// &PdfOptions {
|
||||
// timestamp: self.ctime,
|
||||
// standards: self.standards.clone().unwrap_or_default(),
|
||||
// ..Default::default()
|
||||
// },
|
||||
// )
|
||||
// }
|
||||
// }
|
crates/tinymist-task/src/compute/text.rs (new file, 71 lines)
@@ -0,0 +1,71 @@
|
|||
use core::fmt;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::ExportTextTask;
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::typst::{TypstDocument, TypstPagedDocument};
|
||||
use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph};
|
||||
|
||||
pub struct TextExport;
|
||||
|
||||
impl TextExport {
|
||||
pub fn run_on_doc(doc: &TypstDocument) -> Result<String> {
|
||||
Ok(format!("{}", FullTextDigest(doc.clone())))
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for TextExport {
|
||||
type Output = String;
|
||||
type Config = ExportTextTask;
|
||||
|
||||
fn run(
|
||||
_g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
_config: &ExportTextTask,
|
||||
) -> Result<String> {
|
||||
Self::run_on_doc(&TypstDocument::Paged(doc.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
/// A full text digest of a document.
|
||||
pub struct FullTextDigest(pub TypstDocument);
|
||||
|
||||
impl FullTextDigest {
|
||||
fn export_frame(f: &mut fmt::Formatter<'_>, doc: &typst::layout::Frame) -> fmt::Result {
|
||||
for (_, item) in doc.items() {
|
||||
Self::export_item(f, item)?;
|
||||
}
|
||||
#[cfg(not(feature = "no-content-hint"))]
|
||||
{
|
||||
use std::fmt::Write;
|
||||
let c = doc.content_hint();
|
||||
if c != '\0' {
|
||||
f.write_char(c)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn export_item(f: &mut fmt::Formatter<'_>, item: &typst::layout::FrameItem) -> fmt::Result {
|
||||
use typst::layout::FrameItem::*;
|
||||
match item {
|
||||
Group(g) => Self::export_frame(f, &g.frame),
|
||||
Text(t) => f.write_str(t.text.as_str()),
|
||||
Link(..) | Tag(..) | Shape(..) | Image(..) => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FullTextDigest {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match &self.0 {
|
||||
TypstDocument::Paged(paged_doc) => {
|
||||
for page in paged_doc.pages.iter() {
|
||||
Self::export_frame(f, &page.frame)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
crates/tinymist-task/src/lib.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
#![allow(missing_docs)]

mod model;
pub use model::*;

mod primitives;
pub use primitives::*;

pub mod compute;
pub use compute::*;
@@ -69,7 +69,7 @@ pub enum ProjectTask {
    /// An export HTML task.
    ExportHtml(ExportHtmlTask),
    /// An export Markdown task.
    ExportMarkdown(ExportMarkdownTask),
    ExportMd(ExportMarkdownTask),
    /// An export Text task.
    ExportText(ExportTextTask),
    /// An query task.
@@ -88,7 +88,7 @@ impl ProjectTask {
            | Self::ExportPng(..)
            | Self::ExportSvg(..)
            | Self::ExportHtml(..)
            | Self::ExportMarkdown(..)
            | Self::ExportMd(..)
            | Self::ExportText(..)
            | Self::Query(..) => self.as_export()?.when,
        })
@@ -102,7 +102,7 @@ impl ProjectTask {
            Self::ExportPng(task) => &task.export,
            Self::ExportSvg(task) => &task.export,
            Self::ExportHtml(task) => &task.export,
            Self::ExportMarkdown(task) => &task.export,
            Self::ExportMd(task) => &task.export,
            Self::ExportText(task) => &task.export,
            Self::Query(task) => &task.export,
        })
@@ -113,7 +113,7 @@ impl ProjectTask {
        match self {
            Self::ExportPdf { .. } => "pdf",
            Self::Preview(..) | Self::ExportHtml { .. } => "html",
            Self::ExportMarkdown { .. } => "md",
            Self::ExportMd { .. } => "md",
            Self::ExportText { .. } => "txt",
            Self::ExportSvg { .. } => "svg",
            Self::ExportPng { .. } => "png",
@@ -211,7 +211,7 @@ pub enum ExportTransform {
}

/// An export pdf task specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportPdfTask {
    /// The shared export arguments.
@@ -247,7 +247,7 @@ pub struct ExportPngTask {
}

/// An export svg task specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportSvgTask {
    /// The shared export arguments.
@@ -256,7 +256,7 @@ pub struct ExportSvgTask {
}

/// An export html task specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportHtmlTask {
    /// The shared export arguments.
@@ -265,7 +265,7 @@ pub struct ExportHtmlTask {
}

/// An export markdown task specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportMarkdownTask {
    /// The shared export arguments.
@@ -274,7 +274,7 @@ pub struct ExportMarkdownTask {
}

/// An export text task specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportTextTask {
    /// The shared export arguments.
crates/tinymist-task/src/primitives.rs (new file, 471 lines)
@@ -0,0 +1,471 @@
|
|||
use core::fmt;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::num::NonZeroUsize;
|
||||
use std::ops::RangeInclusive;
|
||||
use std::path::PathBuf;
|
||||
use std::{path::Path, str::FromStr};
|
||||
|
||||
use clap::ValueEnum;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::path::{unix_slash, PathClean};
|
||||
use tinymist_std::ImmutPath;
|
||||
use tinymist_world::vfs::WorkspaceResolver;
|
||||
use tinymist_world::{CompilerFeat, CompilerWorld, EntryReader, EntryState};
|
||||
use typst::diag::EcoString;
|
||||
use typst::syntax::FileId;
|
||||
|
||||
/// A scalar that is not NaN.
|
||||
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Scalar(f32);
|
||||
|
||||
impl TryFrom<f32> for Scalar {
|
||||
type Error = &'static str;
|
||||
|
||||
fn try_from(value: f32) -> Result<Self, Self::Error> {
|
||||
if value.is_nan() {
|
||||
Err("NaN is not a valid scalar value")
|
||||
} else {
|
||||
Ok(Scalar(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Scalar {
|
||||
/// Converts the scalar to an f32.
|
||||
pub fn to_f32(self) -> f32 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Scalar {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.0 == other.0
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Scalar {}
|
||||
|
||||
impl Hash for Scalar {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.0.to_bits().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Scalar {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Scalar {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.0.partial_cmp(&other.0).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// A project ID.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct Id(String);
|
||||
|
||||
impl Id {
|
||||
/// Creates a new project Id.
|
||||
pub fn new(s: String) -> Self {
|
||||
Id(s)
|
||||
}
|
||||
|
||||
/// Creates a new project Id from a world.
|
||||
pub fn from_world<F: CompilerFeat>(world: &CompilerWorld<F>) -> Option<Self> {
|
||||
let entry = world.entry_state();
|
||||
let id = unix_slash(entry.main()?.vpath().as_rootless_path());
|
||||
|
||||
let path = &ResourcePath::from_user_sys(Path::new(&id));
|
||||
Some(path.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ResourcePath> for Id {
|
||||
fn from(value: &ResourcePath) -> Self {
|
||||
Id::new(value.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Id {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! display_possible_values {
|
||||
($ty:ty) => {
|
||||
impl fmt::Display for $ty {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.to_possible_value()
|
||||
.expect("no values are skipped")
|
||||
.get_name()
|
||||
.fmt(f)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// When to export an output file.
|
||||
///
|
||||
/// By default, a `tinymist compile` only provides input information and
|
||||
/// doesn't change the `when` field. However, you can still specify a `when`
|
||||
/// argument to override the default behavior for specific tasks.
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// ```bash
|
||||
/// tinymist compile --when onSave main.typ
|
||||
/// alias typst="tinymist compile --when=onSave"
|
||||
/// typst compile main.typ
|
||||
/// ```
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[clap(rename_all = "camelCase")]
|
||||
pub enum TaskWhen {
|
||||
/// Never watch to run task.
|
||||
#[default]
|
||||
Never,
|
||||
/// Run task on saving the document, i.e. on `textDocument/didSave` events.
|
||||
OnSave,
|
||||
/// Run task on typing, i.e. on `textDocument/didChange` events.
|
||||
OnType,
|
||||
/// *DEPRECATED* Run task when a document has a title and on saved, which is
|
||||
/// useful to filter out template files.
|
||||
///
|
||||
/// Note: this is deprecating.
|
||||
OnDocumentHasTitle,
|
||||
}
|
||||
|
||||
impl TaskWhen {
|
||||
/// Returns `true` if the task should never be run automatically.
|
||||
pub fn is_never(&self) -> bool {
|
||||
matches!(self, TaskWhen::Never)
|
||||
}
|
||||
}
|
||||
|
||||
display_possible_values!(TaskWhen);
|
||||
|
||||
/// Which format to use for the generated output file.
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
|
||||
pub enum OutputFormat {
|
||||
/// Export to PDF.
|
||||
Pdf,
|
||||
/// Export to PNG.
|
||||
Png,
|
||||
/// Export to SVG.
|
||||
Svg,
|
||||
/// Export to HTML.
|
||||
Html,
|
||||
}
|
||||
|
||||
display_possible_values!(OutputFormat);
|
||||
|
||||
/// The path pattern that could be substituted.
|
||||
///
|
||||
/// # Examples
|
||||
/// - `$root` is the root of the project.
|
||||
/// - `$root/$dir` is the parent directory of the input (main) file.
|
||||
/// - `$root/main` will help store pdf file to `$root/main.pdf` constantly.
|
||||
/// - (default) `$root/$dir/$name` will help store pdf file along with the input
|
||||
/// file.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct PathPattern(pub String);
|
||||
|
||||
impl fmt::Display for PathPattern {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl PathPattern {
|
||||
/// Creates a new path pattern.
|
||||
pub fn new(pattern: &str) -> Self {
|
||||
Self(pattern.to_owned())
|
||||
}
|
||||
|
||||
/// Substitutes the path pattern with `$root`, and `$dir/$name`.
|
||||
pub fn substitute(&self, entry: &EntryState) -> Option<ImmutPath> {
|
||||
self.substitute_impl(entry.root(), entry.main())
|
||||
}
|
||||
|
||||
#[comemo::memoize]
|
||||
fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
|
||||
log::info!("Check path {main:?} and root {root:?} with output directory {self:?}");
|
||||
|
||||
let (root, main) = root.zip(main)?;
|
||||
|
||||
// Files in packages are not exported
|
||||
if WorkspaceResolver::is_package_file(main) {
|
||||
return None;
|
||||
}
|
||||
// Files without a path are not exported
|
||||
let path = main.vpath().resolve(&root)?;
|
||||
|
||||
// todo: handle untitled path
|
||||
if let Ok(path) = path.strip_prefix("/untitled") {
|
||||
let tmp = std::env::temp_dir();
|
||||
let path = tmp.join("typst").join(path);
|
||||
return Some(path.as_path().into());
|
||||
}
|
||||
|
||||
if self.0.is_empty() {
|
||||
return Some(path.to_path_buf().clean().into());
|
||||
}
|
||||
|
||||
let path = path.strip_prefix(&root).ok()?;
|
||||
let dir = path.parent();
|
||||
let file_name = path.file_name().unwrap_or_default();
|
||||
|
||||
let w = root.to_string_lossy();
|
||||
let f = file_name.to_string_lossy();
|
||||
|
||||
// replace all $root
|
||||
let mut path = self.0.replace("$root", &w);
|
||||
if let Some(dir) = dir {
|
||||
let d = dir.to_string_lossy();
|
||||
path = path.replace("$dir", &d);
|
||||
}
|
||||
path = path.replace("$name", &f);
|
||||
|
||||
Some(PathBuf::from(path).clean().into())
|
||||
}
|
||||
}
|
||||
|
||||
/// A PDF standard that Typst can enforce conformance with.
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)]
|
||||
#[allow(non_camel_case_types)]
|
||||
pub enum PdfStandard {
|
||||
/// PDF 1.7.
|
||||
#[value(name = "1.7")]
|
||||
#[serde(rename = "1.7")]
|
||||
V_1_7,
|
||||
/// PDF/A-2b.
|
||||
#[value(name = "a-2b")]
|
||||
#[serde(rename = "a-2b")]
|
||||
A_2b,
|
||||
}
|
||||
|
||||
display_possible_values!(PdfStandard);
|
||||
|
||||
/// Implements parsing of page ranges (`1-3`, `4`, `5-`, `-2`), used by the
|
||||
/// `CompileCommand.pages` argument, through the `FromStr` trait instead of a
|
||||
/// value parser, in order to generate better errors.
|
||||
///
|
||||
/// See also: <https://github.com/clap-rs/clap/issues/5065>
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);
|
||||
|
||||
impl Pages {
|
||||
/// Selects the first page.
|
||||
pub const FIRST: Pages = Pages(NonZeroUsize::new(1)..=None);
|
||||
}
|
||||
|
||||
impl FromStr for Pages {
|
||||
type Err = &'static str;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
match value
|
||||
.split('-')
|
||||
.map(str::trim)
|
||||
.collect::<Vec<_>>()
|
||||
.as_slice()
|
||||
{
|
||||
[] | [""] => Err("page export range must not be empty"),
|
||||
[single_page] => {
|
||||
let page_number = parse_page_number(single_page)?;
|
||||
Ok(Pages(Some(page_number)..=Some(page_number)))
|
||||
}
|
||||
["", ""] => Err("page export range must have start or end"),
|
||||
[start, ""] => Ok(Pages(Some(parse_page_number(start)?)..=None)),
|
||||
["", end] => Ok(Pages(None..=Some(parse_page_number(end)?))),
|
||||
[start, end] => {
|
||||
let start = parse_page_number(start)?;
|
||||
let end = parse_page_number(end)?;
|
||||
if start > end {
|
||||
Err("page export range must end at a page after the start")
|
||||
} else {
|
||||
Ok(Pages(Some(start)..=Some(end)))
|
||||
}
|
||||
}
|
||||
[_, _, _, ..] => Err("page export range must have a single hyphen"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Pages {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let start = match self.0.start() {
|
||||
Some(start) => start.to_string(),
|
||||
None => String::from(""),
|
||||
};
|
||||
let end = match self.0.end() {
|
||||
Some(end) => end.to_string(),
|
||||
None => String::from(""),
|
||||
};
|
||||
write!(f, "{start}-{end}")
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for Pages {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.serialize_str(&self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for Pages {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let value = String::deserialize(deserializer)?;
|
||||
value.parse().map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a single page number.
|
||||
fn parse_page_number(value: &str) -> Result<NonZeroUsize, &'static str> {
|
||||
if value == "0" {
|
||||
Err("page numbers start at one")
|
||||
} else {
|
||||
NonZeroUsize::from_str(value).map_err(|_| "not a valid page number")
|
||||
}
|
||||
}
|
||||
|
||||
/// A resource path.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct ResourcePath(EcoString, String);
|
||||
|
||||
impl fmt::Display for ResourcePath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}:{}", self.0, self.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ResourcePath {
|
||||
type Err = &'static str;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = value.split(':');
|
||||
let scheme = parts.next().ok_or("missing scheme")?;
|
||||
let path = parts.next().ok_or("missing path")?;
|
||||
if parts.next().is_some() {
|
||||
Err("too many colons")
|
||||
} else {
|
||||
Ok(ResourcePath(scheme.into(), path.to_string()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for ResourcePath {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serializer.serialize_str(&self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for ResourcePath {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let value = String::deserialize(deserializer)?;
|
||||
value.parse().map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
impl ResourcePath {
|
||||
/// Creates a new resource path from a user passing system path.
|
||||
pub fn from_user_sys(inp: &Path) -> Self {
|
||||
let rel = if inp.is_relative() {
|
||||
inp.to_path_buf()
|
||||
} else {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
pathdiff::diff_paths(inp, &cwd).unwrap()
|
||||
};
|
||||
let rel = unix_slash(&rel);
|
||||
ResourcePath("file".into(), rel.to_string())
|
||||
}
|
||||
/// Creates a new resource path from a file id.
|
||||
pub fn from_file_id(id: FileId) -> Self {
|
||||
let package = id.package();
|
||||
match package {
|
||||
Some(package) => ResourcePath(
|
||||
"file_id".into(),
|
||||
format!("{package}{}", unix_slash(id.vpath().as_rooted_path())),
|
||||
),
|
||||
None => ResourcePath(
|
||||
"file_id".into(),
|
||||
format!("$root{}", unix_slash(id.vpath().as_rooted_path())),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the resource path to a path relative to the `base` (usually the
|
||||
/// directory storing the lockfile).
|
||||
pub fn to_rel_path(&self, base: &Path) -> Option<PathBuf> {
|
||||
if self.0 == "file" {
|
||||
let path = Path::new(&self.1);
|
||||
if path.is_absolute() {
|
||||
Some(pathdiff::diff_paths(path, base).unwrap_or_else(|| path.to_owned()))
|
||||
} else {
|
||||
Some(path.to_owned())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the resource path to an absolute file system path.
|
||||
pub fn to_abs_path(&self, base: &Path) -> Option<PathBuf> {
|
||||
if self.0 == "file" {
|
||||
let path = Path::new(&self.1);
|
||||
if path.is_absolute() {
|
||||
Some(path.to_owned())
|
||||
} else {
|
||||
Some(base.join(path))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use typst::syntax::VirtualPath;
|
||||
|
||||
#[test]
|
||||
fn test_substitute_path() {
|
||||
let root = Path::new("/root");
|
||||
let entry =
|
||||
EntryState::new_rooted(root.into(), Some(VirtualPath::new("/dir1/dir2/file.txt")));
|
||||
|
||||
assert_eq!(
|
||||
PathPattern::new("/substitute/$dir/$name").substitute(&entry),
|
||||
Some(PathBuf::from("/substitute/dir1/dir2/file.txt").into())
|
||||
);
|
||||
assert_eq!(
|
||||
PathPattern::new("/substitute/$dir/../$name").substitute(&entry),
|
||||
Some(PathBuf::from("/substitute/dir1/file.txt").into())
|
||||
);
|
||||
assert_eq!(
|
||||
PathPattern::new("/substitute/$name").substitute(&entry),
|
||||
Some(PathBuf::from("/substitute/file.txt").into())
|
||||
);
|
||||
assert_eq!(
|
||||
PathPattern::new("/substitute/target/$dir/$name").substitute(&entry),
|
||||
Some(PathBuf::from("/substitute/target/dir1/dir2/file.txt").into())
|
||||
);
|
||||
}
|
||||
}
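A minimal sketch of how the primitives in this new file fit together, mirroring the FromStr impls and the test_substitute_path test above; it assumes the tinymist-task, tinymist-world, and typst dependencies from the new Cargo.toml and is not part of the commit:

use std::path::Path;
use std::str::FromStr;

use tinymist_task::{PathPattern, Pages, ResourcePath};
use tinymist_world::EntryState;
use typst::syntax::VirtualPath;

fn main() {
    // Page ranges accept `1-3`, `4`, `5-`, `-2`; `0` is rejected because
    // page numbers start at one.
    assert!(Pages::from_str("2-5").is_ok());
    assert!(Pages::from_str("0").is_err());

    // Resource paths are `scheme:path` pairs and round-trip through Display.
    let res = ResourcePath::from_str("file:main.typ").unwrap();
    assert_eq!(res.to_string(), "file:main.typ");

    // `$root`, `$dir`, and `$name` are substituted from the entry state,
    // as in `test_substitute_path` above.
    let entry = EntryState::new_rooted(
        Path::new("/root").into(),
        Some(VirtualPath::new("/dir1/dir2/file.typ")),
    );
    assert!(PathPattern::new("$root/$dir/$name").substitute(&entry).is_some());
}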
|
|
@@ -64,8 +64,5 @@ system = [
    "tinymist-vfs/system",
]

[dev-dependencies]
tinymist-world = { path = ".", features = ["system"] }

[lints]
workspace = true
@ -7,10 +7,11 @@ use ecow::EcoVec;
|
|||
use parking_lot::Mutex;
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::typst::TypstPagedDocument;
|
||||
use typst::diag::{SourceResult, Warned};
|
||||
use typst::diag::{At, SourceResult, Warned};
|
||||
use typst::syntax::Span;
|
||||
|
||||
use crate::snapshot::CompileSnapshot;
|
||||
use crate::CompilerFeat;
|
||||
use crate::{CompilerFeat, EntryReader};
|
||||
|
||||
type AnyArc = Arc<dyn std::any::Any + Send + Sync>;
|
||||
|
||||
|
@ -36,6 +37,8 @@ pub struct WorldComputeGraph<F: CompilerFeat> {
|
|||
|
||||
/// A world computable trait.
|
||||
pub trait WorldComputable<F: CompilerFeat>: std::any::Any + Send + Sync + Sized {
|
||||
type Output: Send + Sync + 'static;
|
||||
|
||||
/// The computation implementation.
|
||||
///
|
||||
/// ## Example
|
||||
|
@ -58,6 +61,8 @@ pub trait WorldComputable<F: CompilerFeat>: std::any::Any + Send + Sync + Sized
|
|||
/// }
|
||||
///
|
||||
/// impl WorldComputable<SystemCompilerFeat> for SystemFontsOnce {
|
||||
/// type Output = Self;
|
||||
///
|
||||
/// fn compute(graph: &Arc<WorldComputeGraph<SystemCompilerFeat>>) -> Result<Self> {
|
||||
///
|
||||
/// Ok(Self {
|
||||
|
@ -71,7 +76,7 @@ pub trait WorldComputable<F: CompilerFeat>: std::any::Any + Send + Sync + Sized
|
|||
/// let _fonts = graph.compute::<SystemFontsOnce>().expect("font").fonts.clone();
|
||||
/// }
|
||||
/// ```
|
||||
fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self>;
|
||||
fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output>;
|
||||
}
|
||||
|
||||
impl<F: CompilerFeat> WorldComputeGraph<F> {

@ -92,7 +97,7 @@ impl<F: CompilerFeat> WorldComputeGraph<F> {
    }

    /// Gets a world computed.
    pub fn must_get<T: WorldComputable<F>>(&self) -> Result<Arc<T>> {
    pub fn must_get<T: WorldComputable<F>>(&self) -> Result<Arc<T::Output>> {
        let res = self.get::<T>().transpose()?;
        res.with_context("computation not found", || {
            Some(Box::new([("type", std::any::type_name::<T>().to_owned())]))

@ -100,13 +105,13 @@ impl<F: CompilerFeat> WorldComputeGraph<F> {
    }

    /// Gets a world computed.
    pub fn get<T: WorldComputable<F>>(&self) -> Option<Result<Arc<T>>> {
    pub fn get<T: WorldComputable<F>>(&self) -> Option<Result<Arc<T::Output>>> {
        let computed = self.computed(TypeId::of::<T>()).computed;
        computed.get().cloned().map(WorldComputeEntry::cast)
    }

    pub fn exact_provide<T: WorldComputable<F>>(&self, ins: Result<Arc<T>>) {
        if self.provide(ins).is_err() {
    pub fn exact_provide<T: WorldComputable<F>>(&self, ins: Result<Arc<T::Output>>) {
        if self.provide::<T>(ins).is_err() {
            panic!(
                "failed to provide computed instance: {:?}",
                std::any::type_name::<T>()

@ -118,15 +123,15 @@ impl<F: CompilerFeat> WorldComputeGraph<F> {
    #[must_use = "the result must be checked"]
    pub fn provide<T: WorldComputable<F>>(
        &self,
        ins: Result<Arc<T>>,
    ) -> Result<(), Result<Arc<T>>> {
        ins: Result<Arc<T::Output>>,
    ) -> Result<(), Result<Arc<T::Output>>> {
        let entry = self.computed(TypeId::of::<T>()).computed;
        let initialized = entry.set(ins.map(|e| Arc::new(e) as AnyArc));
        initialized.map_err(WorldComputeEntry::cast)
    }

    /// Gets or computes a world computable.
    pub fn compute<T: WorldComputable<F>>(self: &Arc<Self>) -> Result<Arc<T>> {
    pub fn compute<T: WorldComputable<F>>(self: &Arc<Self>) -> Result<Arc<T::Output>> {
        let entry = self.computed(TypeId::of::<T>()).computed;
        let computed = entry.get_or_init(|| Ok(Arc::new(T::compute(self)?)));
        WorldComputeEntry::cast(computed.clone())

@ -147,20 +152,38 @@ impl<F: CompilerFeat> WorldComputeGraph<F> {
pub trait Document {}
impl Document for TypstPagedDocument {}

pub trait ExportDetection<F: CompilerFeat, D> {
    type Config: Send + Sync + 'static;

    fn needs_run(graph: &Arc<WorldComputeGraph<F>>, config: &Self::Config) -> bool;
}

pub trait ExportComputation<F: CompilerFeat, D> {
    type Output;
    type Config: Send + Sync + 'static;

    fn needs_run(graph: &Arc<WorldComputeGraph<F>>, doc: Option<&D>, config: &Self::Config)
        -> bool;
    fn run_with<C: WorldComputable<F, Output = Option<Arc<D>>>>(
        g: &Arc<WorldComputeGraph<F>>,
        config: &Self::Config,
    ) -> Result<Self::Output> {
        let doc = g.compute::<C>()?;
        let doc = doc.as_ref().as_ref().context("document not found")?;
        Self::run(g, doc, config)
    }

    fn run(doc: &Arc<D>, config: &Self::Config) -> Result<Self::Output>;
    fn run(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<D>,
        config: &Self::Config,
    ) -> Result<Self::Output>;
}

pub struct ConfigTask<T>(pub T);

impl<F: CompilerFeat, T: Send + Sync + 'static> WorldComputable<F> for ConfigTask<T> {
    fn compute(_graph: &Arc<WorldComputeGraph<F>>) -> Result<Self> {
    type Output = T;

    fn compute(_graph: &Arc<WorldComputeGraph<F>>) -> Result<T> {
        let id = std::any::type_name::<T>();
        panic!("{id:?} must be provided before computation");
    }
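As the panic above signals, a ConfigTask<T> is never computed on demand: the value must be seeded into the graph before anything reads it. A hedged sketch of that call pattern; the config type and import paths are assumptions for illustration:

use std::sync::Arc;

use tinymist_std::error::prelude::*;
// Assumed re-exports; the concrete module paths may differ between crates.
use tinymist_world::{CompilerFeat, ConfigTask, WorldComputeGraph};

/// Hypothetical configuration consumed by some export task.
struct MyExportConfig {
    pretty: bool,
}

fn seed_config<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<()> {
    // Provide the config up front; providing the same type twice is an error.
    if graph
        .provide::<ConfigTask<MyExportConfig>>(Ok(Arc::new(MyExportConfig { pretty: true })))
        .is_err()
    {
        tinymist_std::bail!("config already provided");
    }

    // Later readers fetch it through the same type key.
    let config = graph.must_get::<ConfigTask<MyExportConfig>>()?;
    assert!(config.pretty);
    Ok(())
}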
@ -173,78 +196,47 @@ pub struct TaskFlagBase<T> {
}

impl<T> FlagTask<T> {
    pub fn flag(flag: bool) -> Arc<Self> {
        Arc::new(ConfigTask(TaskFlagBase {
    pub fn flag(flag: bool) -> Arc<TaskFlagBase<T>> {
        Arc::new(TaskFlagBase {
            enabled: flag,
            _phantom: Default::default(),
        }))
        })
    }
}

pub type PagedCompilationTask = CompilationTask<TypstPagedDocument>;

pub struct CompilationTask<D>(Option<Warned<SourceResult<Arc<D>>>>);
pub struct CompilationTask<D>(std::marker::PhantomData<D>);

impl<F: CompilerFeat> WorldComputable<F> for CompilationTask<TypstPagedDocument> {
    fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self> {
    type Output = Option<Warned<SourceResult<Arc<TypstPagedDocument>>>>;

    fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
        let enabled = graph
            .must_get::<FlagTask<CompilationTask<TypstPagedDocument>>>()?
            .0
            .enabled;

        Ok(Self(enabled.then(|| {
        Ok(enabled.then(|| {
            let compiled = typst::compile(&graph.snap.world);
            Warned {
                output: compiled.output.map(Arc::new),
                warnings: compiled.warnings,
            }
        })))
        }))
    }
}

pub struct OptionDocumentTask<D>(pub Option<Arc<D>>);
pub struct OptionDocumentTask<D>(std::marker::PhantomData<D>);

impl<F: CompilerFeat> WorldComputable<F> for OptionDocumentTask<TypstPagedDocument> {
    fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self> {
    type Output = Option<Arc<TypstPagedDocument>>;

    fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
        let doc = graph.compute::<CompilationTask<TypstPagedDocument>>()?;
        let compiled = doc.0.as_ref().and_then(|warned| warned.output.clone().ok());
        let doc = doc.as_ref().as_ref();
        let compiled = doc.and_then(|warned| warned.output.clone().ok());

        Ok(Self(compiled))
    }
}

impl OptionDocumentTask<TypstPagedDocument> {
    pub fn needs_run<F: CompilerFeat, C: Send + Sync + 'static>(
        graph: &Arc<WorldComputeGraph<F>>,
        f: impl FnOnce(&Arc<WorldComputeGraph<F>>, Option<&TypstPagedDocument>, &C) -> bool,
    ) -> Result<bool> {
        let Some(config) = graph.get::<ConfigTask<C>>().transpose()? else {
            return Ok(false);
        };

        let doc = graph.compute::<OptionDocumentTask<TypstPagedDocument>>()?;
        Ok(f(graph, doc.0.as_deref(), &config.0))
    }

    pub fn run_export<F: CompilerFeat, T: ExportComputation<F, TypstPagedDocument>>(
        graph: &Arc<WorldComputeGraph<F>>,
    ) -> Result<Option<T::Output>> {
        if !OptionDocumentTask::needs_run(graph, T::needs_run)? {
            return Ok(None);
        }

        let doc = graph
            .compute::<OptionDocumentTask<TypstPagedDocument>>()?
            .0
            .clone();
        let config = graph.get::<ConfigTask<T::Config>>().transpose()?;

        let result = doc
            .zip(config)
            .map(|(doc, config)| T::run(&doc, &config.0))
            .transpose()?;

        Ok(result)
        Ok(compiled)
    }
}
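Because CompilationTask consults a FlagTask before doing any work, callers gate compilation by providing the flag first and then reading the resulting document through OptionDocumentTask. A minimal sketch, assuming the types are re-exported under the names used here:

use std::sync::Arc;

use tinymist_std::error::prelude::*;
use tinymist_std::typst::TypstPagedDocument;
// Assumed re-exports; the concrete module paths may differ between crates.
use tinymist_world::{
    CompilerFeat, FlagTask, OptionDocumentTask, PagedCompilationTask, WorldComputeGraph,
};

fn compile_if_enabled<F: CompilerFeat>(
    graph: &Arc<WorldComputeGraph<F>>,
    enabled: bool,
) -> Result<Option<Arc<TypstPagedDocument>>> {
    // Seed the flag that `CompilationTask<TypstPagedDocument>` reads.
    let _ = graph.provide::<FlagTask<PagedCompilationTask>>(Ok(FlagTask::flag(enabled)));

    // `OptionDocumentTask` yields `Some(doc)` only if the flag was set and
    // compilation succeeded.
    let doc = graph.compute::<OptionDocumentTask<TypstPagedDocument>>()?;
    Ok(doc.as_ref().clone())
}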
@ -254,7 +246,7 @@ struct CompilationDiagnostics {
}

impl CompilationDiagnostics {
    fn from_result<T>(result: Option<Warned<SourceResult<T>>>) -> Self {
    fn from_result<T>(result: &Option<Warned<SourceResult<T>>>) -> Self {
        let errors = result
            .as_ref()
            .and_then(|r| r.output.as_ref().map_err(|e| e.clone()).err());

@ -269,11 +261,13 @@ pub struct DiagnosticsTask {
}

impl<F: CompilerFeat> WorldComputable<F> for DiagnosticsTask {
    type Output = Self;

    fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self> {
        let paged = graph.compute::<PagedCompilationTask>()?.0.clone();
        let paged = graph.compute::<PagedCompilationTask>()?.clone();

        Ok(Self {
            paged: CompilationDiagnostics::from_result(paged),
            paged: CompilationDiagnostics::from_result(paged.as_ref()),
        })
    }
}

@ -288,58 +282,79 @@ impl DiagnosticsTask {
    }
}

pub type ErasedVecExportTask<E> = ErasedExportTask<SourceResult<Vec<u8>>, E>;
pub type ErasedStrExportTask<E> = ErasedExportTask<SourceResult<String>, E>;
// pub type ErasedVecExportTask<E> = ErasedExportTask<SourceResult<Bytes>, E>;
// pub type ErasedStrExportTask<E> = ErasedExportTask<SourceResult<String>, E>;

pub struct ErasedExportTask<T, E> {
    pub result: Option<T>,
    _phantom: std::marker::PhantomData<E>,
}
// pub struct ErasedExportTask<T, E> {
//     _phantom: std::marker::PhantomData<(T, E)>,
// }

#[allow(clippy::type_complexity)]
struct ErasedExportImpl<F: CompilerFeat, T, E> {
    f: Arc<dyn Fn(&Arc<WorldComputeGraph<F>>) -> Result<ErasedExportTask<T, E>> + Send + Sync>,
}
// #[allow(clippy::type_complexity)]
// struct ErasedExportImpl<F: CompilerFeat, T, E> {
//     f: Arc<dyn Fn(&Arc<WorldComputeGraph<F>>) -> Result<Option<T>> + Send +
// Sync>, _phantom: std::marker::PhantomData<E>,
// }

impl<T: Send + Sync + 'static, E: Send + Sync + 'static> ErasedExportTask<T, E> {
    #[must_use = "the result must be checked"]
    pub fn provide_raw<F: CompilerFeat>(
        graph: &Arc<WorldComputeGraph<F>>,
        f: impl Fn(&Arc<WorldComputeGraph<F>>) -> Result<Option<T>> + Send + Sync + 'static,
    ) -> Result<()> {
        let provided = graph.provide::<ConfigTask<ErasedExportImpl<F, T, E>>>(Ok(Arc::new({
            ConfigTask(ErasedExportImpl {
                f: Arc::new(move |graph| {
                    let result = f(graph)?;
                    Ok(ErasedExportTask {
                        result,
                        _phantom: std::marker::PhantomData,
                    })
                }),
            })
        })));
// impl<T: Send + Sync + 'static, E: Send + Sync + 'static> ErasedExportTask<T,
// E> { #[must_use = "the result must be checked"]
//     pub fn provide_raw<F: CompilerFeat>(
//         graph: &Arc<WorldComputeGraph<F>>,
//         f: impl Fn(&Arc<WorldComputeGraph<F>>) -> Result<Option<T>> + Send +
// Sync + 'static, ) -> Result<()> {
//         let provided = graph.provide::<ConfigTask<ErasedExportImpl<F, T,
// E>>>(Ok(Arc::new({ ErasedExportImpl {
//                 f: Arc::new(f),
//                 _phantom: std::marker::PhantomData,
//             }
//         })));

        if provided.is_err() {
            tinymist_std::bail!("already provided")
//         if provided.is_err() {
//             tinymist_std::bail!("already provided")
//         }

//         Ok(())
//     }

//     #[must_use = "the result must be checked"]
//     pub fn provide<F: CompilerFeat, D, C>(graph: &Arc<WorldComputeGraph<F>>)
// -> Result<()> where
//         D: typst::Document + Send + Sync + 'static,
//         C: WorldComputable<F> + ExportComputation<F, D, Output = T>,
//     {
//         Self::provide_raw(graph, OptionDocumentTask::run_export::<F, C>)
//     }
// }

// impl<F: CompilerFeat, T: Send + Sync + 'static, E: Send + Sync + 'static>
//     WorldComputable<F> for ErasedExportTask<T, E>
// {
//     type Output = Option<T>;

//     fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
//         let conf = graph.must_get::<ConfigTask<ErasedExportImpl<F, T,
// E>>>()?; (conf.f)(graph)
//     }
// }

impl<F: CompilerFeat> WorldComputeGraph<F> {
    pub fn ensure_main(&self) -> SourceResult<()> {
        let main_id = self.snap.world.main_id();
        let checked = main_id.ok_or_else(|| typst::diag::eco_format!("entry file is not set"));
        checked.at(Span::detached()).map(|_| ())
    }

    /// Compile once from scratch.
    pub fn pure_compile(&self) -> Warned<SourceResult<Arc<TypstPagedDocument>>> {
        let res = ::typst::compile(&self.snap.world);
        // compile document
        Warned {
            output: res.output.map(Arc::new),
            warnings: res.warnings,
        }

        Ok(())
    }

    #[must_use = "the result must be checked"]
    pub fn provide<F: CompilerFeat, C>(graph: &Arc<WorldComputeGraph<F>>) -> Result<()>
    where
        C: WorldComputable<F> + ExportComputation<F, TypstPagedDocument, Output = T>,
    {
        Self::provide_raw(graph, OptionDocumentTask::run_export::<F, C>)
    }
}

impl<F: CompilerFeat, T: Send + Sync + 'static, E: Send + Sync + 'static> WorldComputable<F>
    for ErasedExportTask<T, E>
{
    fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self> {
        let f = graph.must_get::<ConfigTask<ErasedExportImpl<F, T, E>>>()?;
        (f.0.f)(graph)
    /// Compile once from scratch.
    pub fn compile(&self) -> Warned<SourceResult<Arc<TypstPagedDocument>>> {
        self.pure_compile()
    }
}
@ -29,6 +29,8 @@ impl crate::CompilerFeat for SystemCompilerFeat {
pub type TypstSystemUniverse = crate::world::CompilerUniverse<SystemCompilerFeat>;
/// The compiler world in system environment.
pub type TypstSystemWorld = crate::world::CompilerWorld<SystemCompilerFeat>;
/// The compute graph in system environment.
pub type SystemWorldComputeGraph = crate::WorldComputeGraph<SystemCompilerFeat>;

impl TypstSystemUniverse {
    /// Create [`TypstSystemWorld`] with the given options.

@ -132,6 +134,8 @@ mod tests {
    }

    impl WorldComputable<SystemCompilerFeat> for FontsOnce {
        type Output = Self;

        fn compute(graph: &Arc<WorldComputeGraph<SystemCompilerFeat>>) -> Result<Self> {
            // Ensure that this function is only called once.
            if FONT_COMPUTED.swap(true, std::sync::atomic::Ordering::SeqCst) {
@ -77,6 +77,7 @@ typst-timing.workspace = true
typst-shim.workspace = true
typst-preview = { workspace = true, optional = true }
typst-ansi-hl.workspace = true
tinymist-task.workspace = true
typstfmt.workspace = true
typstyle-core.workspace = true
unicode-script.workspace = true

@ -101,6 +102,7 @@ embed-fonts = ["tinymist-project/fonts"]
# This requires modifying typst.
no-content-hint = [
    "tinymist-query/no-content-hint",
    "tinymist-task/no-content-hint",
    "reflexo-typst/no-content-hint",
    "reflexo-vec2svg/no-content-hint",
]
@ -106,7 +106,7 @@ impl ServerState {
        let opts = get_arg_or_default!(args[1] as ExportOpts);
        self.export(
            req_id,
            ProjectTask::ExportMarkdown(ExportMarkdownTask {
            ProjectTask::ExportMd(ExportMarkdownTask {
                export: ExportTask::default(),
            }),
            opts.open.unwrap_or_default(),
@ -36,6 +36,7 @@ mod utils;
pub use init::*;
pub use server::*;
pub use sync_lsp::LspClient;
pub use task::export2 as export;
pub use task::UserActionTask;
pub use tinymist_project::world;
pub use tinymist_query as query;
@ -9,24 +9,24 @@ use crate::project::{
};
use anyhow::bail;
use reflexo::ImmutPath;
use reflexo_typst::{TypstAbs as Abs, TypstDatetime};
use reflexo_typst::TypstDatetime;
use tinymist_project::{
    convert_source_date_epoch, EntryReader, ExportSvgTask, ExportTask as ProjectExportTask,
    ExportTransform, LspCompiledArtifact, Pages, ProjectTask, QueryTask,
    LspCompiledArtifact, ProjectTask, QueryTask,
};
use tinymist_std::error::prelude::*;
use tinymist_std::typst::TypstDocument;
use tinymist_task::get_page_selection;
use tokio::sync::mpsc;
use typlite::Typlite;
use typst::foundations::IntoValue;
use typst::syntax::{ast, SyntaxNode};
use typst::visualize::Color;
use typst_pdf::PdfOptions;

use crate::tool::text::FullTextDigest;
use crate::{actor::editor::EditorRequest, tool::word_count};

use super::*;
use super::{FutureFolder, SyncTaskFactory};

#[derive(Clone)]
pub struct ExportTask {

@ -251,7 +251,7 @@ impl ExportTask {
            ExportText(ExportTextTask { export: _ }) => {
                format!("{}", FullTextDigest(doc.clone())).into_bytes()
            }
            ExportMarkdown(ExportMarkdownTask { export: _ }) => {
            ExportMd(ExportMarkdownTask { export: _ }) => {
                let conv = Typlite::new(Arc::new(snap.world))
                    .convert()
                    .map_err(|e| anyhow::anyhow!("failed to convert to markdown: {e}"))?;
|
|||
})
|
||||
}
|
||||
|
||||
/// Gets legacy page selection
|
||||
pub fn get_page_selection(task: &tinymist_project::ExportTask) -> Result<(bool, Abs)> {
|
||||
let is_first = task
|
||||
.transform
|
||||
.iter()
|
||||
.any(|t| matches!(t, ExportTransform::Pages { ranges, .. } if ranges == &[Pages::FIRST]));
|
||||
|
||||
let mut gap_res = Abs::default();
|
||||
if !is_first {
|
||||
for trans in &task.transform {
|
||||
if let ExportTransform::Merge { gap } = trans {
|
||||
let gap = gap
|
||||
.as_deref()
|
||||
.map(parse_length)
|
||||
.transpose()
|
||||
.context_ut("failed to parse gap")?;
|
||||
gap_res = gap.unwrap_or_default();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((is_first, gap_res))
|
||||
}
|
||||
|
||||
fn parse_length(gap: &str) -> anyhow::Result<Abs> {
|
||||
let length = typst::syntax::parse_code(gap);
|
||||
if length.erroneous() {
|
||||
bail!("invalid length: {gap}, errors: {:?}", length.errors());
|
||||
}
|
||||
|
||||
let length: Option<ast::Numeric> = descendants(&length).into_iter().find_map(SyntaxNode::cast);
|
||||
|
||||
let Some(length) = length else {
|
||||
bail!("not a length: {gap}");
|
||||
};
|
||||
|
||||
let (value, unit) = length.get();
|
||||
match unit {
|
||||
ast::Unit::Pt => Ok(Abs::pt(value)),
|
||||
ast::Unit::Mm => Ok(Abs::mm(value)),
|
||||
ast::Unit::Cm => Ok(Abs::cm(value)),
|
||||
ast::Unit::In => Ok(Abs::inches(value)),
|
||||
_ => bail!("invalid unit: {unit:?} in {gap}"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Low performance but simple recursive iterator.
|
||||
fn descendants(node: &SyntaxNode) -> impl IntoIterator<Item = &SyntaxNode> + '_ {
|
||||
let mut res = vec![];
|
||||
for child in node.children() {
|
||||
res.push(child);
|
||||
res.extend(descendants(child));
|
||||
}
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use clap::Parser;
|
||||
|
||||
use super::*;
|
||||
use crate::export::ProjectCompilation;
|
||||
use crate::project::{CompileOnceArgs, ExportSignal};
|
||||
use crate::world::base::{CompileSnapshot, WorldComputeGraph};
|
||||
|
||||
#[test]
|
||||
fn test_default_never() {
|
||||
|
@ -483,12 +431,43 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_length() {
|
||||
assert_eq!(parse_length("1pt").unwrap(), Abs::pt(1.));
|
||||
assert_eq!(parse_length("1mm").unwrap(), Abs::mm(1.));
|
||||
assert_eq!(parse_length("1cm").unwrap(), Abs::cm(1.));
|
||||
assert_eq!(parse_length("1in").unwrap(), Abs::inches(1.));
|
||||
assert!(parse_length("1").is_err());
|
||||
assert!(parse_length("1px").is_err());
|
||||
fn compilation_default_never() {
|
||||
let args = CompileOnceArgs::parse_from(["tinymist", "main.typ"]);
|
||||
let verse = args
|
||||
.resolve_system()
|
||||
.expect("failed to resolve system universe");
|
||||
|
||||
let snap = CompileSnapshot::from_world(verse.snapshot());
|
||||
|
||||
let graph = WorldComputeGraph::new(snap);
|
||||
|
||||
let needs_run =
|
||||
ProjectCompilation::preconfig_timings(&graph).expect("failed to preconfigure timings");
|
||||
|
||||
assert!(!needs_run);
|
||||
}
|
||||
|
||||
// todo: on demand compilation
|
||||
#[test]
|
||||
fn compilation_run_paged_diagnostics() {
|
||||
let args = CompileOnceArgs::parse_from(["tinymist", "main.typ"]);
|
||||
let verse = args
|
||||
.resolve_system()
|
||||
.expect("failed to resolve system universe");
|
||||
|
||||
let mut snap = CompileSnapshot::from_world(verse.snapshot());
|
||||
|
||||
snap.signal = ExportSignal {
|
||||
by_entry_update: true,
|
||||
by_fs_events: false,
|
||||
by_mem_events: false,
|
||||
};
|
||||
|
||||
let graph = WorldComputeGraph::new(snap);
|
||||
|
||||
let needs_run =
|
||||
ProjectCompilation::preconfig_timings(&graph).expect("failed to preconfigure timings");
|
||||
|
||||
assert!(needs_run);
|
||||
}
|
||||
}
|
||||
|
|
crates/tinymist/src/task/export2.rs (new file, 197 lines)
@ -0,0 +1,197 @@
#![allow(missing_docs)]

use std::sync::Arc;

use reflexo_typst::Bytes;
use tinymist_project::{EntryReader, LspCompilerFeat, PdfExport, PngExport, SvgExport, TaskWhen};
use tinymist_std::error::prelude::*;
use tinymist_std::typst::{TypstDocument, TypstPagedDocument};
use tinymist_task::ExportTimings;
use typlite::Typlite;
use typst::diag::SourceResult;

use crate::project::{ExportMarkdownTask, ExportTextTask, ProjectTask};
use crate::tool::text::FullTextDigest;
use crate::world::base::{
    CompilerFeat, ConfigTask, DiagnosticsTask, ExportComputation, FlagTask, OptionDocumentTask,
    PagedCompilationTask, WorldComputable, WorldComputeGraph,
};

#[derive(Clone, Copy, Default)]
pub struct ProjectCompilation;

impl ProjectCompilation {
    pub fn preconfig_timings<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<bool> {
        // todo: configure run_diagnostics!
        let paged_diag = Some(TaskWhen::OnType);

        let pdf: Option<TaskWhen> = graph
            .get::<ConfigTask<<PdfExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
            .transpose()?
            .map(|config| config.export.when);
        let svg: Option<TaskWhen> = graph
            .get::<ConfigTask<<SvgExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
            .transpose()?
            .map(|config| config.export.when);
        let png: Option<TaskWhen> = graph
            .get::<ConfigTask<<PngExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
            .transpose()?
            .map(|config| config.export.when);
        let md: Option<TaskWhen> = graph
            .get::<ConfigTask<ExportMarkdownTask>>()
            .transpose()?
            .map(|config| config.export.when);
        let text: Option<TaskWhen> = graph
            .get::<ConfigTask<<TextExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
            .transpose()?
            .map(|config| config.export.when);

        let doc = None::<TypstPagedDocument>.as_ref();
        let check = |timing| ExportTimings::needs_run(&graph.snap, timing, doc).unwrap_or(true);

        let compile_paged = [paged_diag, pdf, svg, png, text, md].into_iter().any(check);

        let _ = graph.provide::<FlagTask<PagedCompilationTask>>(Ok(FlagTask::flag(compile_paged)));

        Ok(compile_paged)
    }
}

impl<F: CompilerFeat> WorldComputable<F> for ProjectCompilation {
    type Output = Self;

    fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self> {
        Self::preconfig_timings(graph)?;
        DiagnosticsTask::compute(graph)?;
        Ok(Self)
    }
}

pub struct ProjectExport;

impl ProjectExport {
    fn export_bytes<T: ExportComputation<LspCompilerFeat, TypstPagedDocument, Output = Bytes>>(
        graph: &Arc<WorldComputeGraph<LspCompilerFeat>>,
        when: Option<TaskWhen>,
        config: &T::Config,
    ) -> Result<Option<Bytes>> {
        let doc = graph.compute::<OptionDocumentTask<TypstPagedDocument>>()?;
        let doc = doc.as_ref();
        let n = ExportTimings::needs_run(&graph.snap, when, doc.as_deref()).unwrap_or(true);
        if !n {
            return Ok(None);
        }

        let res = doc.as_ref().map(|doc| T::run(graph, doc, config));
        res.transpose()
    }

    fn export_string<T: ExportComputation<LspCompilerFeat, TypstPagedDocument, Output = String>>(
        graph: &Arc<WorldComputeGraph<LspCompilerFeat>>,
        when: Option<TaskWhen>,
        config: &T::Config,
    ) -> Result<Option<Bytes>> {
        let doc = graph.compute::<OptionDocumentTask<TypstPagedDocument>>()?;
        let doc = doc.as_ref();
        let n = ExportTimings::needs_run(&graph.snap, when, doc.as_deref()).unwrap_or(true);
        if !n {
            return Ok(None);
        }

        let doc = doc.as_ref();
        let res = doc.map(|doc| {
            T::run(graph, doc, config)
                .map(String::into_bytes)
                .map(Bytes::from)
        });
        res.transpose()
    }
}

impl WorldComputable<LspCompilerFeat> for ProjectExport {
    type Output = Self;

    fn compute(graph: &Arc<WorldComputeGraph<LspCompilerFeat>>) -> Result<Self> {
        let config = graph.must_get::<ConfigTask<ProjectTask>>()?;
        let output_path = config.as_export().and_then(|e| {
            e.output
                .as_ref()
                .and_then(|o| o.substitute(&graph.snap.world.entry_state()))
        });
        let when = config.when();

        let output = || -> Result<Option<Bytes>> {
            use ProjectTask::*;
            match config.as_ref() {
                Preview(..) => todo!(),
                ExportPdf(config) => Self::export_bytes::<PdfExport>(graph, when, config),
                ExportPng(config) => Self::export_bytes::<PngExport>(graph, when, config),
                ExportSvg(config) => Self::export_string::<SvgExport>(graph, when, config),
                ExportHtml(_config) => todo!(),
                ExportMd(_config) => {
                    let doc = graph.compute::<OptionDocumentTask<TypstPagedDocument>>()?;
                    let doc = doc.as_ref();
                    let n =
                        ExportTimings::needs_run(&graph.snap, when, doc.as_deref()).unwrap_or(true);
                    if !n {
                        return Ok(None);
                    }

                    Ok(TypliteMdExport::run(graph)?
                        .map(String::into_bytes)
                        .map(Bytes::from))
                }
                ExportText(config) => Self::export_string::<TextExport>(graph, when, config),
                Query(..) => todo!(),
            }
        };

        if let Some(path) = output_path {
            let output = output()?;
            // todo: don't ignore export source diagnostics
            if let Some(output) = output {
                std::fs::write(path, output).context("failed to write output")?;
            }
        }

        Ok(Self {})
    }
}

pub struct TypliteMdExport(pub Option<SourceResult<String>>);

impl TypliteMdExport {
    fn run(graph: &Arc<WorldComputeGraph<LspCompilerFeat>>) -> Result<Option<String>> {
        let conv = Typlite::new(Arc::new(graph.snap.world.clone()))
            .convert()
            .map_err(|e| anyhow::anyhow!("failed to convert to markdown: {e}"))?;

        Ok(Some(conv.to_string()))
    }
}

impl WorldComputable<LspCompilerFeat> for TypliteMdExport {
    type Output = Option<String>;

    fn compute(graph: &Arc<WorldComputeGraph<LspCompilerFeat>>) -> Result<Self::Output> {
        Self::run(graph)
    }
}

pub struct TextExport;

impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for TextExport {
    type Output = String;
    type Config = ExportTextTask;

    fn run(
        _g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstPagedDocument>,
        _config: &ExportTextTask,
    ) -> Result<String> {
        Ok(format!(
            "{}",
            FullTextDigest(TypstDocument::Paged(doc.clone()))
        ))
    }
}
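Putting the pieces of this new module together, a rough end-to-end sketch of how a caller might drive it. The setup mirrors the tests earlier in this diff, while the exact re-export paths (tinymist::export, tinymist::project, tinymist::world::base) are assumptions:

use std::sync::Arc;

use clap::Parser;
use tinymist::export::{ProjectCompilation, ProjectExport};
use tinymist::project::{
    CompileOnceArgs, ExportMarkdownTask, ExportSignal, ExportTask, ProjectTask,
};
use tinymist::world::base::{CompileSnapshot, ConfigTask, WorldComputeGraph};
use tinymist_std::error::prelude::*;

fn export_markdown_once() -> Result<()> {
    // Resolve a universe and take a snapshot, exactly like the tests above.
    let args = CompileOnceArgs::parse_from(["tinymist", "main.typ"]);
    let verse = args.resolve_system().expect("failed to resolve system universe");
    let mut snap = CompileSnapshot::from_world(verse.snapshot());

    // Pretend the entry just changed so the timing checks report work to do.
    snap.signal = ExportSignal {
        by_entry_update: true,
        by_fs_events: false,
        by_mem_events: false,
    };
    let graph = WorldComputeGraph::new(snap);

    // Seed the task configuration, then let the flags decide what to compile.
    let task = ProjectTask::ExportMd(ExportMarkdownTask { export: ExportTask::default() });
    let _ = graph.provide::<ConfigTask<ProjectTask>>(Ok(Arc::new(task)));
    ProjectCompilation::preconfig_timings(&graph)?;

    // Runs the export and writes the output if an output path was configured.
    let _ = graph.compute::<ProjectExport>()?;
    Ok(())
}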
@ -4,6 +4,7 @@

mod export;
pub use export::*;
pub mod export2;
mod format;
pub use format::*;
mod user_action;
@ -278,7 +278,7 @@ fn shell_build_script(shell: Shell) -> Result<String> {
        ProjectTask::ExportSvg(..) => {
            cmd.push("--format=svg");
        }
        ProjectTask::ExportMarkdown(..) => {
        ProjectTask::ExportMd(..) => {
            cmd.push("--format=md");
        }
        ProjectTask::ExportPng(..) => {
@ -11,19 +11,23 @@ impl FullTextDigest {
        for (_, item) in doc.items() {
            Self::export_item(f, item)?;
        }
        #[cfg(not(feature = "no-content-hint"))]
        {
            use std::fmt::Write;
            let c = doc.content_hint();
            if c != '\0' {
                f.write_char(c)?;
            }
        }

        Ok(())
    }

    fn export_item(f: &mut fmt::Formatter<'_>, item: &typst::layout::FrameItem) -> fmt::Result {
        #[cfg(not(feature = "no-content-hint"))]
        use std::fmt::Write;
        use typst::layout::FrameItem::*;
        match item {
            Group(g) => Self::export_frame(f, &g.frame),
            Text(t) => f.write_str(t.text.as_str()),
            #[cfg(not(feature = "no-content-hint"))]
            ContentHint(c) => f.write_char(*c),
            Link(..) | Tag(..) | Shape(..) | Image(..) => Ok(()),
        }
    }
@ -1,12 +1,12 @@
//! Word count tool for documents.

use std::io::{self, Write};
use std::ops::Range;
use std::sync::Arc;

use reflexo_typst::{debug_loc::SourceSpanOffset, exporter_utils::map_err};
use reflexo_typst::debug_loc::SourceSpanOffset;
use serde::{Deserialize, Serialize};
use tinymist_std::typst::TypstDocument;
use tinymist_task::TextExport;
use typst::{syntax::Span, text::TextItem};
use unicode_script::{Script, UnicodeScript};

@ -36,8 +36,7 @@ pub fn word_count(doc: &TypstDocument) -> WordsCount {
    let mut spaces = 0;

    // First, get text representation of the document.
    let w = TextExporter::default();
    let content = w.collect(doc).unwrap();
    let content = TextExport::run_on_doc(doc).unwrap_or_default();

    /// A automaton to count words.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]

@ -92,60 +91,6 @@ pub fn word_count(doc: &TypstDocument) -> WordsCount {
    }
}

/// Export text content from a document.
#[derive(Debug, Clone, Default)]
pub struct TextExporter {}

impl TextExporter {
    /// Collect text content from a document.
    pub fn collect(&self, output: &TypstDocument) -> typst::diag::SourceResult<String> {
        let w = std::io::BufWriter::new(Vec::new());

        let mut d = TextExportWorker { w };
        d.doc(output).map_err(map_err)?;

        d.w.flush().unwrap();
        Ok(String::from_utf8(d.w.into_inner().unwrap()).unwrap())
    }
}

struct TextExportWorker {
    w: std::io::BufWriter<Vec<u8>>,
}

impl TextExportWorker {
    fn doc(&mut self, doc: &TypstDocument) -> io::Result<()> {
        match doc {
            TypstDocument::Paged(paged_doc) => {
                for page in paged_doc.pages.iter() {
                    self.frame(&page.frame)?;
                }
                Ok(())
            }
        }
    }

    fn frame(&mut self, doc: &typst::layout::Frame) -> io::Result<()> {
        for (_, item) in doc.items() {
            self.item(item)?;
        }

        Ok(())
    }

    fn item(&mut self, item: &typst::layout::FrameItem) -> io::Result<()> {
        use typst::layout::FrameItem::*;
        match item {
            Group(g) => self.frame(&g.frame),
            Text(t) => {
                write!(self.w, " {}", t.text.as_str())
            }
            Link(..) | Shape(..) | Image(..) => self.w.write_all(b"object"),
            Tag(..) => Ok(()),
        }
    }
}

/// Given a text range, map it back to the original document.
#[derive(Debug, Clone)]
pub struct MappedSpan {