feat: merge fs and std crate (#1203)

* feat: merge fs and std crate

* fix: errors

Author: Myriad-Dreamin, 2025-01-20 23:00:31 +08:00 (committed by GitHub)
Parent: 04f688e122
Commit: 8ca6c8118c
41 changed files with 449 additions and 325 deletions

Cargo.lock (generated)
View file

@ -4007,7 +4007,6 @@ dependencies = [
"sync-lsp", "sync-lsp",
"tinymist-assets 0.12.18 (registry+https://github.com/rust-lang/crates.io-index)", "tinymist-assets 0.12.18 (registry+https://github.com/rust-lang/crates.io-index)",
"tinymist-core", "tinymist-core",
"tinymist-fs",
"tinymist-project", "tinymist-project",
"tinymist-query", "tinymist-query",
"tinymist-render", "tinymist-render",
@ -4078,19 +4077,6 @@ dependencies = [
"syn 2.0.90", "syn 2.0.90",
] ]
[[package]]
name = "tinymist-fs"
version = "0.12.18"
dependencies = [
"anyhow",
"core-foundation",
"libc",
"log",
"same-file",
"tempfile",
"windows-sys 0.59.0",
]
[[package]] [[package]]
name = "tinymist-project" name = "tinymist-project"
version = "0.12.18" version = "0.12.18"
@ -4111,7 +4097,6 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"tinymist-derive", "tinymist-derive",
"tinymist-fs",
"tinymist-std", "tinymist-std",
"tinymist-world", "tinymist-world",
"tokio", "tokio",
@ -4190,27 +4175,34 @@ dependencies = [
name = "tinymist-std" name = "tinymist-std"
version = "0.12.18" version = "0.12.18"
dependencies = [ dependencies = [
"anyhow",
"base64", "base64",
"bitvec", "bitvec",
"comemo", "comemo",
"core-foundation",
"dashmap", "dashmap",
"ecow", "ecow",
"fxhash", "fxhash",
"hex", "hex",
"js-sys", "js-sys",
"libc",
"log",
"parking_lot", "parking_lot",
"path-clean", "path-clean",
"rkyv", "rkyv",
"rustc-hash 2.1.0", "rustc-hash 2.1.0",
"same-file",
"serde", "serde",
"serde_json", "serde_json",
"serde_repr", "serde_repr",
"serde_with", "serde_with",
"siphasher 1.0.1", "siphasher 1.0.1",
"tempfile",
"typst", "typst",
"typst-shim", "typst-shim",
"wasm-bindgen", "wasm-bindgen",
"web-time", "web-time",
"windows-sys 0.59.0",
] ]
[[package]] [[package]]

View file

@ -85,6 +85,7 @@ sha2 = "0.10.6"
nohash-hasher = "0.2.0" nohash-hasher = "0.2.0"
# Data Structures # Data Structures
bitvec = "1"
comemo = "0.4" comemo = "0.4"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable # We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.5.3", features = ["raw-api"] } dashmap = { version = "=5.5.3", features = ["raw-api"] }
@ -109,8 +110,9 @@ rkyv = "0.7.42"
semver = "1" semver = "1"
serde = { version = "1", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1" serde_json = "1"
serde_yaml = "0.9" serde_repr = "0.1"
serde_with = { version = "3.6", features = ["base64"] } serde_with = { version = "3.6", features = ["base64"] }
serde_yaml = "0.9"
serde-wasm-bindgen = "^0.6" serde-wasm-bindgen = "^0.6"
toml = { version = "0.8", default-features = false, features = [ toml = { version = "0.8", default-features = false, features = [
"parse", "parse",
@ -184,7 +186,6 @@ tinymist-vfs = { path = "./crates/tinymist-vfs/", default-features = false }
tinymist-core = { path = "./crates/tinymist-core/", default-features = false } tinymist-core = { path = "./crates/tinymist-core/", default-features = false }
tinymist-world = { path = "./crates/tinymist-world/", default-features = false } tinymist-world = { path = "./crates/tinymist-world/", default-features = false }
tinymist-project = { path = "./crates/tinymist-project/" } tinymist-project = { path = "./crates/tinymist-project/" }
tinymist-fs = { path = "./crates/tinymist-fs/" }
tinymist-derive = { path = "./crates/tinymist-derive/" } tinymist-derive = { path = "./crates/tinymist-derive/" }
tinymist-analysis = { path = "./crates/tinymist-analysis/" } tinymist-analysis = { path = "./crates/tinymist-analysis/" }
tinymist-query = { path = "./crates/tinymist-query/" } tinymist-query = { path = "./crates/tinymist-query/" }

View file

@ -1,41 +0,0 @@
[package]
name = "tinymist-fs"
description = "Filesystem support for tinymist."
categories = ["compilers", "command-line-utilities"]
keywords = ["tool"]
authors.workspace = true
version.workspace = true
license.workspace = true
edition.workspace = true
homepage.workspace = true
repository.workspace = true
rust-version.workspace = true
[dependencies]
anyhow.workspace = true
log.workspace = true
tempfile.workspace = true
same-file.workspace = true
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation.workspace = true
[target.'cfg(unix)'.dependencies]
libc.workspace = true
[target.'cfg(windows)'.dependencies]
windows-sys = { workspace = true, features = [
"Win32_Foundation",
"Win32_Security",
"Win32_Storage_FileSystem",
"Win32_System_IO",
"Win32_System_Console",
"Win32_System_JobObjects",
"Win32_System_Threading",
] }
[features]
[lints]
workspace = true

View file

@ -1,2 +0,0 @@
/// Type alias for `anyhow::Result`.
pub type Result<T> = anyhow::Result<T>;

View file

@ -28,8 +28,7 @@ semver.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true
tinymist-world = { workspace = true, features = ["system"] } tinymist-world = { workspace = true, features = ["system"] }
tinymist-fs.workspace = true tinymist-std = { workspace = true, features = ["system"] }
tinymist-std.workspace = true
tinymist-derive.workspace = true tinymist-derive.workspace = true
toml.workspace = true toml.workspace = true
typst.workspace = true typst.workspace = true

View file

@ -2,11 +2,10 @@ use crate::DocIdArgs;
use core::fmt; use core::fmt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{num::NonZeroUsize, ops::RangeInclusive, path::Path, str::FromStr, sync::OnceLock}; use std::{num::NonZeroUsize, ops::RangeInclusive, path::Path, str::FromStr, sync::OnceLock};
use tinymist_std::{bail, error::prelude::ZResult}; use tinymist_std::{bail, error::prelude::Result};
pub use tinymist_world::args::{CompileFontArgs, CompilePackageArgs}; pub use tinymist_world::args::{CompileFontArgs, CompilePackageArgs};
pub use typst_preview::{PreviewArgs, PreviewMode}; pub use typst_preview::{PreviewArgs, PreviewMode};
use anyhow::Result;
use clap::{ValueEnum, ValueHint}; use clap::{ValueEnum, ValueHint};
use crate::model::*; use crate::model::*;
@ -271,11 +270,11 @@ pub struct TaskCompileArgs {
pub ppi: f32, pub ppi: f32,
#[clap(skip)] #[clap(skip)]
pub output_format: OnceLock<ZResult<OutputFormat>>, pub output_format: OnceLock<Result<OutputFormat>>,
} }
impl TaskCompileArgs { impl TaskCompileArgs {
pub fn to_task(self, doc_id: Id) -> ZResult<ProjectTask> { pub fn to_task(self, doc_id: Id) -> Result<ProjectTask> {
let new_task_id = self.task_name.map(Id::new); let new_task_id = self.task_name.map(Id::new);
let task_id = new_task_id.unwrap_or(doc_id.clone()); let task_id = new_task_id.unwrap_or(doc_id.clone());

View file

@ -18,7 +18,7 @@ use reflexo_typst::{
features::{CompileFeature, FeatureSet, WITH_COMPILING_STATUS_FEATURE}, features::{CompileFeature, FeatureSet, WITH_COMPILING_STATUS_FEATURE},
CompileEnv, CompileReport, Compiler, TypstDocument, CompileEnv, CompileReport, Compiler, TypstDocument,
}; };
use tinymist_std::error::prelude::ZResult; use tinymist_std::error::prelude::Result;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use typst::diag::{SourceDiagnostic, SourceResult}; use typst::diag::{SourceDiagnostic, SourceResult};
@ -606,7 +606,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
} }
} }
pub fn restart_dedicate(&mut self, group: &str, entry: EntryState) -> ZResult<ProjectInsId> { pub fn restart_dedicate(&mut self, group: &str, entry: EntryState) -> Result<ProjectInsId> {
let id = ProjectInsId(group.into()); let id = ProjectInsId(group.into());
let verse = CompilerUniverse::<F>::new_raw( let verse = CompilerUniverse::<F>::new_raw(

View file

@ -121,7 +121,7 @@ impl ProjectLockUpdater {
let data = serde_json::to_string(&mat).unwrap(); let data = serde_json::to_string(&mat).unwrap();
let path = cache_dir.join("path-material.json"); let path = cache_dir.join("path-material.json");
let result = tinymist_fs::paths::write_atomic(path, data); let result = tinymist_std::fs::paths::write_atomic(path, data);
if let Err(err) = result { if let Err(err) = result {
log::error!("ProjectCompiler: write material error: {err}"); log::error!("ProjectCompiler: write material error: {err}");
} }
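The only change to this call site is the import path: the atomic-write helper now lives under `tinymist_std::fs::paths`. A minimal sketch of the relocated call, with the `write_atomic(path, contents)` shape taken from the line above (anything beyond that shape is an assumption):

```rust
use std::path::Path;

// Sketch: persist derived state through the relocated helper, mirroring the
// call site above. `write_atomic` accepting a `PathBuf` and a `String` is
// exactly what that call site shows; its precise generic bounds are assumed.
fn persist_material(cache_dir: &Path, json: String) {
    let path = cache_dir.join("path-material.json");
    let result = tinymist_std::fs::paths::write_atomic(path, json);
    if let Err(err) = result {
        log::error!("failed to write material: {err}");
    }
}
```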

View file

@ -4,18 +4,16 @@ use std::io::{Read, Seek, SeekFrom, Write};
use std::path::PathBuf; use std::path::PathBuf;
use std::{cmp::Ordering, path::Path, str::FromStr}; use std::{cmp::Ordering, path::Path, str::FromStr};
use anyhow::{bail, Context};
use clap::ValueHint; use clap::ValueHint;
use ecow::{eco_vec, EcoVec}; use ecow::{eco_vec, EcoVec};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tinymist_std::error::prelude::*;
use tinymist_std::path::unix_slash; use tinymist_std::path::unix_slash;
use tinymist_std::ImmutPath; use tinymist_std::{bail, ImmutPath};
use tinymist_world::EntryReader; use tinymist_world::EntryReader;
use typst::diag::EcoString; use typst::diag::EcoString;
use typst::syntax::FileId; use typst::syntax::FileId;
pub use anyhow::Result;
pub mod task; pub mod task;
pub use task::*; pub use task::*;
@ -39,17 +37,17 @@ pub enum LockFileCompat {
} }
impl LockFileCompat { impl LockFileCompat {
pub fn version(&self) -> anyhow::Result<&str> { pub fn version(&self) -> Result<&str> {
match self { match self {
LockFileCompat::Version010Beta0(..) => Ok(LOCK_VERSION), LockFileCompat::Version010Beta0(..) => Ok(LOCK_VERSION),
LockFileCompat::Other(v) => v LockFileCompat::Other(v) => v
.get("version") .get("version")
.and_then(|v| v.as_str()) .and_then(|v| v.as_str())
.ok_or_else(|| anyhow::anyhow!("missing version field")), .context("missing version field"),
} }
} }
pub fn migrate(self) -> anyhow::Result<LockFile> { pub fn migrate(self) -> Result<LockFile> {
match self { match self {
LockFileCompat::Version010Beta0(v) => Ok(v), LockFileCompat::Version010Beta0(v) => Ok(v),
this @ LockFileCompat::Other(..) => { this @ LockFileCompat::Other(..) => {
@ -189,12 +187,14 @@ impl LockFile {
} }
pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> { pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> {
let fs = tinymist_fs::flock::Filesystem::new(cwd.to_owned()); let fs = tinymist_std::fs::flock::Filesystem::new(cwd.to_owned());
let mut lock_file = fs.open_rw_exclusive_create(LOCK_FILENAME, "project commands")?; let mut lock_file = fs
.open_rw_exclusive_create(LOCK_FILENAME, "project commands")
.context("tinymist.lock")?;
let mut data = vec![]; let mut data = vec![];
lock_file.read_to_end(&mut data)?; lock_file.read_to_end(&mut data).context("read lock")?;
let old_data = let old_data =
std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?; std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;
@ -207,7 +207,7 @@ impl LockFile {
} }
} else { } else {
let old_state = toml::from_str::<LockFileCompat>(old_data) let old_state = toml::from_str::<LockFileCompat>(old_data)
.context("tinymist.lock file is not a valid TOML file")?; .context_ut("tinymist.lock file is not a valid TOML file")?;
let version = old_state.version()?; let version = old_state.version()?;
match Version(version).partial_cmp(&Version(LOCK_VERSION)) { match Version(version).partial_cmp(&Version(LOCK_VERSION)) {
@ -244,25 +244,29 @@ impl LockFile {
// while writing the lock file. This is sensible because `Cargo.lock` is // while writing the lock file. This is sensible because `Cargo.lock` is
// only a "resolved result" of the `Cargo.toml`. Thus, we should inform // only a "resolved result" of the `Cargo.toml`. Thus, we should inform
// users that don't only persist configuration in the lock file. // users that don't only persist configuration in the lock file.
lock_file.file().set_len(0)?; lock_file.file().set_len(0).context(LOCK_FILENAME)?;
lock_file.seek(SeekFrom::Start(0))?; lock_file.seek(SeekFrom::Start(0)).context(LOCK_FILENAME)?;
lock_file.write_all(new_data.as_bytes())?; lock_file
.write_all(new_data.as_bytes())
.context(LOCK_FILENAME)?;
Ok(()) Ok(())
} }
pub fn read(dir: &Path) -> Result<Self> { pub fn read(dir: &Path) -> Result<Self> {
let fs = tinymist_fs::flock::Filesystem::new(dir.to_owned()); let fs = tinymist_std::fs::flock::Filesystem::new(dir.to_owned());
let mut lock_file = fs.open_ro_shared(LOCK_FILENAME, "project commands")?; let mut lock_file = fs
.open_ro_shared(LOCK_FILENAME, "project commands")
.context(LOCK_FILENAME)?;
let mut data = vec![]; let mut data = vec![];
lock_file.read_to_end(&mut data)?; lock_file.read_to_end(&mut data).context(LOCK_FILENAME)?;
let data = std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?; let data = std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;
let state = toml::from_str::<LockFileCompat>(data) let state = toml::from_str::<LockFileCompat>(data)
.context("tinymist.lock file is not a valid TOML file")?; .context_ut("tinymist.lock file is not a valid TOML file")?;
state.migrate() state.migrate()
} }
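After the merge, the lock-file code follows one pattern: open the file through `tinymist_std::fs::flock::Filesystem` (available behind the `system` feature) and attach a static context label to every fallible step. A sketch built only from the methods used in this hunk:

```rust
use std::io::Read;
use std::path::Path;

use tinymist_std::error::prelude::*;
use tinymist_std::fs::flock::Filesystem;

// Sketch: read a lock-guarded file in `dir`, mirroring `LockFile::read`.
// Only `Filesystem::new` and `open_ro_shared` from this hunk are assumed.
fn read_locked(dir: &Path, name: &str) -> Result<String> {
    let fs = Filesystem::new(dir.to_owned());
    let mut file = fs
        .open_ro_shared(name, "example usage")
        .context("read_locked")?;

    let mut data = Vec::new();
    file.read_to_end(&mut data).context("read_locked")?;
    let text = std::str::from_utf8(&data).context("file is not valid utf-8")?;
    Ok(text.to_owned())
}
```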

View file

@ -1,6 +1,5 @@
use std::hash::Hash; use std::hash::Hash;
pub use anyhow::Result;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tinymist_derive::toml_model; use tinymist_derive::toml_model;

View file

@ -15,7 +15,6 @@ use std::path::Path;
use std::{borrow::Cow, sync::Arc}; use std::{borrow::Cow, sync::Arc};
use ::typst::utils::LazyHash; use ::typst::utils::LazyHash;
use anyhow::Context;
use tinymist_std::error::prelude::*; use tinymist_std::error::prelude::*;
use tinymist_std::ImmutPath; use tinymist_std::ImmutPath;
use tinymist_world::font::system::SystemFontSearcher; use tinymist_world::font::system::SystemFontSearcher;
@ -47,13 +46,13 @@ pub type TypstSystemWorldExtend = CompilerWorld<SystemCompilerFeatExtend>;
pub trait WorldProvider { pub trait WorldProvider {
/// Get the entry options from the arguments. /// Get the entry options from the arguments.
fn entry(&self) -> anyhow::Result<EntryOpts>; fn entry(&self) -> Result<EntryOpts>;
/// Get a universe instance from the given arguments. /// Get a universe instance from the given arguments.
fn resolve(&self) -> anyhow::Result<LspUniverse>; fn resolve(&self) -> Result<LspUniverse>;
} }
impl WorldProvider for CompileOnceArgs { impl WorldProvider for CompileOnceArgs {
fn resolve(&self) -> anyhow::Result<LspUniverse> { fn resolve(&self) -> Result<LspUniverse> {
let entry = self.entry()?.try_into()?; let entry = self.entry()?.try_into()?;
let inputs = self let inputs = self
.inputs .inputs
@ -75,7 +74,7 @@ impl WorldProvider for CompileOnceArgs {
.context("failed to create universe") .context("failed to create universe")
} }
fn entry(&self) -> anyhow::Result<EntryOpts> { fn entry(&self) -> Result<EntryOpts> {
let input = self.input.as_ref().context("entry file must be provided")?; let input = self.input.as_ref().context("entry file must be provided")?;
let input = Path::new(&input); let input = Path::new(&input);
let entry = if input.is_absolute() { let entry = if input.is_absolute() {
@ -134,7 +133,7 @@ impl LspUniverseBuilder {
inputs: ImmutDict, inputs: ImmutDict,
font_resolver: Arc<TinymistFontResolver>, font_resolver: Arc<TinymistFontResolver>,
package_registry: HttpRegistry, package_registry: HttpRegistry,
) -> ZResult<LspUniverse> { ) -> Result<LspUniverse> {
let registry = Arc::new(package_registry); let registry = Arc::new(package_registry);
let resolver = Arc::new(RegistryPathMapper::new(registry.clone())); let resolver = Arc::new(RegistryPathMapper::new(registry.clone()));
@ -148,7 +147,7 @@ impl LspUniverseBuilder {
} }
/// Resolve fonts from given options. /// Resolve fonts from given options.
pub fn only_embedded_fonts() -> ZResult<TinymistFontResolver> { pub fn only_embedded_fonts() -> Result<TinymistFontResolver> {
let mut searcher = SystemFontSearcher::new(); let mut searcher = SystemFontSearcher::new();
searcher.resolve_opts(CompileFontOpts { searcher.resolve_opts(CompileFontOpts {
font_profile_cache_path: Default::default(), font_profile_cache_path: Default::default(),
@ -160,7 +159,7 @@ impl LspUniverseBuilder {
} }
/// Resolve fonts from given options. /// Resolve fonts from given options.
pub fn resolve_fonts(args: CompileFontArgs) -> ZResult<TinymistFontResolver> { pub fn resolve_fonts(args: CompileFontArgs) -> Result<TinymistFontResolver> {
let mut searcher = SystemFontSearcher::new(); let mut searcher = SystemFontSearcher::new();
searcher.resolve_opts(CompileFontOpts { searcher.resolve_opts(CompileFontOpts {
font_profile_cache_path: Default::default(), font_profile_cache_path: Default::default(),

View file

@ -1,6 +1,6 @@
[package] [package]
name = "tinymist-std" name = "tinymist-std"
description = "Additional functions wrapping Rust's std library." description = "Additional functions wrapping Rust's standard library."
authors.workspace = true authors.workspace = true
version.workspace = true version.workspace = true
license.workspace = true license.workspace = true
@ -10,32 +10,54 @@ repository.workspace = true
[dependencies] [dependencies]
comemo.workspace = true anyhow.workspace = true
ecow.workspace = true
parking_lot.workspace = true
web-time.workspace = true
wasm-bindgen = { workspace = true, optional = true }
js-sys = { workspace = true, optional = true }
bitvec = { version = "1" }
dashmap = { version = "5" }
# tiny-skia-path.workspace = true
path-clean.workspace = true
base64.workspace = true base64.workspace = true
bitvec.workspace = true
comemo.workspace = true
dashmap.workspace = true
ecow.workspace = true
fxhash.workspace = true fxhash.workspace = true
log.workspace = true
path-clean.workspace = true
parking_lot.workspace = true
rustc-hash.workspace = true rustc-hash.workspace = true
siphasher.workspace = true
serde = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] }
serde_repr = "0.1" serde_repr.workspace = true
serde_json.workspace = true serde_json.workspace = true
serde_with.workspace = true serde_with.workspace = true
siphasher.workspace = true
web-time.workspace = true
tempfile = { workspace = true, optional = true }
same-file = { workspace = true, optional = true }
# feature = "web"
js-sys = { workspace = true, optional = true }
wasm-bindgen = { workspace = true, optional = true }
# feature = "rkyv"
rkyv = { workspace = true, optional = true } rkyv = { workspace = true, optional = true }
# feature = "typst"
typst = { workspace = true, optional = true } typst = { workspace = true, optional = true }
typst-shim = { workspace = true, optional = true } typst-shim = { workspace = true, optional = true }
[target.'cfg(target_os = "macos")'.dependencies]
core-foundation.workspace = true
[target.'cfg(unix)'.dependencies]
libc.workspace = true
[target.'cfg(windows)'.dependencies]
windows-sys = { workspace = true, features = [
"Win32_Foundation",
"Win32_Security",
"Win32_Storage_FileSystem",
"Win32_System_IO",
"Win32_System_Console",
"Win32_System_JobObjects",
"Win32_System_Threading",
] }
[dev-dependencies] [dev-dependencies]
hex.workspace = true hex.workspace = true
@ -48,13 +70,11 @@ typst = ["dep:typst", "dep:typst-shim"]
rkyv = ["dep:rkyv", "rkyv/alloc", "rkyv/archive_le"] rkyv = ["dep:rkyv", "rkyv/alloc", "rkyv/archive_le"]
rkyv-validation = ["dep:rkyv", "rkyv/validation"] rkyv-validation = ["dep:rkyv", "rkyv/validation"]
# flat-vector = ["rkyv", "rkyv-validation"]
__web = ["dep:wasm-bindgen", "dep:js-sys"] __web = ["dep:wasm-bindgen", "dep:js-sys"]
web = ["__web"] web = ["__web"]
system = [] system = ["dep:tempfile", "dep:same-file"]
bi-hash = [] bi-hash = []
item-dashmap = []
[lints] [lints]
workspace = true workspace = true

View file

@ -1,3 +1,5 @@
//! A map that shards items by their fingerprint.
use std::{collections::HashMap, num::NonZeroU32}; use std::{collections::HashMap, num::NonZeroU32};
use crate::hash::Fingerprint; use crate::hash::Fingerprint;
@ -31,12 +33,13 @@ fn default_shard_size() -> NonZeroU32 {
type FMapBase<V> = parking_lot::RwLock<HashMap<Fingerprint, V>>; type FMapBase<V> = parking_lot::RwLock<HashMap<Fingerprint, V>>;
/// A map that shards items by their fingerprint. /// A map that shards items by their fingerprint. This is faster
/// than the dashmap in some cases.
/// ///
/// It is fast since a fingerprint could split items into different shards /// It is fast since a fingerprint could split items into different shards
/// efficiently. /// efficiently.
/// ///
/// Note: If a fingerprint is calculated from a hash function, it is not /// Note: If a fingerprint is not calculated from a hash function, it is not
/// guaranteed that the fingerprint is evenly distributed. Thus, in that case, /// guaranteed that the fingerprint is evenly distributed. Thus, in that case,
/// the performance of this map is not guaranteed. /// the performance of this map is not guaranteed.
pub struct FingerprintMap<V> { pub struct FingerprintMap<V> {
@ -76,6 +79,7 @@ impl<V> FingerprintMap<V> {
.flat_map(|shard| shard.into_inner().into_iter()) .flat_map(|shard| shard.into_inner().into_iter())
} }
/// Gets the shard that the given fingerprint routes to.
pub fn shard(&self, fg: Fingerprint) -> &FMapBase<V> { pub fn shard(&self, fg: Fingerprint) -> &FMapBase<V> {
let shards = &self.shards; let shards = &self.shards;
let route_idx = (fg.lower32() & self.mask) as usize; let route_idx = (fg.lower32() & self.mask) as usize;
@ -92,6 +96,7 @@ impl<V> FingerprintMap<V> {
&mut self.shards &mut self.shards
} }
/// Checks whether the map contains a value for the given fingerprint.
pub fn contains_key(&self, fg: &Fingerprint) -> bool { pub fn contains_key(&self, fg: &Fingerprint) -> bool {
self.shard(*fg).read().contains_key(fg) self.shard(*fg).read().contains_key(fg)
} }
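The shard accessor lets callers pick their own locking granularity. A sketch of typical lookups and inserts built only on the methods shown in this hunk (the map's constructor is outside the hunk and omitted; the `tinymist_std::adt` and `tinymist_std::hash` paths are assumed from the module layout in this commit):

```rust
use tinymist_std::adt::FingerprintMap;
use tinymist_std::hash::Fingerprint;

// Sketch: route to the shard owning `fg`, then take only the lock you need.
fn get_cloned(map: &FingerprintMap<String>, fg: Fingerprint) -> Option<String> {
    map.shard(fg).read().get(&fg).cloned()
}

fn insert_if_absent(map: &FingerprintMap<String>, fg: Fingerprint, value: String) -> bool {
    let mut shard = map.shard(fg).write();
    if shard.contains_key(&fg) {
        return false;
    }
    shard.insert(fg, value);
    true
}
```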

View file

@ -1,3 +1,5 @@
//! This module contains the implementation of the abstract data types.
pub mod fmap; pub mod fmap;
pub use fmap::FingerprintMap; pub use fmap::FingerprintMap;

View file

@ -1,6 +1,16 @@
//! <https://github.com/rust-analyzer/rowan/blob/v0.16.1/src/cow_mut.rs>
//!
//! This module provides a `CowMut` type, a mutable counterpart to `Cow`. A
//! mutable "copy on write" type may sound contradictory, but we never use
//! `Cow`'s own mutation API anyway, so an explicit owned-or-borrowed-mut enum
//! fits our needs better.
/// A mutable version of [Cow][`std::borrow::Cow`].
#[derive(Debug)] #[derive(Debug)]
pub enum CowMut<'a, T> { pub enum CowMut<'a, T> {
/// An owned data.
Owned(T), Owned(T),
/// A borrowed mut data.
Borrowed(&'a mut T), Borrowed(&'a mut T),
} }

View file

@ -1,61 +0,0 @@
//todo: move to core/src/hash.rs
use std::{
hash::{Hash, Hasher},
ops::Deref,
};
use crate::hash::item_hash128;
pub trait StaticHash128 {
fn get_hash(&self) -> u128;
}
impl Hash for dyn StaticHash128 {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u128(self.get_hash());
}
}
pub struct HashedTrait<T: ?Sized> {
hash: u128,
t: Box<T>,
}
impl<T: ?Sized> HashedTrait<T> {
pub fn new(hash: u128, t: Box<T>) -> Self {
Self { hash, t }
}
}
impl<T: ?Sized> Deref for HashedTrait<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.t
}
}
impl<T> Hash for HashedTrait<T> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u128(self.hash);
}
}
impl<T: Hash + Default + 'static> Default for HashedTrait<T> {
fn default() -> Self {
let t = T::default();
Self {
hash: item_hash128(&t),
t: Box::new(t),
}
}
}
impl<T: ?Sized> StaticHash128 for HashedTrait<T> {
fn get_hash(&self) -> u128 {
self.hash
}
}

View file

@ -7,6 +7,7 @@ use serde_with::{
}; };
use serde_with::{DeserializeAs, SerializeAs}; use serde_with::{DeserializeAs, SerializeAs};
/// A marker type for serializing and deserializing `Cow<[u8]>` as base64.
pub struct AsCowBytes; pub struct AsCowBytes;
type StdBase64 = Base64<Standard, Padded>; type StdBase64 = Base64<Standard, Padded>;
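`AsCowBytes` is a `serde_with` adapter for encoding `Cow<[u8]>` as padded base64. A sketch of applying it; the crate-root re-export `tinymist_std::AsCowBytes` is assumed from the `pub use marker::*;` in the next hunk, and the `SerializeAs<Cow<[u8]>>` impl is assumed from the doc comment above:

```rust
use std::borrow::Cow;

use serde::Serialize;
use serde_with::serde_as;
use tinymist_std::AsCowBytes; // re-export path assumed

#[serde_as]
#[derive(Serialize)]
struct Blob<'a> {
    // Borrowed bytes are rendered as padded standard base64.
    #[serde_as(as = "AsCowBytes")]
    data: Cow<'a, [u8]>,
}

fn to_json(bytes: &[u8]) -> serde_json::Result<String> {
    serde_json::to_string(&Blob { data: Cow::Borrowed(bytes) })
}
```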

View file

@ -3,9 +3,6 @@ use std::{path::Path, sync::Arc};
pub use takable::*; pub use takable::*;
mod hash;
pub use hash::*;
pub mod cow_mut; pub mod cow_mut;
mod query; mod query;
@ -17,6 +14,9 @@ pub use read::*;
mod marker; mod marker;
pub use marker::*; pub use marker::*;
/// An immutable string.
pub type ImmutStr = Arc<str>; pub type ImmutStr = Arc<str>;
/// An immutable byte slice.
pub type ImmutBytes = Arc<[u8]>; pub type ImmutBytes = Arc<[u8]>;
/// An immutable path.
pub type ImmutPath = Arc<Path>; pub type ImmutPath = Arc<Path>;
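All three aliases are `Arc`-backed, so sharing them across threads or tasks is a reference-count bump rather than a deep copy. A small sketch, assuming `ImmutStr` is re-exported at the crate root alongside `ImmutPath`:

```rust
use std::path::Path;

use tinymist_std::{ImmutPath, ImmutStr};

// Sketch: both aliases convert cheaply from borrowed data and clone in O(1).
fn share(path: &Path, name: &str) -> (ImmutPath, ImmutStr) {
    let path: ImmutPath = path.into(); // Arc<Path> from &Path
    let name: ImmutStr = name.into();  // Arc<str> from &str
    let _alias = path.clone();         // bumps the refcount only
    (path, name)
}
```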

View file

@ -15,6 +15,7 @@ pub struct QueryRef<Res, Err, QueryContext = ()> {
} }
impl<T, E, QC> QueryRef<T, E, QC> { impl<T, E, QC> QueryRef<T, E, QC> {
/// Create a new query reference with the given value.
pub fn with_value(value: T) -> Self { pub fn with_value(value: T) -> Self {
let cell = OnceLock::new(); let cell = OnceLock::new();
cell.get_or_init(|| Ok(value)); cell.get_or_init(|| Ok(value));
@ -24,6 +25,8 @@ impl<T, E, QC> QueryRef<T, E, QC> {
} }
} }
/// Create a new query reference with the given context to execute the
/// query.
pub fn with_context(ctx: QC) -> Self { pub fn with_context(ctx: QC) -> Self {
Self { Self {
ctx: Mutex::new(Some(ctx)), ctx: Mutex::new(Some(ctx)),

View file

@ -1,3 +1,5 @@
/// A trait for reading all data from a source into a buffer.
pub trait ReadAllOnce { pub trait ReadAllOnce {
/// Reads all data from the source into the buffer.
fn read_all(self, buf: &mut Vec<u8>) -> std::io::Result<usize>; fn read_all(self, buf: &mut Vec<u8>) -> std::io::Result<usize>;
} }
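`ReadAllOnce` models a reader that is consumed by a single read. A sketch of adapting any `std::io::Read` source to it, assuming the trait is re-exported at the crate root via the `pub use read::*;` shown elsewhere in this commit:

```rust
use std::io::Read;

use tinymist_std::ReadAllOnce; // re-export path assumed

// Sketch: a one-shot adapter over any std reader.
struct StdReader<R>(R);

impl<R: Read> ReadAllOnce for StdReader<R> {
    fn read_all(mut self, buf: &mut Vec<u8>) -> std::io::Result<usize> {
        self.0.read_to_end(buf)
    }
}

fn slurp(bytes: &[u8]) -> std::io::Result<usize> {
    let mut buf = Vec::new();
    StdReader(bytes).read_all(&mut buf)
}
```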

View file

@ -1,3 +1,6 @@
//! The debug location that can be used to locate a position in a document or a
//! file.
use core::fmt; use core::fmt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -38,6 +41,7 @@ pub type RawSourceSpan = u64;
/// See [`CharPosition`] for the definition of the position inside a file. /// See [`CharPosition`] for the definition of the position inside a file.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileLocation { pub struct FileLocation {
/// The file path.
pub filepath: String, pub filepath: String,
} }
@ -73,11 +77,14 @@ impl From<Option<(usize, usize)>> for CharPosition {
/// See [`CharPosition`] for the definition of the position inside a file. /// See [`CharPosition`] for the definition of the position inside a file.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SourceLocation { pub struct SourceLocation {
/// The file path.
pub filepath: String, pub filepath: String,
/// The position in the file.
pub pos: CharPosition, pub pos: CharPosition,
} }
impl SourceLocation { impl SourceLocation {
/// Create a new source location.
pub fn from_flat( pub fn from_flat(
flat: FlatSourceLocation, flat: FlatSourceLocation,
i: &impl std::ops::Index<usize, Output = FileLocation>, i: &impl std::ops::Index<usize, Output = FileLocation>,
@ -94,16 +101,20 @@ impl SourceLocation {
/// See [`CharPosition`] for the definition of the position inside a file. /// See [`CharPosition`] for the definition of the position inside a file.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FlatSourceLocation { pub struct FlatSourceLocation {
/// The file path.
pub filepath: u32, pub filepath: u32,
/// The position in the file.
pub pos: CharPosition, pub pos: CharPosition,
} }
// /// A resolved file range. /// A resolved file range.
// /// ///
// /// See [`CharPosition`] for the definition of the position inside a file. /// See [`CharPosition`] for the definition of the position inside a file.
#[derive(Debug, Clone, Serialize, Deserialize, Default)] #[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct CharRange { pub struct CharRange {
/// The start position.
pub start: CharPosition, pub start: CharPosition,
/// The end position.
pub end: CharPosition, pub end: CharPosition,
} }
@ -117,12 +128,14 @@ impl fmt::Display for CharRange {
} }
} }
// /// A resolved source (text) range. /// A resolved source (text) range.
// /// ///
// /// See [`CharPosition`] for the definition of the position inside a file. /// See [`CharPosition`] for the definition of the position inside a file.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SourceRange { pub struct SourceRange {
/// The file path.
pub path: String, pub path: String,
/// The range in the file.
pub range: CharRange, pub range: CharRange,
} }
@ -144,7 +157,10 @@ mod typst_ext {
/// text or string content. /// text or string content.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct SourceSpanOffset { pub struct SourceSpanOffset {
/// The source span.
pub span: SourceSpan, pub span: SourceSpan,
/// The offset relative to the start of the span. This is usually useful
/// if the location is not a span created by the parser.
pub offset: usize, pub offset: usize,
} }
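The newly documented location types are plain data and compose directly. A sketch that builds a resolved range from `(line, column)` pairs via the `From<Option<(usize, usize)>>` conversion mentioned above; 0-based coordinates are an assumption here:

```rust
use tinymist_std::debug_loc::{CharPosition, CharRange, SourceRange};

// Sketch: a resolved text range covering `len` characters of a file's first line.
fn first_line_range(path: &str, len: usize) -> SourceRange {
    let start: CharPosition = Some((0, 0)).into();
    let end: CharPosition = Some((0, len)).into();
    SourceRange {
        path: path.to_string(),
        range: CharRange { start, end },
    }
}
```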

View file

@ -1,3 +1,5 @@
//! Error handling utilities for the `tinymist` crate.
use core::fmt; use core::fmt;
use ecow::EcoString; use ecow::EcoString;
@ -5,12 +7,17 @@ use serde::{Deserialize, Serialize};
use crate::debug_loc::CharRange; use crate::debug_loc::CharRange;
/// The severity of a diagnostic message, following the LSP specification.
#[derive(serde_repr::Serialize_repr, serde_repr::Deserialize_repr, Debug, Clone)] #[derive(serde_repr::Serialize_repr, serde_repr::Deserialize_repr, Debug, Clone)]
#[repr(u8)] #[repr(u8)]
pub enum DiagSeverity { pub enum DiagSeverity {
/// An error message.
Error = 1, Error = 1,
/// A warning message.
Warning = 2, Warning = 2,
/// An information message.
Information = 3, Information = 3,
/// A hint message.
Hint = 4, Hint = 4,
} }
@ -26,30 +33,41 @@ impl fmt::Display for DiagSeverity {
} }
/// <https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#diagnostic> /// <https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#diagnostic>
/// The `owner` and `source` fields are not included in the struct, but they
/// could be added to `ErrorImpl::arguments`.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiagMessage { pub struct DiagMessage {
/// The typst package specifier.
pub package: String, pub package: String,
/// The file path relative to the root of the workspace or the package.
pub path: String, pub path: String,
/// The diagnostic message.
pub message: String, pub message: String,
/// The severity of the diagnostic message.
pub severity: DiagSeverity, pub severity: DiagSeverity,
/// The char range in the file. The position encoding must be negotiated.
pub range: Option<CharRange>, pub range: Option<CharRange>,
// These field could be added to ErrorImpl::arguments
// owner: Option<ImmutStr>,
// source: ImmutStr,
} }
impl DiagMessage {} impl DiagMessage {}
/// All kinds of errors that can occur in the `tinymist` crate.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
#[non_exhaustive] #[non_exhaustive]
pub enum ErrKind { pub enum ErrKind {
/// No message.
None, None,
/// A string message.
Msg(EcoString), Msg(EcoString),
/// A source diagnostic message.
Diag(Box<DiagMessage>), Diag(Box<DiagMessage>),
/// An inner error.
Inner(Error), Inner(Error),
} }
/// A trait to convert a value into an error kind.
pub trait ErrKindExt { pub trait ErrKindExt {
/// Convert the error kind into an error kind.
fn to_error_kind(self) -> ErrKind; fn to_error_kind(self) -> ErrKind;
} }
@ -65,6 +83,12 @@ impl ErrKindExt for std::io::Error {
} }
} }
impl ErrKindExt for std::str::Utf8Error {
fn to_error_kind(self) -> ErrKind {
ErrKind::Msg(self.to_string().into())
}
}
impl ErrKindExt for String { impl ErrKindExt for String {
fn to_error_kind(self) -> ErrKind { fn to_error_kind(self) -> ErrKind {
ErrKind::Msg(self.into()) ErrKind::Msg(self.into())
@ -101,11 +125,27 @@ impl ErrKindExt for serde_json::Error {
} }
} }
impl ErrKindExt for anyhow::Error {
fn to_error_kind(self) -> ErrKind {
ErrKind::Msg(self.to_string().into())
}
}
impl ErrKindExt for Error {
fn to_error_kind(self) -> ErrKind {
ErrKind::Msg(self.to_string().into())
}
}
/// The internal error implementation.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ErrorImpl { pub struct ErrorImpl {
/// A static error identifier.
loc: &'static str, loc: &'static str,
/// The kind of error.
kind: ErrKind, kind: ErrKind,
arguments: Box<[(&'static str, String)]>, /// Additional extractable arguments for the error.
args: Option<Box<[(&'static str, String)]>>,
} }
/// This type represents all possible errors that can occur in typst.ts /// This type represents all possible errors that can occur in typst.ts
@ -118,43 +158,65 @@ pub struct Error {
} }
impl Error { impl Error {
pub fn new(loc: &'static str, kind: ErrKind, arguments: Box<[(&'static str, String)]>) -> Self { /// Creates a new error.
pub fn new(
loc: &'static str,
kind: ErrKind,
args: Option<Box<[(&'static str, String)]>>,
) -> Self {
Self { Self {
err: Box::new(ErrorImpl { err: Box::new(ErrorImpl { loc, kind, args }),
loc,
kind,
arguments,
}),
} }
} }
/// Returns the location of the error.
pub fn loc(&self) -> &'static str { pub fn loc(&self) -> &'static str {
self.err.loc self.err.loc
} }
/// Returns the kind of the error.
pub fn kind(&self) -> &ErrKind { pub fn kind(&self) -> &ErrKind {
&self.err.kind &self.err.kind
} }
/// Returns the arguments of the error.
pub fn arguments(&self) -> &[(&'static str, String)] { pub fn arguments(&self) -> &[(&'static str, String)] {
&self.err.arguments self.err.args.as_deref().unwrap_or_default()
} }
} }
impl fmt::Display for Error { impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let err = &self.err; let err = &self.err;
match &err.kind {
ErrKind::Msg(msg) => write!(f, "{}: {} with {:?}", err.loc, msg, err.arguments), if err.loc.is_empty() {
ErrKind::Diag(diag) => { match &err.kind {
write!(f, "{}: {} with {:?}", err.loc, diag.message, err.arguments) ErrKind::Msg(msg) => write!(f, "{msg} with {:?}", err.args),
ErrKind::Diag(diag) => {
write!(f, "{} with {:?}", diag.message, err.args)
}
ErrKind::Inner(e) => write!(f, "{e} with {:?}", err.args),
ErrKind::None => write!(f, "error with {:?}", err.args),
}
} else {
match &err.kind {
ErrKind::Msg(msg) => write!(f, "{}: {} with {:?}", err.loc, msg, err.args),
ErrKind::Diag(diag) => {
write!(f, "{}: {} with {:?}", err.loc, diag.message, err.args)
}
ErrKind::Inner(e) => write!(f, "{}: {} with {:?}", err.loc, e, err.args),
ErrKind::None => write!(f, "{}: with {:?}", err.loc, err.args),
} }
ErrKind::Inner(e) => write!(f, "{}: {} with {:?}", err.loc, e, err.arguments),
ErrKind::None => write!(f, "{}: with {:?}", err.loc, err.arguments),
} }
} }
} }
impl From<anyhow::Error> for Error {
fn from(e: anyhow::Error) -> Self {
Error::new("", e.to_string().to_error_kind(), None)
}
}
impl std::error::Error for Error {} impl std::error::Error for Error {}
#[cfg(feature = "web")] #[cfg(feature = "web")]
@ -178,47 +240,94 @@ impl From<&Error> for wasm_bindgen::JsValue {
} }
} }
/// The result type used in the `tinymist` crate.
pub type Result<T, Err = Error> = std::result::Result<T, Err>;
/// A trait to add context to a result.
pub trait WithContext<T>: Sized {
/// Add a context to the result.
fn context(self, loc: &'static str) -> Result<T>;
/// Add a context to the result with additional arguments.
fn with_context<F>(self, loc: &'static str, f: F) -> Result<T>
where
F: FnOnce() -> Option<Box<[(&'static str, String)]>>;
}
impl<T, E: ErrKindExt> WithContext<T> for Result<T, E> {
fn context(self, loc: &'static str) -> Result<T> {
self.map_err(|e| Error::new(loc, e.to_error_kind(), None))
}
fn with_context<F>(self, loc: &'static str, f: F) -> Result<T>
where
F: FnOnce() -> Option<Box<[(&'static str, String)]>>,
{
self.map_err(|e| Error::new(loc, e.to_error_kind(), f()))
}
}
impl<T> WithContext<T> for Option<T> {
fn context(self, loc: &'static str) -> Result<T> {
self.ok_or_else(|| Error::new(loc, ErrKind::None, None))
}
fn with_context<F>(self, loc: &'static str, f: F) -> Result<T>
where
F: FnOnce() -> Option<Box<[(&'static str, String)]>>,
{
self.ok_or_else(|| Error::new(loc, ErrKind::None, f()))
}
}
/// A trait to add context to a result without a specific error type.
pub trait WithContextUntyped<T>: Sized {
/// Add a context to the result.
fn context_ut(self, loc: &'static str) -> Result<T>;
/// Add a context to the result with additional arguments.
fn with_context_ut<F>(self, loc: &'static str, f: F) -> Result<T>
where
F: FnOnce() -> Option<Box<[(&'static str, String)]>>;
}
impl<T, E: std::fmt::Display> WithContextUntyped<T> for Result<T, E> {
fn context_ut(self, loc: &'static str) -> Result<T> {
self.map_err(|e| Error::new(loc, ErrKind::Msg(ecow::eco_format!("{e}")), None))
}
fn with_context_ut<F>(self, loc: &'static str, f: F) -> Result<T>
where
F: FnOnce() -> Option<Box<[(&'static str, String)]>>,
{
self.map_err(|e| Error::new(loc, ErrKind::Msg(ecow::eco_format!("{e}")), f()))
}
}
/// The error prelude.
pub mod prelude { pub mod prelude {
#![allow(missing_docs)]
use super::ErrKindExt; use super::ErrKindExt;
use crate::Error; use crate::Error;
pub type ZResult<T> = Result<T, Error>; pub use super::{WithContext, WithContextUntyped};
pub use crate::Result;
pub trait WithContext<T>: Sized {
fn context(self, loc: &'static str) -> ZResult<T>;
fn with_context<F>(self, loc: &'static str, f: F) -> ZResult<T>
where
F: FnOnce() -> Box<[(&'static str, String)]>;
}
impl<T, E: ErrKindExt> WithContext<T> for Result<T, E> {
fn context(self, loc: &'static str) -> ZResult<T> {
self.map_err(|e| Error::new(loc, e.to_error_kind(), Box::new([])))
}
fn with_context<F>(self, loc: &'static str, f: F) -> ZResult<T>
where
F: FnOnce() -> Box<[(&'static str, String)]>,
{
self.map_err(|e| Error::new(loc, e.to_error_kind(), f()))
}
}
pub fn map_string_err<T: ToString>(loc: &'static str) -> impl Fn(T) -> Error { pub fn map_string_err<T: ToString>(loc: &'static str) -> impl Fn(T) -> Error {
move |e| Error::new(loc, e.to_string().to_error_kind(), Box::new([])) move |e| Error::new(loc, e.to_string().to_error_kind(), None)
} }
pub fn map_into_err<S: ErrKindExt, T: Into<S>>(loc: &'static str) -> impl Fn(T) -> Error { pub fn map_into_err<S: ErrKindExt, T: Into<S>>(loc: &'static str) -> impl Fn(T) -> Error {
move |e| Error::new(loc, e.into().to_error_kind(), Box::new([])) move |e| Error::new(loc, e.into().to_error_kind(), None)
} }
pub fn map_err<T: ErrKindExt>(loc: &'static str) -> impl Fn(T) -> Error { pub fn map_err<T: ErrKindExt>(loc: &'static str) -> impl Fn(T) -> Error {
move |e| Error::new(loc, e.to_error_kind(), Box::new([])) move |e| Error::new(loc, e.to_error_kind(), None)
} }
pub fn wrap_err(loc: &'static str) -> impl Fn(Error) -> Error { pub fn wrap_err(loc: &'static str) -> impl Fn(Error) -> Error {
move |e| Error::new(loc, crate::ErrKind::Inner(e), Box::new([])) move |e| Error::new(loc, crate::ErrKind::Inner(e), None)
} }
pub fn map_string_err_with_args< pub fn map_string_err_with_args<
@ -226,13 +335,13 @@ pub mod prelude {
Args: IntoIterator<Item = (&'static str, String)>, Args: IntoIterator<Item = (&'static str, String)>,
>( >(
loc: &'static str, loc: &'static str,
arguments: Args, args: Args,
) -> impl FnOnce(T) -> Error { ) -> impl FnOnce(T) -> Error {
move |e| { move |e| {
Error::new( Error::new(
loc, loc,
e.to_string().to_error_kind(), e.to_string().to_error_kind(),
arguments.into_iter().collect::<Vec<_>>().into_boxed_slice(), Some(args.into_iter().collect::<Vec<_>>().into_boxed_slice()),
) )
} }
} }
@ -243,49 +352,49 @@ pub mod prelude {
Args: IntoIterator<Item = (&'static str, String)>, Args: IntoIterator<Item = (&'static str, String)>,
>( >(
loc: &'static str, loc: &'static str,
arguments: Args, args: Args,
) -> impl FnOnce(T) -> Error { ) -> impl FnOnce(T) -> Error {
move |e| { move |e| {
Error::new( Error::new(
loc, loc,
e.into().to_error_kind(), e.into().to_error_kind(),
arguments.into_iter().collect::<Vec<_>>().into_boxed_slice(), Some(args.into_iter().collect::<Vec<_>>().into_boxed_slice()),
) )
} }
} }
pub fn map_err_with_args<T: ErrKindExt, Args: IntoIterator<Item = (&'static str, String)>>( pub fn map_err_with_args<T: ErrKindExt, Args: IntoIterator<Item = (&'static str, String)>>(
loc: &'static str, loc: &'static str,
arguments: Args, args: Args,
) -> impl FnOnce(T) -> Error { ) -> impl FnOnce(T) -> Error {
move |e| { move |e| {
Error::new( Error::new(
loc, loc,
e.to_error_kind(), e.to_error_kind(),
arguments.into_iter().collect::<Vec<_>>().into_boxed_slice(), Some(args.into_iter().collect::<Vec<_>>().into_boxed_slice()),
) )
} }
} }
pub fn wrap_err_with_args<Args: IntoIterator<Item = (&'static str, String)>>( pub fn wrap_err_with_args<Args: IntoIterator<Item = (&'static str, String)>>(
loc: &'static str, loc: &'static str,
arguments: Args, args: Args,
) -> impl FnOnce(Error) -> Error { ) -> impl FnOnce(Error) -> Error {
move |e| { move |e| {
Error::new( Error::new(
loc, loc,
crate::ErrKind::Inner(e), crate::ErrKind::Inner(e),
arguments.into_iter().collect::<Vec<_>>().into_boxed_slice(), Some(args.into_iter().collect::<Vec<_>>().into_boxed_slice()),
) )
} }
} }
pub fn _error_once(loc: &'static str, args: Box<[(&'static str, String)]>) -> Error { pub fn _error_once(loc: &'static str, args: Box<[(&'static str, String)]>) -> Error {
Error::new(loc, crate::ErrKind::None, args) Error::new(loc, crate::ErrKind::None, Some(args))
} }
pub fn _msg(loc: &'static str, msg: EcoString) -> Error { pub fn _msg(loc: &'static str, msg: EcoString) -> Error {
Error::new(loc, crate::ErrKind::Msg(msg), Box::new([])) Error::new(loc, crate::ErrKind::Msg(msg), None)
} }
pub use ecow::eco_format as _eco_format; pub use ecow::eco_format as _eco_format;
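Taken together, the reworked prelude supports three idioms: `context` for error types implementing `ErrKindExt` (io, utf-8, serde_json, anyhow, ...), `context_ut` for any `Display` error, and `with_context` for lazily attaching key/value arguments. A sketch using only items defined above:

```rust
use std::path::Path;

use tinymist_std::error::prelude::*;

// Sketch: the three context idioms on one code path.
fn load_port(config: &Path) -> Result<u16> {
    // `context`: io::Error implements ErrKindExt.
    let bytes = std::fs::read(config).context("load_port")?;

    // `with_context`: like `context`, but lazily attaches arguments.
    let text = std::str::from_utf8(&bytes).with_context("load_port", || {
        Some(vec![("path", config.display().to_string())].into_boxed_slice())
    })?;

    // `context_ut`: ParseIntError only needs to implement Display.
    text.trim().parse::<u16>().context_ut("invalid port number")
}
```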

View file

@ -1,7 +1,6 @@
//! Filesystem support for tinymist. //! Filesystem support for tinymist.
#[cfg(feature = "system")]
pub mod flock; pub mod flock;
#[cfg(feature = "system")]
pub mod paths; pub mod paths;
mod errors;
pub use errors::*;

View file

@ -13,10 +13,10 @@ use std::io;
use std::io::{Read, Seek, SeekFrom, Write}; use std::io::{Read, Seek, SeekFrom, Write};
use std::path::{Display, Path, PathBuf}; use std::path::{Display, Path, PathBuf};
use crate::errors::Result; use self::sys::*;
use crate::paths; use super::paths;
use anyhow::Context as _; use anyhow::Context as _;
use sys::*; use anyhow::Result;
/// A locked file. /// A locked file.
/// ///

View file

@ -1,7 +1,6 @@
//! Upstream: <https://github.com/rust-lang/cargo/blob/rust-1.83.0/crates/cargo-util/src/paths.rs> //! Upstream: <https://github.com/rust-lang/cargo/blob/rust-1.83.0/crates/cargo-util/src/paths.rs>
//! Various utilities for working with files and paths. //! Various utilities for working with files and paths.
use anyhow::{Context, Result};
use std::env; use std::env;
use std::ffi::{OsStr, OsString}; use std::ffi::{OsStr, OsString};
use std::fs::{self, File, Metadata, OpenOptions}; use std::fs::{self, File, Metadata, OpenOptions};
@ -9,6 +8,8 @@ use std::io;
use std::io::prelude::*; use std::io::prelude::*;
use std::iter; use std::iter;
use std::path::{Component, Path, PathBuf}; use std::path::{Component, Path, PathBuf};
use anyhow::{Context, Result};
use tempfile::Builder as TempFileBuilder; use tempfile::Builder as TempFileBuilder;
/// Joins paths into a string suitable for the `PATH` environment variable. /// Joins paths into a string suitable for the `PATH` environment variable.

View file

@ -1,8 +1,10 @@
//! The hash extension module. It provides extra concepts like `Fingerprint` and
//! `HashedTrait`.
use core::fmt; use core::fmt;
use std::{ use std::any::Any;
any::Any, use std::hash::{Hash, Hasher};
hash::{Hash, Hasher}, use std::ops::Deref;
};
use base64::Engine; use base64::Engine;
use fxhash::FxHasher32; use fxhash::FxHasher32;
@ -11,12 +13,13 @@ use siphasher::sip128::{Hasher128, SipHasher13};
#[cfg(feature = "rkyv")] #[cfg(feature = "rkyv")]
use rkyv::{Archive, Deserialize as rDeser, Serialize as rSer}; use rkyv::{Archive, Deserialize as rDeser, Serialize as rSer};
use crate::error::prelude::ZResult; use crate::error::prelude::Result;
pub(crate) type FxBuildHasher = std::hash::BuildHasherDefault<FxHasher>; pub(crate) type FxBuildHasher = std::hash::BuildHasherDefault<FxHasher>;
pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
// pub type FxIndexSet<K> = indexmap::IndexSet<K, FxHasher>; // pub type FxIndexSet<K> = indexmap::IndexSet<K, FxHasher>;
// pub type FxIndexMap<K, V> = indexmap::IndexMap<K, V, FxHasher>; // pub type FxIndexMap<K, V> = indexmap::IndexMap<K, V, FxHasher>;
/// A dashmap that uses the FxHasher as the underlying hasher.
pub type FxDashMap<K, V> = dashmap::DashMap<K, V, FxBuildHasher>; pub type FxDashMap<K, V> = dashmap::DashMap<K, V, FxBuildHasher>;
/// See <https://github.com/rust-lang/rust/blob/master/compiler/rustc_hir/src/stable_hash_impls.rs#L22> /// See <https://github.com/rust-lang/rust/blob/master/compiler/rustc_hir/src/stable_hash_impls.rs#L22>
@ -85,7 +88,7 @@ impl Fingerprint {
} }
/// Creates a new `Fingerprint` from a svg id that **doesn't have prefix**. /// Creates a new `Fingerprint` from a svg id that **doesn't have prefix**.
pub fn try_from_str(s: &str) -> ZResult<Self> { pub fn try_from_str(s: &str) -> Result<Self> {
let bytes = base64::engine::general_purpose::STANDARD_NO_PAD let bytes = base64::engine::general_purpose::STANDARD_NO_PAD
.decode(&s.as_bytes()[..11]) .decode(&s.as_bytes()[..11])
.expect("invalid base64 string"); .expect("invalid base64 string");
@ -135,9 +138,11 @@ pub struct FingerprintSipHasher {
data: Vec<u8>, data: Vec<u8>,
} }
/// The base hasher for the [`FingerprintSipHasher`].
pub type FingerprintSipHasherBase = SipHasher13; pub type FingerprintSipHasherBase = SipHasher13;
impl FingerprintSipHasher { impl FingerprintSipHasher {
/// Get the fast hash value and the underlying data.
pub fn fast_hash(&self) -> (u32, &Vec<u8>) { pub fn fast_hash(&self) -> (u32, &Vec<u8>) {
let mut inner = FxHasher32::default(); let mut inner = FxHasher32::default();
self.data.hash(&mut inner); self.data.hash(&mut inner);
@ -187,6 +192,7 @@ pub struct FingerprintBuilder {
#[cfg(not(feature = "bi-hash"))] #[cfg(not(feature = "bi-hash"))]
impl FingerprintBuilder { impl FingerprintBuilder {
/// Resolve the fingerprint without checking the conflict.
pub fn resolve_unchecked<T: Hash>(&self, item: &T) -> Fingerprint { pub fn resolve_unchecked<T: Hash>(&self, item: &T) -> Fingerprint {
let mut s = FingerprintSipHasher { data: Vec::new() }; let mut s = FingerprintSipHasher { data: Vec::new() };
item.hash(&mut s); item.hash(&mut s);
@ -194,6 +200,7 @@ impl FingerprintBuilder {
fingerprint fingerprint
} }
/// Resolve the fingerprint and check the conflict.
pub fn resolve<T: Hash + 'static>(&self, item: &T) -> Fingerprint { pub fn resolve<T: Hash + 'static>(&self, item: &T) -> Fingerprint {
let mut s = FingerprintSipHasher { data: Vec::new() }; let mut s = FingerprintSipHasher { data: Vec::new() };
item.type_id().hash(&mut s); item.type_id().hash(&mut s);
@ -216,6 +223,7 @@ impl FingerprintBuilder {
#[cfg(feature = "bi-hash")] #[cfg(feature = "bi-hash")]
impl FingerprintBuilder { impl FingerprintBuilder {
/// Resolve the fingerprint without checking the conflict.
pub fn resolve_unchecked<T: Hash>(&self, item: &T) -> Fingerprint { pub fn resolve_unchecked<T: Hash>(&self, item: &T) -> Fingerprint {
let mut s = FingerprintSipHasher { data: Vec::new() }; let mut s = FingerprintSipHasher { data: Vec::new() };
item.hash(&mut s); item.hash(&mut s);
@ -233,6 +241,7 @@ impl FingerprintBuilder {
fingerprint fingerprint
} }
/// Resolve the fingerprint and check the conflict.
pub fn resolve<T: Hash + 'static>(&self, item: &T) -> Fingerprint { pub fn resolve<T: Hash + 'static>(&self, item: &T) -> Fingerprint {
let mut s = FingerprintSipHasher { data: Vec::new() }; let mut s = FingerprintSipHasher { data: Vec::new() };
item.type_id().hash(&mut s); item.type_id().hash(&mut s);
@ -297,6 +306,66 @@ pub fn hash64<T: Hash + ?Sized>(v: &T) -> u64 {
// todo: rustc hash doesn't have 32-bit hash // todo: rustc hash doesn't have 32-bit hash
pub use fxhash::hash32; pub use fxhash::hash32;
/// A trait that provides a static prehashed 128-bit hash.
pub trait StaticHash128 {
/// Get the prehashed 128-bit hash.
fn get_hash(&self) -> u128;
}
impl Hash for dyn StaticHash128 {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u128(self.get_hash());
}
}
/// A wrapper that pairs a boxed `T` with a static prehashed 128-bit hash.
///
/// Please ensure that the hash really corresponds to the wrapped `T`. Use it
/// at your own risk.
pub struct HashedTrait<T: ?Sized> {
hash: u128,
t: Box<T>,
}
impl<T: ?Sized> HashedTrait<T> {
/// Create a new `HashedTrait` with the given hash and the trait object.
pub fn new(hash: u128, t: Box<T>) -> Self {
Self { hash, t }
}
}
impl<T: ?Sized> Deref for HashedTrait<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.t
}
}
impl<T> Hash for HashedTrait<T> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_u128(self.hash);
}
}
impl<T: Hash + Default + 'static> Default for HashedTrait<T> {
fn default() -> Self {
let t = T::default();
Self {
hash: item_hash128(&t),
t: Box::new(t),
}
}
}
impl<T: ?Sized> StaticHash128 for HashedTrait<T> {
fn get_hash(&self) -> u128 {
self.hash
}
}
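`StaticHash128` and `HashedTrait` now sit next to the other hash utilities: hash a value once, then let the wrapper answer every later `Hash` call from the cached 128-bit value. A sketch of that pattern, assuming `item_hash128` is public in this module:

```rust
use tinymist_std::hash::{item_hash128, HashedTrait, StaticHash128};

// Sketch: an object-safe trait whose implementors are hashed once up front.
trait Renderable {
    fn name(&self) -> &'static str;
}

#[derive(Hash)]
struct Circle {
    radius_in_pt: u32,
}

impl Renderable for Circle {
    fn name(&self) -> &'static str {
        "circle"
    }
}

fn wrap(circle: Circle) -> HashedTrait<dyn Renderable> {
    // Hash the concrete value once; the wrapper replays the cached value.
    let hash = item_hash128(&circle);
    HashedTrait::new(hash, Box::new(circle))
}

fn cached_hash(item: &HashedTrait<dyn Renderable>) -> u128 {
    item.get_hash()
}
```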
#[test] #[test]
fn test_fingerprint() { fn test_fingerprint() {
let t = Fingerprint::from_pair(0, 1); let t = Fingerprint::from_pair(0, 1);

View file

@ -1,8 +1,9 @@
#![allow(missing_docs)] //! Additional functions wrapping Rust's standard library.
pub mod adt; pub mod adt;
pub mod debug_loc; pub mod debug_loc;
pub mod error; pub mod error;
pub mod fs;
pub mod hash; pub mod hash;
pub mod path; pub mod path;
pub mod time; pub mod time;
@ -11,7 +12,7 @@ pub(crate) mod concepts;
pub use concepts::*; pub use concepts::*;
pub use error::{ErrKind, Error}; pub use error::{ErrKind, Error, Result};
#[cfg(feature = "typst")] #[cfg(feature = "typst")]
pub use typst_shim; pub use typst_shim;
@ -19,8 +20,8 @@ pub use typst_shim;
#[cfg(feature = "rkyv")] #[cfg(feature = "rkyv")]
use rkyv::{Archive, Deserialize as rDeser, Serialize as rSer}; use rkyv::{Archive, Deserialize as rDeser, Serialize as rSer};
/// The local id of a svg item. /// The local id of an item.
/// This id is only unique within the svg document. /// This id is only unique within a task or process.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
#[cfg_attr(feature = "rkyv", derive(Archive, rDeser, rSer))] #[cfg_attr(feature = "rkyv", derive(Archive, rDeser, rSer))]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))] #[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]

View file

@ -1,3 +1,5 @@
//! Path utilities.
use std::path::{Component, Path}; use std::path::{Component, Path};
pub use path_clean::PathClean; pub use path_clean::PathClean;
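The path module pairs the re-exported `path_clean::PathClean` with `unix_slash`, which this commit already uses for lock-file paths. A sketch combining the two; `unix_slash(&Path) -> String` is an assumption about its signature:

```rust
use std::path::Path;

use tinymist_std::path::{unix_slash, PathClean};

// Sketch: lexically normalize a path and render it with forward slashes,
// keeping serialized paths stable across platforms.
fn display_path(p: &Path) -> String {
    let cleaned = p.clean(); // from the re-exported `PathClean` trait
    unix_slash(&cleaned)
}
```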

View file

@ -1,3 +1,5 @@
//! Cross platform time utilities.
pub use std::time::SystemTime as Time; pub use std::time::SystemTime as Time;
pub use web_time::Duration; pub use web_time::Duration;
pub use web_time::Instant; pub use web_time::Instant;

View file

@ -113,7 +113,7 @@ impl EntryState {
} }
} }
pub fn try_select_path_in_workspace(&self, p: &Path) -> ZResult<Option<EntryState>> { pub fn try_select_path_in_workspace(&self, p: &Path) -> Result<Option<EntryState>> {
Ok(match self.workspace_root() { Ok(match self.workspace_root() {
Some(root) => match p.strip_prefix(&root) { Some(root) => match p.strip_prefix(&root) {
Ok(p) => Some(EntryState::new_rooted( Ok(p) => Some(EntryState::new_rooted(

View file

@ -105,7 +105,7 @@ impl SystemFontSearcher {
} }
/// Resolve fonts from given options. /// Resolve fonts from given options.
pub fn resolve_opts(&mut self, opts: CompileFontOpts) -> ZResult<()> { pub fn resolve_opts(&mut self, opts: CompileFontOpts) -> Result<()> {
if opts if opts
.font_profile_cache_path .font_profile_cache_path
.to_str() .to_str()

View file

@ -21,7 +21,7 @@ pub(crate) fn convert_pair(pair: JsValue) -> (JsValue, JsValue) {
} }
struct FontBuilder {} struct FontBuilder {}
fn font_family_web_to_typst(family: &str, full_name: &str) -> ZResult<String> { fn font_family_web_to_typst(family: &str, full_name: &str) -> Result<String> {
let mut family = family; let mut family = family;
if family.starts_with("Noto") if family.starts_with("Noto")
|| family.starts_with("NewCM") || family.starts_with("NewCM")
@ -51,7 +51,7 @@ fn infer_info_from_web_font(
postscript_name, postscript_name,
style, style,
}: WebFontInfo, }: WebFontInfo,
) -> ZResult<FontInfo> { ) -> Result<FontInfo> {
let family = font_family_web_to_typst(&family, &full_name)?; let family = font_family_web_to_typst(&family, &full_name)?;
let mut full = full_name; let mut full = full_name;
@ -237,7 +237,7 @@ impl FontBuilder {
// {:?}", field, val))) .unwrap()) // {:?}", field, val))) .unwrap())
// } // }
fn to_string(&self, field: &str, val: &JsValue) -> ZResult<String> { fn to_string(&self, field: &str, val: &JsValue) -> Result<String> {
Ok(val Ok(val
.as_string() .as_string()
.ok_or_else(|| JsValue::from_str(&format!("expected string for {field}, got {val:?}"))) .ok_or_else(|| JsValue::from_str(&format!("expected string for {field}, got {val:?}")))
@ -247,7 +247,7 @@ impl FontBuilder {
fn font_web_to_typst( fn font_web_to_typst(
&self, &self,
val: &JsValue, val: &JsValue,
) -> ZResult<(JsValue, js_sys::Function, Vec<typst::text::FontInfo>)> { ) -> Result<(JsValue, js_sys::Function, Vec<typst::text::FontInfo>)> {
let mut postscript_name = String::new(); let mut postscript_name = String::new();
let mut family = String::new(); let mut family = String::new();
let mut full_name = String::new(); let mut full_name = String::new();
@ -414,7 +414,7 @@ impl BrowserFontSearcher {
} }
} }
pub async fn add_web_fonts(&mut self, fonts: js_sys::Array) -> ZResult<()> { pub async fn add_web_fonts(&mut self, fonts: js_sys::Array) -> Result<()> {
let font_builder = FontBuilder {}; let font_builder = FontBuilder {};
for v in fonts.iter() { for v in fonts.iter() {
@ -451,7 +451,7 @@ impl BrowserFontSearcher {
} }
} }
pub async fn add_glyph_pack(&mut self) -> ZResult<()> { pub async fn add_glyph_pack(&mut self) -> Result<()> {
Err(error_once!( Err(error_once!(
"BrowserFontSearcher.add_glyph_pack is not implemented" "BrowserFontSearcher.add_glyph_pack is not implemented"
)) ))

View file

@ -32,7 +32,7 @@ impl TypstSystemUniverse {
/// Create [`TypstSystemWorld`] with the given options. /// Create [`TypstSystemWorld`] with the given options.
/// See SystemCompilerFeat for instantiation details. /// See SystemCompilerFeat for instantiation details.
/// See [`CompileOpts`] for available options. /// See [`CompileOpts`] for available options.
pub fn new(mut opts: CompileOpts) -> ZResult<Self> { pub fn new(mut opts: CompileOpts) -> Result<Self> {
let registry: Arc<HttpRegistry> = Arc::default(); let registry: Arc<HttpRegistry> = Arc::default();
let resolver = Arc::new(RegistryPathMapper::new(registry.clone())); let resolver = Arc::new(RegistryPathMapper::new(registry.clone()));
let inputs = std::mem::take(&mut opts.inputs); let inputs = std::mem::take(&mut opts.inputs);
@ -46,7 +46,7 @@ impl TypstSystemUniverse {
} }
/// Resolve fonts from given options. /// Resolve fonts from given options.
fn resolve_fonts(opts: CompileOpts) -> ZResult<FontResolverImpl> { fn resolve_fonts(opts: CompileOpts) -> Result<FontResolverImpl> {
let mut searcher = SystemFontSearcher::new(); let mut searcher = SystemFontSearcher::new();
searcher.resolve_opts(opts.into())?; searcher.resolve_opts(opts.into())?;
Ok(searcher.into()) Ok(searcher.into())

View file

@ -190,7 +190,7 @@ impl<F: CompilerFeat> CompilerUniverse<F> {
&self, &self,
file_path: Option<String>, file_path: Option<String>,
encoding: OffsetEncoding, encoding: OffsetEncoding,
) -> ZResult<Arc<Vec<SemanticToken>>> { ) -> Result<Arc<Vec<SemanticToken>>> {
let world = match file_path { let world = match file_path {
Some(e) => { Some(e) => {
let path = Path::new(&e); let path = Path::new(&e);

View file

@@ -60,7 +60,6 @@ strum.workspace = true
 sync-lsp.workspace = true
 tinymist-assets = { workspace = true }
 tinymist-query.workspace = true
-tinymist-fs.workspace = true
 tinymist-std.workspace = true
 tinymist-core = { workspace = true, default-features = false, features = [] }
 tinymist-project.workspace = true

View file

@@ -42,13 +42,14 @@ pub use world::{CompileFontArgs, CompileOnceArgs, CompilePackageArgs};
 use lsp_server::{RequestId, ResponseError};
 use serde_json::from_value;
 use sync_lsp::*;
+use tinymist_std::error::Result;
 use utils::*;
 use world::*;
 use tinymist_query::CompilerQueryResponse;
 /// The future type for a lsp query.
-pub type QueryFuture = anyhow::Result<ResponseFuture<anyhow::Result<CompilerQueryResponse>>>;
+pub type QueryFuture = Result<ResponseFuture<Result<CompilerQueryResponse>>>;
 trait LspClientExt {
 fn schedule_query(&self, req_id: RequestId, query_fut: QueryFuture) -> ScheduledResult;

View file

@@ -9,7 +9,6 @@ use std::{
 sync::Arc,
 };
-use anyhow::{bail, Context};
 use clap::Parser;
 use clap_builder::CommandFactory;
 use clap_complete::generate;
@@ -32,6 +31,7 @@ use tinymist::{world::TaskInputs, world::WorldProvider};
 use tinymist_core::LONG_VERSION;
 use tinymist_project::EntryResolver;
 use tinymist_query::package::PackageInfo;
+use tinymist_std::{bail, error::prelude::*};
 use typst::foundations::IntoValue;
 use typst_shim::utils::LazyHash;
@@ -61,7 +61,7 @@ impl Default for Runtimes {
 static RUNTIMES: Lazy<Runtimes> = Lazy::new(Default::default);
 /// The main entry point.
-fn main() -> anyhow::Result<()> {
+fn main() -> Result<()> {
 #[cfg(feature = "dhat-heap")]
 let _profiler = dhat::Profiler::new_heap();
@@ -103,9 +103,9 @@ fn main() -> anyhow::Result<()> {
 }
 /// Generates completion script to stdout.
-pub fn completion(args: ShellCompletionArgs) -> anyhow::Result<()> {
+pub fn completion(args: ShellCompletionArgs) -> Result<()> {
 let Some(shell) = args.shell.or_else(Shell::from_env) else {
-anyhow::bail!("could not infer shell");
+tinymist_std::bail!("could not infer shell");
 };
 let mut cmd = CliArguments::command();
@@ -115,7 +115,7 @@ pub fn completion(args: ShellCompletionArgs) -> anyhow::Result<()> {
 }
 /// Runs compilation
-pub fn compile(args: CompileArgs) -> anyhow::Result<()> {
+pub fn compile(args: CompileArgs) -> Result<()> {
 use std::io::Write;
 let input = args
@@ -150,7 +150,7 @@ pub fn compile(args: CompileArgs) -> anyhow::Result<()> {
 }
 /// The main entry point for the language server.
-pub fn lsp_main(args: LspArgs) -> anyhow::Result<()> {
+pub fn lsp_main(args: LspArgs) -> Result<()> {
 let pairs = LONG_VERSION.trim().split('\n');
 let pairs = pairs
 .map(|e| e.splitn(2, ":").map(|e| e.trim()).collect::<Vec<_>>())
@@ -178,19 +178,19 @@ pub fn lsp_main(args: LspArgs) -> anyhow::Result<()> {
 }
 /// The main entry point for the compiler.
-pub fn trace_lsp_main(args: TraceLspArgs) -> anyhow::Result<()> {
+pub fn trace_lsp_main(args: TraceLspArgs) -> Result<()> {
 let mut input = PathBuf::from(match args.compile.input {
 Some(value) => value,
-None => return Err(anyhow::anyhow!("provide a valid path")),
+None => Err(anyhow::anyhow!("provide a valid path"))?,
 });
 let mut root_path = args.compile.root.unwrap_or(PathBuf::from("."));
 if root_path.is_relative() {
-root_path = std::env::current_dir()?.join(root_path);
+root_path = std::env::current_dir().context("cwd")?.join(root_path);
 }
 if input.is_relative() {
-input = std::env::current_dir()?.join(input);
+input = std::env::current_dir().context("cwd")?.join(input);
 }
 if !input.starts_with(&root_path) {
 bail!("input file is not within the root path: {input:?} not in {root_path:?}");
@@ -233,7 +233,7 @@ pub fn trace_lsp_main(args: TraceLspArgs) -> anyhow::Result<()> {
 let resp = service.ready(()).unwrap();
 let MaybeDone::Done(resp) = resp else {
-bail!("internal error: not sync init")
+anyhow::bail!("internal error: not sync init")
 };
 resp.unwrap();
@@ -274,7 +274,7 @@ pub fn trace_lsp_main(args: TraceLspArgs) -> anyhow::Result<()> {
 }
 /// The main entry point for language server queries.
-pub fn query_main(cmds: QueryCommands) -> anyhow::Result<()> {
+pub fn query_main(cmds: QueryCommands) -> Result<()> {
 use tinymist_project::package::PackageRegistry;
 with_stdio_transport(MirrorArgs::default(), |conn| {
@@ -297,7 +297,7 @@ pub fn query_main(cmds: QueryCommands) -> anyhow::Result<()> {
 let resp = service.ready(()).unwrap();
 let MaybeDone::Done(resp) = resp else {
-bail!("internal error: not sync init")
+anyhow::bail!("internal error: not sync init")
 };
 resp.unwrap();
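
Aside from the diff itself: the hunks above converge on a single error-handling style built on the `use tinymist_std::{bail, error::prelude::*};` import. A minimal sketch of that style follows, mirroring the path checks in `trace_lsp_main`; the `resolve_input` helper is hypothetical, and it assumes the prelude re-exports `Result` and a `context` extension, as the `.context("cwd")` calls above suggest.

// Hypothetical helper mirroring the path checks in `trace_lsp_main` above.
// Assumes `tinymist_std::error::prelude::*` provides `Result` and `.context(..)`.
use std::path::PathBuf;
use tinymist_std::{bail, error::prelude::*};

fn resolve_input(mut input: PathBuf, mut root_path: PathBuf) -> Result<PathBuf> {
    // Anchor relative paths at the current working directory, attaching context on failure.
    if root_path.is_relative() {
        root_path = std::env::current_dir().context("cwd")?.join(root_path);
    }
    if input.is_relative() {
        input = std::env::current_dir().context("cwd")?.join(input);
    }
    // Reject inputs outside the project root, as the hunk above does.
    if !input.starts_with(&root_path) {
        bail!("input file is not within the root path: {input:?} not in {root_path:?}");
    }
    Ok(input)
}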

View file

@@ -23,8 +23,7 @@ pub use tinymist_project::*;
 use std::sync::Arc;
-use anyhow::bail;
-use log::{error, info, trace};
+use log::{error, trace};
 use parking_lot::Mutex;
 use reflexo::{hash::FxHashMap, path::unix_slash};
 use reflexo_typst::{typst::prelude::EcoVec, CompileReport};
@@ -34,9 +33,9 @@ use tinymist_query::{
 CompilerQueryRequest, CompilerQueryResponse, DiagnosticsMap, SemanticRequest, StatefulRequest,
 VersionedDocument,
 };
-use tinymist_std::error::prelude::*;
+use tinymist_std::{bail, error::prelude::*};
 use tokio::sync::{mpsc, oneshot};
-use typst::{diag::SourceDiagnostic, World};
+use typst::diag::SourceDiagnostic;
 use crate::actor::editor::{CompileStatus, DocVersion, EditorRequest, TinymistCompileStatusEnum};
 use crate::stats::{CompilerQueryStats, QueryStatGuard};
@@ -100,7 +99,7 @@ pub struct Project {
 impl Project {
 /// Snapshot the compiler thread for tasks
-pub fn snapshot(&mut self) -> ZResult<WorldSnapFut> {
+pub fn snapshot(&mut self) -> Result<WorldSnapFut> {
 let (tx, rx) = oneshot::channel();
 let snap = self.state.snapshot();
 let _ = tx.send(snap);
@@ -109,7 +108,7 @@ impl Project {
 }
 /// Snapshot the compiler thread for language queries
-pub fn query_snapshot(&mut self, q: Option<&CompilerQueryRequest>) -> ZResult<QuerySnapFut> {
+pub fn query_snapshot(&mut self, q: Option<&CompilerQueryRequest>) -> Result<QuerySnapFut> {
 let fut = self.snapshot()?;
 let analysis = self.analysis.clone();
 let rev_lock = analysis.lock_revision(q);
@@ -150,7 +149,7 @@ impl Project {
 &mut self,
 group: &str,
 entry: EntryState,
-) -> ZResult<ProjectInsId> {
+) -> Result<ProjectInsId> {
 self.state.restart_dedicate(group, entry)
 }
 }
@@ -367,7 +366,7 @@ pub struct WorldSnapFut {
 impl WorldSnapFut {
 /// wait for the snapshot to be ready
-pub async fn receive(self) -> ZResult<CompileSnapshot<LspCompilerFeat>> {
+pub async fn receive(self) -> Result<CompileSnapshot<LspCompilerFeat>> {
 self.rx
 .await
 .map_err(map_string_err("failed to get snapshot"))
@@ -382,7 +381,7 @@ pub struct QuerySnapFut {
 impl QuerySnapFut {
 /// wait for the snapshot to be ready
-pub async fn receive(self) -> ZResult<QuerySnap> {
+pub async fn receive(self) -> Result<QuerySnap> {
 let snap = self.fut.receive().await?;
 Ok(QuerySnap {
 snap,
@@ -416,7 +415,7 @@ impl QuerySnap {
 self,
 query: T,
 wrapper: fn(Option<T::Response>) -> CompilerQueryResponse,
-) -> anyhow::Result<CompilerQueryResponse> {
+) -> Result<CompilerQueryResponse> {
 let doc = self.snap.success_doc.as_ref().map(|doc| VersionedDocument {
 version: self.world.revision().get(),
 document: doc.clone(),
@@ -429,20 +428,16 @@ impl QuerySnap {
 self,
 query: T,
 wrapper: fn(Option<T::Response>) -> CompilerQueryResponse,
-) -> anyhow::Result<CompilerQueryResponse> {
+) -> Result<CompilerQueryResponse> {
 self.run_analysis(|ctx| query.request(ctx)).map(wrapper)
 }
-pub fn run_analysis<T>(self, f: impl FnOnce(&mut LocalContextGuard) -> T) -> anyhow::Result<T> {
+pub fn run_analysis<T>(self, f: impl FnOnce(&mut LocalContextGuard) -> T) -> Result<T> {
 let world = self.snap.world;
-let Some(main) = world.main_id() else {
+let Some(..) = world.main_id() else {
 error!("Project: main file is not set");
 bail!("main file is not set");
 };
-world.source(main).map_err(|err| {
-info!("Project: failed to prepare main file: {err:?}");
-anyhow::anyhow!("failed to get source: {err}")
-})?;
 let mut analysis = self.analysis.snapshot_(world, self.rev_lock);
 Ok(f(&mut analysis))

View file

@@ -6,8 +6,6 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use actor::editor::EditorActor;
-use anyhow::anyhow;
-use anyhow::Context;
 use log::{error, info, trace};
 use lsp_server::RequestId;
 use lsp_types::request::{GotoDeclarationParams, WorkspaceConfiguration};
@@ -304,7 +302,7 @@ impl LanguageState {
 impl LanguageState {
 // todo: handle error
-fn register_capability(&self, registrations: Vec<Registration>) -> anyhow::Result<()> {
+fn register_capability(&self, registrations: Vec<Registration>) -> Result<()> {
 self.client.send_request_::<RegisterCapability>(
 RegistrationParams { registrations },
 |_, resp| {
@@ -316,7 +314,7 @@ impl LanguageState {
 Ok(())
 }
-fn unregister_capability(&self, unregisterations: Vec<Unregistration>) -> anyhow::Result<()> {
+fn unregister_capability(&self, unregisterations: Vec<Unregistration>) -> Result<()> {
 self.client.send_request_::<UnregisterCapability>(
 UnregistrationParams { unregisterations },
 |_, resp| {
@@ -329,7 +327,7 @@ impl LanguageState {
 }
 /// Registers or unregisters semantic tokens.
-fn enable_sema_token_caps(&mut self, enable: bool) -> anyhow::Result<()> {
+fn enable_sema_token_caps(&mut self, enable: bool) -> Result<()> {
 if !self.const_config().tokens_dynamic_registration {
 trace!("skip register semantic by config");
 return Ok(());
@@ -375,7 +373,7 @@ impl LanguageState {
 }
 /// Registers or unregisters document formatter.
-fn enable_formatter_caps(&mut self, enable: bool) -> anyhow::Result<()> {
+fn enable_formatter_caps(&mut self, enable: bool) -> Result<()> {
 if !self.const_config().doc_fmt_dynamic_registration {
 trace!("skip dynamic register formatter by config");
 return Ok(());
@@ -980,7 +978,7 @@ impl LanguageState {
 }
 /// Snapshot the compiler thread for tasks
-pub fn snapshot(&mut self) -> ZResult<WorldSnapFut> {
+pub fn snapshot(&mut self) -> Result<WorldSnapFut> {
 self.project.snapshot()
 }
@@ -990,7 +988,7 @@ impl LanguageState {
 }
 /// Snapshot the compiler thread for language queries
-pub fn query_snapshot(&mut self) -> ZResult<QuerySnapFut> {
+pub fn query_snapshot(&mut self) -> Result<QuerySnapFut> {
 self.project.query_snapshot(None)
 }
@@ -998,7 +996,7 @@ impl LanguageState {
 pub fn query_snapshot_with_stat(
 &mut self,
 q: &CompilerQueryRequest,
-) -> ZResult<QuerySnapWithStat> {
+) -> Result<QuerySnapWithStat> {
 let name: &'static str = q.into();
 let path = q.associated_path();
 let stat = self.project.stats.query_stat(path, name);
@@ -1116,7 +1114,7 @@ impl LanguageState {
 impl LanguageState {
 /// Restart the primary server.
-pub fn restart_primary(&mut self) -> ZResult<ProjectInsId> {
+pub fn restart_primary(&mut self) -> Result<ProjectInsId> {
 let entry = self.entry_resolver().resolve_default();
 let config = &self.config;
@@ -1148,7 +1146,7 @@ impl LanguageState {
 &mut self,
 dedicate: &str,
 entry: Option<ImmutPath>,
-) -> ZResult<ProjectInsId> {
+) -> Result<ProjectInsId> {
 let entry = self.config.compile.entry_resolver.resolve(entry);
 self.project.restart_dedicate(dedicate, entry)
 }
@@ -1367,10 +1365,10 @@ impl LanguageState {
 pub fn query_source<T>(
 &self,
 path: ImmutPath,
-f: impl FnOnce(Source) -> anyhow::Result<T>,
-) -> anyhow::Result<T> {
+f: impl FnOnce(Source) -> Result<T>,
+) -> Result<T> {
 let snapshot = self.memory_changes.get(&path);
-let snapshot = snapshot.ok_or_else(|| anyhow!("file missing {path:?}"))?;
+let snapshot = snapshot.ok_or_else(|| anyhow::anyhow!("file missing {path:?}"))?;
 let source = snapshot.content.clone();
 f(source)
 }

View file

@@ -555,7 +555,7 @@ pub async fn make_http_server(
 }
 /// Entry point of the preview tool.
-pub async fn preview_main(args: PreviewCliArgs) -> anyhow::Result<()> {
+pub async fn preview_main(args: PreviewCliArgs) -> Result<()> {
 log::info!("Arguments: {args:#?}");
 let handle = tokio::runtime::Handle::current();
@@ -615,7 +615,7 @@ pub async fn preview_main(args: PreviewCliArgs) -> anyhow::Result<()> {
 );
 let registered = preview_state.register(&server.primary.id, previewer.compile_watcher());
 if !registered {
-anyhow::bail!("failed to register preview");
+tinymist_std::bail!("failed to register preview");
 }
 let handle = Arc::new(PreviewProjectHandler {

View file

@@ -3,13 +3,13 @@
 use std::path::Path;
 use crate::project::*;
-use prelude::ZResult;
+use prelude::Result;
 trait LockFileExt {
 fn declare(&mut self, args: &DocNewArgs) -> Id;
-fn preview(&mut self, doc_id: Id, args: &TaskPreviewArgs) -> ZResult<Id>;
-fn compile(&mut self, args: TaskCompileArgs) -> ZResult<Id>;
-fn export(&mut self, doc_id: Id, args: TaskCompileArgs) -> ZResult<Id>;
+fn preview(&mut self, doc_id: Id, args: &TaskPreviewArgs) -> Result<Id>;
+fn compile(&mut self, args: TaskCompileArgs) -> Result<Id>;
+fn export(&mut self, doc_id: Id, args: TaskCompileArgs) -> Result<Id>;
 }
 impl LockFileExt for LockFile {
@@ -56,12 +56,12 @@ impl LockFileExt for LockFile {
 id
 }
-fn compile(&mut self, args: TaskCompileArgs) -> ZResult<Id> {
+fn compile(&mut self, args: TaskCompileArgs) -> Result<Id> {
 let id = self.declare(&args.declare);
 self.export(id, args)
 }
-fn export(&mut self, doc_id: Id, args: TaskCompileArgs) -> ZResult<Id> {
+fn export(&mut self, doc_id: Id, args: TaskCompileArgs) -> Result<Id> {
 let task = args.to_task(doc_id)?;
 let task_id = task.id().clone();
@@ -70,7 +70,7 @@ impl LockFileExt for LockFile {
 Ok(task_id)
 }
-fn preview(&mut self, doc_id: Id, args: &TaskPreviewArgs) -> ZResult<Id> {
+fn preview(&mut self, doc_id: Id, args: &TaskPreviewArgs) -> Result<Id> {
 let task_id = args
 .name
 .as_ref()
@@ -91,7 +91,7 @@ impl LockFileExt for LockFile {
 }
 /// Project document commands' main
-pub fn project_main(args: DocCommands) -> anyhow::Result<()> {
+pub fn project_main(args: DocCommands) -> Result<()> {
 LockFile::update(Path::new("."), |state| {
 match args {
 DocCommands::New(args) => {
@@ -112,7 +112,7 @@ pub fn project_main(args: DocCommands) -> anyhow::Result<()> {
 }
 /// Project task commands' main
-pub fn task_main(args: TaskCommands) -> anyhow::Result<()> {
+pub fn task_main(args: TaskCommands) -> Result<()> {
 LockFile::update(Path::new("."), |state| {
 match args {
 TaskCommands::Compile(args) => {