Mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-11-24 21:19:37 +00:00)
feat: resolve projects by lockDatabase (#1142)

* feat: implement lock-based project resolution
* feat: first working example

Parent: 89c178295a
Commit: 6d1e40d3a9

12 changed files with 460 additions and 65 deletions
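In short, this commit lets tinymist decide which project an opened file belongs to via a lock database: it walks up from the file to the directory holding tinymist.lock, loads the recorded routes and path materials, and resolves the file to the project whose material lists it. The sketch below illustrates that resolution idea in isolation; the Material/Route types and the lock_dir_of callback are simplified, hypothetical stand-ins, not the crate's actual LockFile, ProjectRoute, and ProjectPathMaterial API.

use std::collections::HashMap;
use std::path::{Path, PathBuf};

// Hypothetical, simplified stand-ins for the lock database records.
struct Material {
    id: String,          // project id
    files: Vec<PathBuf>, // files recorded as belonging to the project
}

struct Route {
    id: String,    // project id
    priority: u32, // lower value = higher priority
}

// Resolve `leaf` to (lock directory, project id): walk up to the directory
// that owns a lock database, then pick the highest-priority route whose
// material lists the file.
fn resolve_project(
    leaf: &Path,
    lock_dir_of: impl Fn(&Path) -> Option<PathBuf>,
    routes: &[Route],
    materials: &HashMap<String, Material>,
) -> Option<(PathBuf, String)> {
    // Closest ancestor directory that contains a tinymist.lock.
    let lock_dir = std::iter::successors(Some(leaf), |p| p.parent()).find_map(|p| lock_dir_of(p))?;

    // Try routes in priority order; the first material containing the file wins.
    let mut ordered: Vec<&Route> = routes.iter().collect();
    ordered.sort_by_key(|r| r.priority);
    for route in ordered {
        if let Some(mat) = materials.get(&route.id) {
            if mat.files.iter().any(|f| f.as_path() == leaf) {
                return Some((lock_dir, mat.id.clone()));
            }
        }
    }
    None
}

fn main() {
    let materials = HashMap::from([(
        "file:main.typ".to_string(),
        Material {
            id: "file:main.typ".to_string(),
            files: vec!["/proj/main.typ".into(), "/proj/ch1.typ".into()],
        },
    )]);
    let routes = vec![Route { id: "file:main.typ".to_string(), priority: 31 }];

    // Pretend that /proj is the directory containing tinymist.lock.
    let resolved = resolve_project(
        Path::new("/proj/ch1.typ"),
        |dir| (dir == Path::new("/proj")).then(|| dir.to_path_buf()),
        &routes,
        &materials,
    );
    println!("{resolved:?}"); // Some(("/proj", "file:main.typ"))
}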
Cargo.lock (generated, 1 change)
@@ -3999,6 +3999,7 @@ dependencies = [
  "reflexo",
  "reflexo-typst",
  "reflexo-vec2svg",
+ "rpds",
  "serde",
  "serde_json",
  "serde_yaml",
@@ -24,6 +24,7 @@ macro_rules! display_possible_values {
     Clone,
     Eq,
     PartialEq,
+    Hash,
     Ord,
     PartialOrd,
     serde::Serialize,

@@ -71,7 +72,9 @@ pub enum OutputFormat {
 display_possible_values!(OutputFormat);
 
 /// A PDF standard that Typst can enforce conformance with.
-#[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum, serde::Serialize, serde::Deserialize)]
+#[derive(
+    Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, serde::Serialize, serde::Deserialize,
+)]
 #[allow(non_camel_case_types)]
 pub enum PdfStandard {
     /// PDF 1.7.

@@ -91,7 +94,7 @@ display_possible_values!(PdfStandard);
 /// value parser, in order to generate better errors.
 ///
 /// See also: <https://github.com/clap-rs/clap/issues/5065>
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);
 
 impl FromStr for Pages {
@@ -67,11 +67,20 @@ impl EntryResolver {
 
     /// Resolves the entry state.
     pub fn resolve(&self, entry: Option<ImmutPath>) -> EntryState {
+        let root_dir = self.root(entry.as_ref());
+        self.resolve_with_root(root_dir, entry)
+    }
+
+    /// Resolves the entry state.
+    pub fn resolve_with_root(
+        &self,
+        root_dir: Option<ImmutPath>,
+        entry: Option<ImmutPath>,
+    ) -> EntryState {
         // todo: formalize untitled path
         // let is_untitled = entry.as_ref().is_some_and(|p| p.starts_with("/untitled"));
         // let root_dir = self.determine_root(if is_untitled { None } else {
         // entry.as_ref() });
-        let root_dir = self.root(entry.as_ref());
 
         let entry = match (entry, root_dir) {
             // (Some(entry), Some(root)) if is_untitled => Some(EntryState::new_rooted(
@@ -3,11 +3,11 @@ use std::{path::Path, sync::Arc};
 use ecow::EcoVec;
 use reflexo_typst::ImmutPath;
 use tinymist_std::path::unix_slash;
-use tinymist_world::EntryReader;
-use typst::{diag::EcoString, syntax::FileId};
+use typst::diag::EcoString;
+use typst::World;
 
-use crate::model::{Id, ProjectInput, ProjectMaterial, ProjectRoute, ProjectTask, ResourcePath};
-use crate::LspWorld;
+use crate::model::{Id, ProjectInput, ProjectRoute, ProjectTask, ResourcePath};
+use crate::{LspWorld, ProjectPathMaterial};
 
 /// Make a new project lock updater.
 pub fn update_lock(root: ImmutPath) -> ProjectLockUpdater {

@@ -20,7 +20,7 @@ pub fn update_lock(root: ImmutPath) -> ProjectLockUpdater {
 enum LockUpdate {
     Input(ProjectInput),
     Task(ProjectTask),
-    Material(ProjectMaterial),
+    Material(ProjectPathMaterial),
     Route(ProjectRoute),
 }
 

@@ -31,17 +31,10 @@ pub struct ProjectLockUpdater {
 
 impl ProjectLockUpdater {
     pub fn compiled(&mut self, world: &LspWorld) -> Option<Id> {
-        let entry = world.entry_state();
-        log::info!("ProjectCompiler: record compile for {entry:?}");
-        // todo: correct root
-        let root = entry.workspace_root()?;
-        let id = unix_slash(entry.main()?.vpath().as_rootless_path());
-        log::info!("ProjectCompiler: record compile for id {id} at {root:?}");
-
-        let path = &ResourcePath::from_user_sys(Path::new(&id));
-        let id: Id = path.into();
-
+        let id = Id::from_world(world)?;
         let root = ResourcePath::from_user_sys(Path::new("."));
+        let main = ResourcePath::from_user_sys(world.path_for_id(world.main()).ok()?.as_path());
 
         let font_resolver = &world.font_resolver;
         let font_paths = font_resolver

@@ -67,6 +60,7 @@ impl ProjectLockUpdater {
         let input = ProjectInput {
             id: id.clone(),
             root: Some(root),
+            main: Some(main),
             font_paths,
             system_fonts: true, // !args.font.ignore_system_fonts,
             package_path: None,

@@ -82,17 +76,11 @@ impl ProjectLockUpdater {
         self.updates.push(LockUpdate::Task(task));
     }
 
-    pub fn update_materials(&mut self, doc_id: Id, ids: EcoVec<FileId>) {
-        let mut files = ids
-            .into_iter()
-            .map(ResourcePath::from_file_id)
-            .collect::<Vec<_>>();
-        files.sort();
-        self.updates.push(LockUpdate::Material(ProjectMaterial {
-            root: EcoString::default(),
-            id: doc_id,
-            files,
-        }));
+    pub fn update_materials(&mut self, doc_id: Id, files: EcoVec<ImmutPath>) {
+        self.updates
+            .push(LockUpdate::Material(ProjectPathMaterial::from_deps(
+                doc_id, files,
+            )));
     }
 
     pub fn route(&mut self, doc_id: Id, priority: u32) {

@@ -115,21 +103,24 @@ impl ProjectLockUpdater {
                     l.replace_task(task);
                 }
                 LockUpdate::Material(mut mat) => {
+                    let root: EcoString = unix_slash(&self.root).into();
                     mat.root = root.clone();
                     let cache_dir = dirs::cache_dir();
                     if let Some(cache_dir) = cache_dir {
                         let id = tinymist_std::hash::hash128(&mat.id);
-                        let lower4096 = root_hash & 0xfff;
-                        let upper4096 = root_hash >> 12;
+                        let root_lo = root_hash & 0xfff;
+                        let root_hi = root_hash >> 12;
+                        let id_lo = id & 0xfff;
+                        let id_hi = id >> 12;
 
-                        // let hash_str = format!("{root:016x}/{id:016x}");
-                        let hash_str = format!("{lower4096:03x}/{upper4096:013x}/{id:016x}");
+                        let hash_str =
+                            format!("{root_lo:03x}/{root_hi:013x}/{id_lo:03x}/{id_hi:016x}");
 
                         let cache_dir = cache_dir.join("tinymist/projects").join(hash_str);
                         let _ = std::fs::create_dir_all(&cache_dir);
 
                         let data = serde_json::to_string(&mat).unwrap();
-                        let path = cache_dir.join("material.json");
+                        let path = cache_dir.join("path-material.json");
                         let result = tinymist_fs::paths::write_atomic(path, data);
                         if let Err(err) = result {
                             log::error!("ProjectCompiler: write material error: {err}");
@@ -1,18 +1,25 @@
 use core::fmt;
+use std::hash::{Hash, Hasher};
 use std::io::{Read, Seek, SeekFrom, Write};
+use std::path::PathBuf;
 use std::{cmp::Ordering, path::Path, str::FromStr};
 
 use anyhow::{bail, Context};
 use clap::ValueHint;
+use ecow::{eco_vec, EcoVec};
 use tinymist_std::path::unix_slash;
+use tinymist_std::ImmutPath;
+use tinymist_world::EntryReader;
 use typst::diag::EcoString;
 use typst::syntax::FileId;
 
 pub use anyhow::Result;
 
+use crate::LspWorld;
+
 use super::{Pages, PdfStandard, TaskWhen};
 
-const LOCKFILE_PATH: &str = "tinymist.lock";
+pub const LOCK_FILENAME: &str = "tinymist.lock";
 
 const LOCK_VERSION: &str = "0.1.0-beta0";

@@ -51,7 +58,7 @@ impl LockFileCompat {
     }
 }
 
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Default, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 pub struct LockFile {
     // The lock file version.
     // version: String,

@@ -62,11 +69,15 @@ pub struct LockFile {
     #[serde(skip_serializing_if = "Vec::is_empty", default)]
     pub task: Vec<ProjectTask>,
     /// The project's task route.
-    #[serde(skip_serializing_if = "Vec::is_empty", default)]
-    pub route: Vec<ProjectRoute>,
+    #[serde(skip_serializing_if = "EcoVec::is_empty", default)]
+    pub route: EcoVec<ProjectRoute>,
 }
 
 impl LockFile {
+    pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> {
+        self.document.iter().find(|i| &i.id == id)
+    }
+
     pub fn replace_document(&mut self, input: ProjectInput) {
         let id = input.id.clone();
         let index = self.document.iter().position(|i| i.id == id);

@@ -176,7 +187,7 @@ impl LockFile {
     pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> {
         let fs = tinymist_fs::flock::Filesystem::new(cwd.to_owned());
 
-        let mut lock_file = fs.open_rw_exclusive_create(LOCKFILE_PATH, "project commands")?;
+        let mut lock_file = fs.open_rw_exclusive_create(LOCK_FILENAME, "project commands")?;
 
         let mut data = vec![];
         lock_file.read_to_end(&mut data)?;

@@ -188,7 +199,7 @@ impl LockFile {
             LockFile {
                 document: vec![],
                 task: vec![],
-                route: vec![],
+                route: eco_vec![],
             }
         } else {
             let old_state = toml::from_str::<LockFileCompat>(old_data)

@@ -235,6 +246,64 @@ impl LockFile {
 
         Ok(())
     }
+
+    pub fn read(dir: &Path) -> Result<Self> {
+        let fs = tinymist_fs::flock::Filesystem::new(dir.to_owned());
+
+        let mut lock_file = fs.open_ro_shared(LOCK_FILENAME, "project commands")?;
+
+        let mut data = vec![];
+        lock_file.read_to_end(&mut data)?;
+
+        let data = std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;
+
+        let state = toml::from_str::<LockFileCompat>(data)
+            .context("tinymist.lock file is not a valid TOML file")?;
+
+        state.migrate()
+    }
+}
+
+/// A scalar that is not NaN.
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct Scalar(f32);
+
+impl TryFrom<f32> for Scalar {
+    type Error = &'static str;
+
+    fn try_from(value: f32) -> Result<Self, Self::Error> {
+        if value.is_nan() {
+            Err("NaN is not a valid scalar value")
+        } else {
+            Ok(Scalar(value))
+        }
+    }
+}
+
+impl PartialEq for Scalar {
+    fn eq(&self, other: &Self) -> bool {
+        self.0 == other.0
+    }
+}
+
+impl Eq for Scalar {}
+
+impl Hash for Scalar {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.0.to_bits().hash(state);
+    }
+}
+
+impl PartialOrd for Scalar {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for Scalar {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.0.partial_cmp(&other.0).unwrap()
+    }
 }
 
 /// A project ID.
@@ -248,6 +317,14 @@ impl Id {
     pub fn new(s: String) -> Self {
         Id(s)
     }
+
+    pub fn from_world(world: &LspWorld) -> Option<Self> {
+        let entry = world.entry_state();
+        let id = unix_slash(entry.main()?.vpath().as_rootless_path());
+
+        let path = &ResourcePath::from_user_sys(Path::new(&id));
+        Some(path.into())
+    }
 }
 
 impl fmt::Display for Id {
@@ -286,7 +363,7 @@ impl From<&DocIdArgs> for Id {
 }
 
 /// A resource path.
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct ResourcePath(EcoString, String);
 
 impl fmt::Display for ResourcePath {
@@ -354,10 +431,23 @@ impl ResourcePath {
             ),
         }
     }
+
+    pub fn to_abs_path(&self, rel: &Path) -> Option<PathBuf> {
+        if self.0 == "file" {
+            let path = Path::new(&self.1);
+            if path.is_absolute() {
+                Some(path.to_owned())
+            } else {
+                Some(rel.join(path))
+            }
+        } else {
+            None
+        }
+    }
 }
 
 /// A project input specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ProjectInput {
     /// The project's ID.
@@ -365,6 +455,9 @@ pub struct ProjectInput {
     /// The project's root directory.
     #[serde(skip_serializing_if = "Option::is_none")]
     pub root: Option<ResourcePath>,
+    /// The project's main file.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub main: Option<ResourcePath>,
     /// The project's font paths.
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
     pub font_paths: Vec<ResourcePath>,
@@ -380,7 +473,7 @@ pub struct ProjectInput {
 }
 
 /// A project task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case", tag = "type")]
 pub enum ProjectTask {
     /// A preview task.

@@ -433,7 +526,7 @@ impl ProjectTask {
 }
 
 /// An lsp task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct PreviewTask {
     /// The task's ID.

@@ -445,7 +538,7 @@ pub struct PreviewTask {
 }
 
 /// An export task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ExportTask {
     /// The task's ID.

@@ -460,7 +553,7 @@ pub struct ExportTask {
 }
 
 /// A project export transform specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub enum ExportTransform {
     /// Only pick a subset of pages.

@@ -468,7 +561,7 @@ pub enum ExportTransform {
 }
 
 /// An export pdf task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ExportPdfTask {
     /// The shared export arguments

@@ -480,18 +573,18 @@ pub struct ExportPdfTask {
 }
 
 /// An export png task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ExportPngTask {
     /// The shared export arguments
     #[serde(flatten)]
     pub export: ExportTask,
     /// The PPI (pixels per inch) to use for PNG export.
-    pub ppi: f32,
+    pub ppi: Scalar,
 }
 
 /// An export svg task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ExportSvgTask {
     /// The shared export arguments

@@ -500,7 +593,7 @@ pub struct ExportSvgTask {
 }
 
 /// An export html task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ExportHtmlTask {
     /// The shared export arguments

@@ -509,7 +602,7 @@ pub struct ExportHtmlTask {
 }
 
 /// An export markdown task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ExportMarkdownTask {
     /// The shared export arguments

@@ -518,7 +611,7 @@ pub struct ExportMarkdownTask {
 }
 
 /// An export text task specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ExportTextTask {
     /// The shared export arguments

@@ -527,7 +620,7 @@ pub struct ExportTextTask {
 }
 
 /// A project route specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ProjectMaterial {
     /// The root of the project that the material belongs to.
@@ -539,12 +632,37 @@ pub struct ProjectMaterial {
 }
 
 /// A project route specifier.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct ProjectPathMaterial {
+    /// The root of the project that the material belongs to.
+    pub root: EcoString,
+    /// A project.
+    pub id: Id,
+    /// The files.
+    pub files: Vec<PathBuf>,
+}
+
+impl ProjectPathMaterial {
+    pub fn from_deps(doc_id: Id, files: EcoVec<ImmutPath>) -> Self {
+        let mut files: Vec<_> = files.into_iter().map(|p| p.as_ref().to_owned()).collect();
+        files.sort();
+
+        ProjectPathMaterial {
+            root: EcoString::default(),
+            id: doc_id,
+            files,
+        }
+    }
+}
+
+/// A project route specifier.
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct ProjectRoute {
     /// A project.
     pub id: Id,
-    /// The priority of the project.
+    /// The priority of the project. (lower numbers are higher priority).
     pub priority: u32,
 }
@@ -24,6 +24,7 @@ pub mod signature;
 pub use signature::*;
 pub mod semantic_tokens;
 pub use semantic_tokens::*;
+use tinymist_std::ImmutPath;
 use tinymist_world::vfs::WorkspaceResolver;
 use tinymist_world::WorldDeps;
 use typst::syntax::Source;

@@ -76,6 +77,10 @@ pub trait LspWorldExt {
     /// Get all depended file ids of a compilation, inclusively.
     /// Note: must be called after compilation.
     fn depended_files(&self) -> EcoVec<FileId>;
+
+    /// Get all depended paths in file system of a compilation, inclusively.
+    /// Note: must be called after compilation.
+    fn depended_fs_paths(&self) -> EcoVec<ImmutPath>;
 }
 
 impl LspWorldExt for tinymist_project::LspWorld {

@@ -109,6 +114,16 @@ impl LspWorldExt for tinymist_project::LspWorld {
         });
         deps
     }
+
+    fn depended_fs_paths(&self) -> EcoVec<ImmutPath> {
+        let mut deps = EcoVec::new();
+        self.iter_dependencies(&mut |file_id| {
+            if let Ok(path) = self.path_for_id(file_id) {
+                deps.push(path.as_path().into());
+            }
+        });
+        deps
+    }
 }
 
 #[cfg(test)]
@@ -52,6 +52,7 @@ rayon.workspace = true
 reflexo.workspace = true
 reflexo-typst = { workspace = true, features = ["system"] }
 reflexo-vec2svg.workspace = true
+rpds.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 serde_yaml.workspace = true
@@ -23,6 +23,7 @@ mod cmd;
 mod init;
 pub mod project;
 mod resource;
+mod route;
 mod server;
 mod stats;
 mod task;
crates/tinymist/src/route.rs (new file, 197 lines)
@@ -0,0 +1,197 @@
+use std::{path::Path, sync::Arc};
+
+use reflexo_typst::{path::unix_slash, typst::prelude::EcoVec, LazyHash};
+use rpds::RedBlackTreeMapSync;
+use tinymist_project::{
+    CompileSnapshot, Id, LockFile, LspCompilerFeat, ProjectPathMaterial, ProjectRoute,
+};
+use tinymist_query::LspWorldExt;
+use tinymist_std::{hash::FxHashMap, ImmutPath};
+use typst::diag::EcoString;
+
+#[derive(Default)]
+pub struct ProjectRouteState {
+    path_routes: FxHashMap<ImmutPath, RoutePathState>,
+}
+
+pub struct ProjectResolution {
+    pub lock_dir: ImmutPath,
+    pub project_id: Id,
+}
+
+impl ProjectRouteState {
+    pub fn locate(&self, resolved: &ProjectResolution) -> Option<Arc<LockFile>> {
+        let path_route = self.path_routes.get(&resolved.lock_dir)?;
+        Some(path_route.lock.clone())
+    }
+
+    pub fn resolve(&mut self, leaf: &ImmutPath) -> Option<ProjectResolution> {
+        for path in std::iter::successors(Some(leaf.as_ref()), |p| p.parent()) {
+            if let Some(resolution) = self.resolve_at(path, leaf) {
+                return Some(resolution);
+            }
+        }
+
+        None
+    }
+
+    fn resolve_at(&mut self, lock_dir: &Path, leaf: &Path) -> Option<ProjectResolution> {
+        log::debug!("resolve: {leaf:?} at {lock_dir:?}");
+        let (lock_dir, project_id) = match self.path_routes.get_key_value(lock_dir) {
+            Some((key, path_route)) => (key.clone(), path_route.routes.get(leaf)?.clone()),
+            None => {
+                let lock_dir: ImmutPath = lock_dir.into();
+                let mut new_route = self.load_lock(&lock_dir).unwrap_or_default();
+
+                let mut materials = RedBlackTreeMapSync::default();
+
+                if let Some(cache_dir) = new_route.cache_dir.as_ref() {
+                    let entries = walkdir::WalkDir::new(cache_dir)
+                        .into_iter()
+                        .filter_map(|entry| entry.ok())
+                        .filter(|entry| entry.file_type().is_file());
+
+                    for entry in entries {
+                        let material = self.read_material(entry.path());
+                        if let Some(material) = material {
+                            let id = material.id.clone();
+                            materials.insert_mut(id.clone(), material);
+                        }
+                    }
+                }
+                let materials = LazyHash::new(materials);
+                new_route.routes = calculate_routes(new_route.lock.route.clone(), &materials);
+                new_route.materials = materials;
+
+                log::debug!("loaded routes at {lock_dir:?}, {:?}", new_route.routes);
+                let project_id = new_route.routes.get(leaf)?.clone();
+
+                self.path_routes.insert(lock_dir.clone(), new_route);
+                (lock_dir, project_id)
+            }
+        };
+
+        Some(ProjectResolution {
+            lock_dir,
+            project_id,
+        })
+    }
+
+    pub fn update_lock(&mut self, lock_dir: ImmutPath, lock: LockFile) -> Option<()> {
+        let path_route = self.path_routes.get_mut(&lock_dir)?;
+
+        let lock_unchanged = path_route.lock.as_ref() == &lock;
+        if lock_unchanged {
+            return Some(());
+        }
+
+        path_route.lock = Arc::new(lock);
+        path_route.routes = calculate_routes(path_route.lock.route.clone(), &path_route.materials);
+
+        Some(())
+    }
+
+    pub fn update_existing_material(
+        &mut self,
+        lock_dir: ImmutPath,
+        snap: &CompileSnapshot<LspCompilerFeat>,
+    ) -> Option<()> {
+        let path_route = self.path_routes.get_mut(&lock_dir)?;
+
+        let id = Id::from_world(&snap.world)?;
+        let deps = snap.world.depended_fs_paths();
+        let material = ProjectPathMaterial::from_deps(id, deps);
+
+        let old = path_route.materials.get_mut(&material.id)?;
+        if old == &material {
+            return Some(());
+        }
+
+        path_route
+            .materials
+            .insert_mut(material.id.clone(), material);
+        path_route.routes = calculate_routes(path_route.lock.route.clone(), &path_route.materials);
+
+        Some(())
+    }
+
+    fn load_lock(&self, path: &Path) -> Option<RoutePathState> {
+        let lock_data = Arc::new(match LockFile::read(path) {
+            Ok(lock) => lock,
+            Err(e) => {
+                log::debug!("failed to load lock at {path:?}: {e:?}");
+                return None;
+            }
+        });
+        log::info!("loaded lock at {path:?}");
+
+        let root: EcoString = unix_slash(path).into();
+        let root_hash = tinymist_std::hash::hash128(&root);
+        let cache_dir_base = dirs::cache_dir();
+        let mut cache_dir = None;
+        if let Some(cache_dir_base) = cache_dir_base {
+            let root_lo = root_hash & 0xfff;
+            let root_hi = root_hash >> 12;
+
+            // let hash_str = format!("{root:016x}/{id:016x}");
+            let project_state = format!("{root_lo:03x}/{root_hi:013x}");
+
+            cache_dir = Some(
+                cache_dir_base
+                    .join("tinymist/projects")
+                    .join(project_state)
+                    .into(),
+            );
+        }
+
+        Some(RoutePathState {
+            lock: lock_data,
+            materials: LazyHash::default(),
+            routes: Arc::new(FxHashMap::default()),
+            cache_dir,
+        })
+    }
+
+    fn read_material(&self, entry_path: &Path) -> Option<ProjectPathMaterial> {
+        log::info!("check material at {entry_path:?}");
+        let name = entry_path.file_name().unwrap_or(entry_path.as_os_str());
+        if name != "path-material.json" {
+            return None;
+        }
+
+        let data = std::fs::read(entry_path).ok()?;
+
+        let material = serde_json::from_slice::<ProjectPathMaterial>(&data).ok()?;
+        Some(material)
+    }
+}
+
+#[comemo::memoize]
+fn calculate_routes(
+    raw_routes: EcoVec<ProjectRoute>,
+    materials: &LazyHash<rpds::RedBlackTreeMapSync<Id, ProjectPathMaterial>>,
+) -> Arc<FxHashMap<ImmutPath, Id>> {
+    let mut routes = FxHashMap::default();
+
+    let mut priorities = FxHashMap::default();
+
+    for route in raw_routes.iter() {
+        if let Some(material) = materials.get(&route.id) {
+            for file in material.files.iter() {
+                routes.insert(file.as_path().into(), route.id.clone());
+            }
+        }
+
+        priorities.insert(route.id.clone(), route.priority);
+    }
+
+    Arc::new(routes)
+}
+
+#[derive(Default)]
+struct RoutePathState {
+    lock: Arc<LockFile>,
+    materials: LazyHash<rpds::RedBlackTreeMapSync<Id, ProjectPathMaterial>>,
+    routes: Arc<FxHashMap<ImmutPath, Id>>,
+    cache_dir: Option<ImmutPath>,
+}
@@ -19,13 +19,14 @@ use project::{watch_deps, LspPreviewState};
 use project::{CompileHandlerImpl, Project, QuerySnapFut, QuerySnapWithStat, WorldSnapFut};
 use reflexo_typst::Bytes;
 use request::{RegisterCapability, UnregisterCapability};
+use route::{ProjectResolution, ProjectRouteState};
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value as JsonValue};
 use sync_lsp::*;
 use task::{
     ExportConfig, ExportTask, ExportUserConfig, FormatTask, FormatterConfig, UserActionTask,
 };
-use tinymist_project::{CompileSnapshot, EntryResolver, ProjectInsId};
+use tinymist_project::{CompileSnapshot, EntryResolver, ProjectInsId, ProjectResolutionKind};
 use tinymist_query::analysis::{Analysis, PeriscopeProvider};
 use tinymist_query::{
     to_typst_range, CompilerQueryRequest, CompilerQueryResponse, ExportKind, FoldRequestFeature,
@@ -66,6 +67,8 @@ fn as_path_pos(inp: TextDocumentPositionParams) -> (PathBuf, Position) {
 pub struct LanguageState {
     /// The lsp client
     pub client: TypedLspClient<Self>,
+    /// The lock state.
+    pub route: ProjectRouteState,
     /// The project state.
     pub project: Project,
@@ -127,6 +130,7 @@ impl LanguageState {
 
         Self {
             client: client.clone(),
+            route: ProjectRouteState::default(),
             project: handle,
             editor_tx,
             memory_changes: HashMap::new(),
@@ -925,6 +929,56 @@ impl LanguageState {
         }
     }
 
+    fn resolve_task(&self, path: ImmutPath) -> TaskInputs {
+        let entry = self.entry_resolver().resolve(Some(path));
+
+        TaskInputs {
+            entry: Some(entry),
+            ..Default::default()
+        }
+    }
+
+    fn resolve_task_with_state(&mut self, path: ImmutPath) -> TaskInputs {
+        let proj_input = matches!(
+            self.config.project_resolution,
+            ProjectResolutionKind::LockDatabase
+        )
+        .then(|| {
+            let resolution = self.route.resolve(&path)?;
+            let lock = self.route.locate(&resolution)?;
+
+            let ProjectResolution {
+                lock_dir,
+                project_id,
+            } = &resolution;
+
+            let input = lock.get_document(project_id)?;
+            let root = input
+                .root
+                .as_ref()
+                .and_then(|res| Some(res.to_abs_path(lock_dir)?.as_path().into()))
+                .unwrap_or_else(|| lock_dir.clone());
+            let main = input
+                .main
+                .as_ref()
+                .and_then(|main| Some(main.to_abs_path(lock_dir)?.as_path().into()))
+                .unwrap_or_else(|| path.clone());
+            let entry = self
+                .entry_resolver()
+                .resolve_with_root(Some(root), Some(main));
+            log::info!("resolved task with state: {path:?} -> {project_id:?} -> {entry:?}");
+
+            Some(TaskInputs {
+                entry: Some(entry),
+                ..Default::default()
+            })
+        });
+
+        proj_input
+            .flatten()
+            .unwrap_or_else(|| self.resolve_task(path))
+    }
+
     /// Snapshot the compiler thread for tasks
     pub fn snapshot(&mut self) -> ZResult<WorldSnapFut> {
         self.project.snapshot()
@@ -1016,7 +1070,7 @@ impl LanguageState {
         let world = snap.world.clone();
         let doc_id = updater.compiled(&world)?;
 
-        updater.update_materials(doc_id.clone(), snap.world.depended_files());
+        updater.update_materials(doc_id.clone(), snap.world.depended_fs_paths());
         updater.route(doc_id, PROJECT_ROUTE_USER_ACTION_PRIORITY);
 
         updater.commit();
@@ -1359,22 +1413,24 @@ impl LanguageState {
         assert!(query.fold_feature() != FoldRequestFeature::ContextFreeUnique);
 
         let fut_stat = self.query_snapshot_with_stat(&query)?;
-        let entry = query
+        let input = query
             .associated_path()
-            .map(|path| self.entry_resolver().resolve(Some(path.into())))
+            .map(|path| self.resolve_task_with_state(path.into()))
             .or_else(|| {
                 let root = self.entry_resolver().root(None)?;
-                Some(EntryState::new_rooted_by_id(root, *DETACHED_ENTRY))
+                Some(TaskInputs {
+                    entry: Some(EntryState::new_rooted_by_id(root, *DETACHED_ENTRY)),
+                    ..Default::default()
+                })
             });
 
         just_future(async move {
             let mut snap = fut_stat.fut.receive().await?;
             // todo: whether it is safe to inherit success_doc with changed entry
             if !is_pinning {
-                snap = snap.task(TaskInputs {
-                    entry,
-                    ..Default::default()
-                });
+                if let Some(input) = input {
+                    snap = snap.task(input);
+                }
             }
             fut_stat.stat.snap();
@@ -252,6 +252,7 @@ impl ExportConfig {
             let _ = page;
 
             let ppi = ppi.unwrap_or(144.) as f32;
+            let ppi = ppi.try_into().unwrap();
             ProjectTask::ExportPng(ExportPngTask { export, ppi })
         }
     };
@@ -23,6 +23,7 @@ impl LockFileExt for LockFile {
             .root
             .as_ref()
             .map(|root| ResourcePath::from_user_sys(Path::new(root)));
+        let main = ResourcePath::from_user_sys(Path::new(&args.id.input));
 
         let font_paths = args
             .font
@@ -46,6 +47,7 @@ impl LockFileExt for LockFile {
         let input = ProjectInput {
             id: id.clone(),
             root,
+            main: Some(main),
             font_paths,
             system_fonts: !args.font.ignore_system_fonts,
             package_path,
@@ -106,7 +108,7 @@ impl LockFileExt for LockFile {
             }),
             OutputFormat::Png => ProjectTask::ExportPng(ExportPngTask {
                 export,
-                ppi: args.ppi,
+                ppi: args.ppi.try_into().unwrap(),
             }),
             OutputFormat::Svg => ProjectTask::ExportSvg(ExportSvgTask { export }),
             OutputFormat::Html => ProjectTask::ExportSvg(ExportSvgTask { export }),