feat: resolve projects by lockDatabase (#1142)

* feat: implement lock-based project resolution

* feat: first working example
Myriad-Dreamin committed via GitHub on 2025-01-20 14:51:09 +08:00
parent 89c178295a
commit 6d1e40d3a9
12 changed files with 460 additions and 65 deletions
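
At a high level, this change lets tinymist decide which project a file belongs to by consulting the tinymist.lock database (the new lockDatabase project resolution mode) instead of inferring everything from the file's own location. A minimal sketch of the intended flow, using the APIs added in this commit; the route, entry resolver, and leaf bindings are assumed to exist on the caller's side:

// Sketch only (not part of the diff): `route` is a mutable ProjectRouteState,
// and `leaf: ImmutPath` is the file currently being edited.
if let Some(resolution) = route.resolve(&leaf) {
    // Walks up from `leaf`, loading tinymist.lock plus its cached path
    // materials, until some project claims the file.
    if let Some(lock) = route.locate(&resolution) {
        if let Some(input) = lock.get_document(&resolution.project_id) {
            // Turn lock-relative resource paths back into absolute paths.
            let root = input.root.as_ref().and_then(|r| r.to_abs_path(&resolution.lock_dir));
            let main = input.main.as_ref().and_then(|m| m.to_abs_path(&resolution.lock_dir));
            // `root` and `main` then drive EntryResolver::resolve_with_root below.
        }
    }
}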

View file

@ -24,6 +24,7 @@ macro_rules! display_possible_values {
Clone,
Eq,
PartialEq,
Hash,
Ord,
PartialOrd,
serde::Serialize,
@ -71,7 +72,9 @@ pub enum OutputFormat {
display_possible_values!(OutputFormat);
/// A PDF standard that Typst can enforce conformance with.
#[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum, serde::Serialize, serde::Deserialize)]
#[derive(
Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, serde::Serialize, serde::Deserialize,
)]
#[allow(non_camel_case_types)]
pub enum PdfStandard {
/// PDF 1.7.
@ -91,7 +94,7 @@ display_possible_values!(PdfStandard);
/// value parser, in order to generate better errors.
///
/// See also: <https://github.com/clap-rs/clap/issues/5065>
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);
impl FromStr for Pages {

View file

@ -67,11 +67,20 @@ impl EntryResolver {
/// Resolves the entry state.
pub fn resolve(&self, entry: Option<ImmutPath>) -> EntryState {
let root_dir = self.root(entry.as_ref());
self.resolve_with_root(root_dir, entry)
}
/// Resolves the entry state.
pub fn resolve_with_root(
&self,
root_dir: Option<ImmutPath>,
entry: Option<ImmutPath>,
) -> EntryState {
// todo: formalize untitled path
// let is_untitled = entry.as_ref().is_some_and(|p| p.starts_with("/untitled"));
// let root_dir = self.determine_root(if is_untitled { None } else {
// entry.as_ref() });
let root_dir = self.root(entry.as_ref());
let entry = match (entry, root_dir) {
// (Some(entry), Some(root)) if is_untitled => Some(EntryState::new_rooted(
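
For context, a tiny usage sketch (hypothetical caller, not part of the diff) of the difference between the two entry points: resolve still guesses the root from the entry path, while resolve_with_root lets the lock database supply it.

// Sketch: `resolver`, `lock_root`, and `main` are assumed bindings.
let guessed = resolver.resolve(Some(main.clone()));                      // root inferred from `main`
let from_lock = resolver.resolve_with_root(Some(lock_root), Some(main)); // root taken from tinymist.lock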

View file

@ -3,11 +3,11 @@ use std::{path::Path, sync::Arc};
use ecow::EcoVec;
use reflexo_typst::ImmutPath;
use tinymist_std::path::unix_slash;
use tinymist_world::EntryReader;
use typst::{diag::EcoString, syntax::FileId};
use typst::diag::EcoString;
use typst::World;
use crate::model::{Id, ProjectInput, ProjectMaterial, ProjectRoute, ProjectTask, ResourcePath};
use crate::LspWorld;
use crate::model::{Id, ProjectInput, ProjectRoute, ProjectTask, ResourcePath};
use crate::{LspWorld, ProjectPathMaterial};
/// Make a new project lock updater.
pub fn update_lock(root: ImmutPath) -> ProjectLockUpdater {
@ -20,7 +20,7 @@ pub fn update_lock(root: ImmutPath) -> ProjectLockUpdater {
enum LockUpdate {
Input(ProjectInput),
Task(ProjectTask),
Material(ProjectMaterial),
Material(ProjectPathMaterial),
Route(ProjectRoute),
}
@ -31,17 +31,10 @@ pub struct ProjectLockUpdater {
impl ProjectLockUpdater {
pub fn compiled(&mut self, world: &LspWorld) -> Option<Id> {
let entry = world.entry_state();
log::info!("ProjectCompiler: record compile for {entry:?}");
// todo: correct root
let root = entry.workspace_root()?;
let id = unix_slash(entry.main()?.vpath().as_rootless_path());
log::info!("ProjectCompiler: record compile for id {id} at {root:?}");
let path = &ResourcePath::from_user_sys(Path::new(&id));
let id: Id = path.into();
let id = Id::from_world(world)?;
let root = ResourcePath::from_user_sys(Path::new("."));
let main = ResourcePath::from_user_sys(world.path_for_id(world.main()).ok()?.as_path());
let font_resolver = &world.font_resolver;
let font_paths = font_resolver
@ -67,6 +60,7 @@ impl ProjectLockUpdater {
let input = ProjectInput {
id: id.clone(),
root: Some(root),
main: Some(main),
font_paths,
system_fonts: true, // !args.font.ignore_system_fonts,
package_path: None,
@ -82,17 +76,11 @@ impl ProjectLockUpdater {
self.updates.push(LockUpdate::Task(task));
}
pub fn update_materials(&mut self, doc_id: Id, ids: EcoVec<FileId>) {
let mut files = ids
.into_iter()
.map(ResourcePath::from_file_id)
.collect::<Vec<_>>();
files.sort();
self.updates.push(LockUpdate::Material(ProjectMaterial {
root: EcoString::default(),
id: doc_id,
files,
}));
pub fn update_materials(&mut self, doc_id: Id, files: EcoVec<ImmutPath>) {
self.updates
.push(LockUpdate::Material(ProjectPathMaterial::from_deps(
doc_id, files,
)));
}
pub fn route(&mut self, doc_id: Id, priority: u32) {
@ -115,21 +103,24 @@ impl ProjectLockUpdater {
l.replace_task(task);
}
LockUpdate::Material(mut mat) => {
let root: EcoString = unix_slash(&self.root).into();
mat.root = root.clone();
let cache_dir = dirs::cache_dir();
if let Some(cache_dir) = cache_dir {
let id = tinymist_std::hash::hash128(&mat.id);
let lower4096 = root_hash & 0xfff;
let upper4096 = root_hash >> 12;
let root_lo = root_hash & 0xfff;
let root_hi = root_hash >> 12;
let id_lo = id & 0xfff;
let id_hi = id >> 12;
// let hash_str = format!("{root:016x}/{id:016x}");
let hash_str = format!("{lower4096:03x}/{upper4096:013x}/{id:016x}");
let hash_str =
format!("{root_lo:03x}/{root_hi:013x}/{id_lo:03x}/{id_hi:016x}");
let cache_dir = cache_dir.join("tinymist/projects").join(hash_str);
let _ = std::fs::create_dir_all(&cache_dir);
let data = serde_json::to_string(&mat).unwrap();
let path = cache_dir.join("material.json");
let path = cache_dir.join("path-material.json");
let result = tinymist_fs::paths::write_atomic(path, data);
if let Err(err) = result {
log::error!("ProjectCompiler: write material error: {err}");
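
To make the cache layout concrete, a small derivation sketch (the root and mat bindings are assumed) mirroring the two format strings above: each 128-bit hash is split into a 12-bit directory prefix and the remaining bits, so the per-root prefix can later be scanned independently of the per-project suffix.

// Sketch: `root` is the unix-slashed lock directory, `mat.id` the project id.
let root_hash = tinymist_std::hash::hash128(&root);
let id_hash = tinymist_std::hash::hash128(&mat.id);
let (root_lo, root_hi) = (root_hash & 0xfff, root_hash >> 12);
let (id_lo, id_hi) = (id_hash & 0xfff, id_hash >> 12);
let bucket = format!("{root_lo:03x}/{root_hi:013x}/{id_lo:03x}/{id_hi:016x}");
if let Some(cache) = dirs::cache_dir() {
    // e.g. ~/.cache/tinymist/projects/<root_lo>/<root_hi>/<id_lo>/<id_hi>/path-material.json
    let _path = cache.join("tinymist/projects").join(&bucket).join("path-material.json");
}

The loader added in route.rs below rebuilds only the {root_lo:03x}/{root_hi:013x} prefix for a given lock directory and walks it with walkdir to pick up each project's path-material.json.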

View file

@ -1,18 +1,25 @@
use core::fmt;
use std::hash::{Hash, Hasher};
use std::io::{Read, Seek, SeekFrom, Write};
use std::path::PathBuf;
use std::{cmp::Ordering, path::Path, str::FromStr};
use anyhow::{bail, Context};
use clap::ValueHint;
use ecow::{eco_vec, EcoVec};
use tinymist_std::path::unix_slash;
use tinymist_std::ImmutPath;
use tinymist_world::EntryReader;
use typst::diag::EcoString;
use typst::syntax::FileId;
pub use anyhow::Result;
use crate::LspWorld;
use super::{Pages, PdfStandard, TaskWhen};
const LOCKFILE_PATH: &str = "tinymist.lock";
pub const LOCK_FILENAME: &str = "tinymist.lock";
const LOCK_VERSION: &str = "0.1.0-beta0";
@ -51,7 +58,7 @@ impl LockFileCompat {
}
}
#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Default, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct LockFile {
// The lock file version.
// version: String,
@ -62,11 +69,15 @@ pub struct LockFile {
#[serde(skip_serializing_if = "Vec::is_empty", default)]
pub task: Vec<ProjectTask>,
/// The project's task route.
#[serde(skip_serializing_if = "Vec::is_empty", default)]
pub route: Vec<ProjectRoute>,
#[serde(skip_serializing_if = "EcoVec::is_empty", default)]
pub route: EcoVec<ProjectRoute>,
}
impl LockFile {
pub fn get_document(&self, id: &Id) -> Option<&ProjectInput> {
self.document.iter().find(|i| &i.id == id)
}
pub fn replace_document(&mut self, input: ProjectInput) {
let id = input.id.clone();
let index = self.document.iter().position(|i| i.id == id);
@ -176,7 +187,7 @@ impl LockFile {
pub fn update(cwd: &Path, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<()> {
let fs = tinymist_fs::flock::Filesystem::new(cwd.to_owned());
let mut lock_file = fs.open_rw_exclusive_create(LOCKFILE_PATH, "project commands")?;
let mut lock_file = fs.open_rw_exclusive_create(LOCK_FILENAME, "project commands")?;
let mut data = vec![];
lock_file.read_to_end(&mut data)?;
@ -188,7 +199,7 @@ impl LockFile {
LockFile {
document: vec![],
task: vec![],
route: vec![],
route: eco_vec![],
}
} else {
let old_state = toml::from_str::<LockFileCompat>(old_data)
@ -235,6 +246,64 @@ impl LockFile {
Ok(())
}
pub fn read(dir: &Path) -> Result<Self> {
let fs = tinymist_fs::flock::Filesystem::new(dir.to_owned());
let mut lock_file = fs.open_ro_shared(LOCK_FILENAME, "project commands")?;
let mut data = vec![];
lock_file.read_to_end(&mut data)?;
let data = std::str::from_utf8(&data).context("tinymist.lock file is not valid utf-8")?;
let state = toml::from_str::<LockFileCompat>(data)
.context("tinymist.lock file is not a valid TOML file")?;
state.migrate()
}
}
/// A scalar that is not NaN.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct Scalar(f32);
impl TryFrom<f32> for Scalar {
type Error = &'static str;
fn try_from(value: f32) -> Result<Self, Self::Error> {
if value.is_nan() {
Err("NaN is not a valid scalar value")
} else {
Ok(Scalar(value))
}
}
}
impl PartialEq for Scalar {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
impl Eq for Scalar {}
impl Hash for Scalar {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.to_bits().hash(state);
}
}
impl PartialOrd for Scalar {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Scalar {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.0.partial_cmp(&other.0).unwrap()
}
}
/// A project ID.
@ -248,6 +317,14 @@ impl Id {
pub fn new(s: String) -> Self {
Id(s)
}
pub fn from_world(world: &LspWorld) -> Option<Self> {
let entry = world.entry_state();
let id = unix_slash(entry.main()?.vpath().as_rootless_path());
let path = &ResourcePath::from_user_sys(Path::new(&id));
Some(path.into())
}
}
impl fmt::Display for Id {
@ -286,7 +363,7 @@ impl From<&DocIdArgs> for Id {
}
/// A resource path.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ResourcePath(EcoString, String);
impl fmt::Display for ResourcePath {
@ -354,10 +431,23 @@ impl ResourcePath {
),
}
}
pub fn to_abs_path(&self, rel: &Path) -> Option<PathBuf> {
if self.0 == "file" {
let path = Path::new(&self.1);
if path.is_absolute() {
Some(path.to_owned())
} else {
Some(rel.join(path))
}
} else {
None
}
}
}
/// A project input specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ProjectInput {
/// The project's ID.
@ -365,6 +455,9 @@ pub struct ProjectInput {
/// The project's root directory.
#[serde(skip_serializing_if = "Option::is_none")]
pub root: Option<ResourcePath>,
/// The project's main file.
#[serde(skip_serializing_if = "Option::is_none")]
pub main: Option<ResourcePath>,
/// The project's font paths.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub font_paths: Vec<ResourcePath>,
@ -380,7 +473,7 @@ pub struct ProjectInput {
}
/// A project task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub enum ProjectTask {
/// A preview task.
@ -433,7 +526,7 @@ impl ProjectTask {
}
/// An lsp task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct PreviewTask {
/// The task's ID.
@ -445,7 +538,7 @@ pub struct PreviewTask {
}
/// An export task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportTask {
/// The task's ID.
@ -460,7 +553,7 @@ pub struct ExportTask {
}
/// A project export transform specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExportTransform {
/// Only pick a subset of pages.
@ -468,7 +561,7 @@ pub enum ExportTransform {
}
/// An export pdf task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportPdfTask {
/// The shared export arguments
@ -480,18 +573,18 @@ pub struct ExportPdfTask {
}
/// An export png task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportPngTask {
/// The shared export arguments
#[serde(flatten)]
pub export: ExportTask,
/// The PPI (pixels per inch) to use for PNG export.
pub ppi: f32,
pub ppi: Scalar,
}
/// An export svg task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportSvgTask {
/// The shared export arguments
@ -500,7 +593,7 @@ pub struct ExportSvgTask {
}
/// An export html task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportHtmlTask {
/// The shared export arguments
@ -509,7 +602,7 @@ pub struct ExportHtmlTask {
}
/// An export markdown task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportMarkdownTask {
/// The shared export arguments
@ -518,7 +611,7 @@ pub struct ExportMarkdownTask {
}
/// An export text task specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ExportTextTask {
/// The shared export arguments
@ -527,7 +620,7 @@ pub struct ExportTextTask {
}
/// A project route specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ProjectMaterial {
/// The root of the project that the material belongs to.
@ -539,12 +632,37 @@ pub struct ProjectMaterial {
}
/// A project route specifier.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ProjectPathMaterial {
/// The root of the project that the material belongs to.
pub root: EcoString,
/// A project.
pub id: Id,
/// The files.
pub files: Vec<PathBuf>,
}
impl ProjectPathMaterial {
pub fn from_deps(doc_id: Id, files: EcoVec<ImmutPath>) -> Self {
let mut files: Vec<_> = files.into_iter().map(|p| p.as_ref().to_owned()).collect();
files.sort();
ProjectPathMaterial {
root: EcoString::default(),
id: doc_id,
files,
}
}
}
/// A project route specifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct ProjectRoute {
/// A project.
pub id: Id,
/// The priority of the project.
/// The priority of the project (lower numbers are higher priority).
pub priority: u32,
}
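
The blanket PartialEq/Eq/Hash derives, and the Scalar wrapper that replaces the raw f32 PPI, exist so that whole lock values can be compared and hashed for change detection and memoized route calculation. A hedged illustration using only the types defined above:

// `f32` is neither Eq nor Hash; the NaN-free `Scalar` newtype is both.
let ppi: Scalar = Scalar::try_from(144.0_f32).expect("144 is not NaN");
assert!(Scalar::try_from(f32::NAN).is_err());
// With Eq derived on LockFile, an unchanged lock can be detected cheaply,
// which is what ProjectRouteState::update_lock (added later in this commit)
// checks before recomputing routes.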

View file

@ -24,6 +24,7 @@ pub mod signature;
pub use signature::*;
pub mod semantic_tokens;
pub use semantic_tokens::*;
use tinymist_std::ImmutPath;
use tinymist_world::vfs::WorkspaceResolver;
use tinymist_world::WorldDeps;
use typst::syntax::Source;
@ -76,6 +77,10 @@ pub trait LspWorldExt {
/// Get all depended file ids of a compilation, inclusively.
/// Note: must be called after compilation.
fn depended_files(&self) -> EcoVec<FileId>;
/// Get all depended file system paths of a compilation, inclusively.
/// Note: must be called after compilation.
fn depended_fs_paths(&self) -> EcoVec<ImmutPath>;
}
impl LspWorldExt for tinymist_project::LspWorld {
@ -109,6 +114,16 @@ impl LspWorldExt for tinymist_project::LspWorld {
});
deps
}
fn depended_fs_paths(&self) -> EcoVec<ImmutPath> {
let mut deps = EcoVec::new();
self.iter_dependencies(&mut |file_id| {
if let Ok(path) = self.path_for_id(file_id) {
deps.push(path.as_path().into());
}
});
deps
}
}
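
A short sketch of how the new extension method is meant to be consumed (the world, updater, and doc_id bindings are assumed); it is the file-system counterpart of depended_files, and its output is exactly what ProjectPathMaterial::from_deps expects:

// Sketch: after a successful compilation, record which absolute paths the
// build read, so the lock database can later route those paths to `doc_id`.
let deps: EcoVec<ImmutPath> = world.depended_fs_paths();
updater.update_materials(doc_id.clone(), deps);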
#[cfg(test)]

View file

@ -52,6 +52,7 @@ rayon.workspace = true
reflexo.workspace = true
reflexo-typst = { workspace = true, features = ["system"] }
reflexo-vec2svg.workspace = true
rpds.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_yaml.workspace = true

View file

@ -23,6 +23,7 @@ mod cmd;
mod init;
pub mod project;
mod resource;
mod route;
mod server;
mod stats;
mod task;

View file

@ -0,0 +1,197 @@
use std::{path::Path, sync::Arc};
use reflexo_typst::{path::unix_slash, typst::prelude::EcoVec, LazyHash};
use rpds::RedBlackTreeMapSync;
use tinymist_project::{
CompileSnapshot, Id, LockFile, LspCompilerFeat, ProjectPathMaterial, ProjectRoute,
};
use tinymist_query::LspWorldExt;
use tinymist_std::{hash::FxHashMap, ImmutPath};
use typst::diag::EcoString;
#[derive(Default)]
pub struct ProjectRouteState {
path_routes: FxHashMap<ImmutPath, RoutePathState>,
}
pub struct ProjectResolution {
pub lock_dir: ImmutPath,
pub project_id: Id,
}
impl ProjectRouteState {
pub fn locate(&self, resolved: &ProjectResolution) -> Option<Arc<LockFile>> {
let path_route = self.path_routes.get(&resolved.lock_dir)?;
Some(path_route.lock.clone())
}
pub fn resolve(&mut self, leaf: &ImmutPath) -> Option<ProjectResolution> {
for path in std::iter::successors(Some(leaf.as_ref()), |p| p.parent()) {
if let Some(resolution) = self.resolve_at(path, leaf) {
return Some(resolution);
}
}
None
}
fn resolve_at(&mut self, lock_dir: &Path, leaf: &Path) -> Option<ProjectResolution> {
log::debug!("resolve: {leaf:?} at {lock_dir:?}");
let (lock_dir, project_id) = match self.path_routes.get_key_value(lock_dir) {
Some((key, path_route)) => (key.clone(), path_route.routes.get(leaf)?.clone()),
None => {
let lock_dir: ImmutPath = lock_dir.into();
let mut new_route = self.load_lock(&lock_dir).unwrap_or_default();
let mut materials = RedBlackTreeMapSync::default();
if let Some(cache_dir) = new_route.cache_dir.as_ref() {
let entries = walkdir::WalkDir::new(cache_dir)
.into_iter()
.filter_map(|entry| entry.ok())
.filter(|entry| entry.file_type().is_file());
for entry in entries {
let material = self.read_material(entry.path());
if let Some(material) = material {
let id = material.id.clone();
materials.insert_mut(id.clone(), material);
}
}
}
let materials = LazyHash::new(materials);
new_route.routes = calculate_routes(new_route.lock.route.clone(), &materials);
new_route.materials = materials;
log::debug!("loaded routes at {lock_dir:?}, {:?}", new_route.routes);
let project_id = new_route.routes.get(leaf)?.clone();
self.path_routes.insert(lock_dir.clone(), new_route);
(lock_dir, project_id)
}
};
Some(ProjectResolution {
lock_dir,
project_id,
})
}
pub fn update_lock(&mut self, lock_dir: ImmutPath, lock: LockFile) -> Option<()> {
let path_route = self.path_routes.get_mut(&lock_dir)?;
let lock_unchanged = path_route.lock.as_ref() == &lock;
if lock_unchanged {
return Some(());
}
path_route.lock = Arc::new(lock);
path_route.routes = calculate_routes(path_route.lock.route.clone(), &path_route.materials);
Some(())
}
pub fn update_existing_material(
&mut self,
lock_dir: ImmutPath,
snap: &CompileSnapshot<LspCompilerFeat>,
) -> Option<()> {
let path_route = self.path_routes.get_mut(&lock_dir)?;
let id = Id::from_world(&snap.world)?;
let deps = snap.world.depended_fs_paths();
let material = ProjectPathMaterial::from_deps(id, deps);
let old = path_route.materials.get_mut(&material.id)?;
if old == &material {
return Some(());
}
path_route
.materials
.insert_mut(material.id.clone(), material);
path_route.routes = calculate_routes(path_route.lock.route.clone(), &path_route.materials);
Some(())
}
fn load_lock(&self, path: &Path) -> Option<RoutePathState> {
let lock_data = Arc::new(match LockFile::read(path) {
Ok(lock) => lock,
Err(e) => {
log::debug!("failed to load lock at {path:?}: {e:?}");
return None;
}
});
log::info!("loaded lock at {path:?}");
let root: EcoString = unix_slash(path).into();
let root_hash = tinymist_std::hash::hash128(&root);
let cache_dir_base = dirs::cache_dir();
let mut cache_dir = None;
if let Some(cache_dir_base) = cache_dir_base {
let root_lo = root_hash & 0xfff;
let root_hi = root_hash >> 12;
// let hash_str = format!("{root:016x}/{id:016x}");
let project_state = format!("{root_lo:03x}/{root_hi:013x}");
cache_dir = Some(
cache_dir_base
.join("tinymist/projects")
.join(project_state)
.into(),
);
}
Some(RoutePathState {
lock: lock_data,
materials: LazyHash::default(),
routes: Arc::new(FxHashMap::default()),
cache_dir,
})
}
fn read_material(&self, entry_path: &Path) -> Option<ProjectPathMaterial> {
log::info!("check material at {entry_path:?}");
let name = entry_path.file_name().unwrap_or(entry_path.as_os_str());
if name != "path-material.json" {
return None;
}
let data = std::fs::read(entry_path).ok()?;
let material = serde_json::from_slice::<ProjectPathMaterial>(&data).ok()?;
Some(material)
}
}
#[comemo::memoize]
fn calculate_routes(
raw_routes: EcoVec<ProjectRoute>,
materials: &LazyHash<rpds::RedBlackTreeMapSync<Id, ProjectPathMaterial>>,
) -> Arc<FxHashMap<ImmutPath, Id>> {
let mut routes = FxHashMap::default();
let mut priorities = FxHashMap::default();
for route in raw_routes.iter() {
if let Some(material) = materials.get(&route.id) {
for file in material.files.iter() {
routes.insert(file.as_path().into(), route.id.clone());
}
}
priorities.insert(route.id.clone(), route.priority);
}
Arc::new(routes)
}
#[derive(Default)]
struct RoutePathState {
lock: Arc<LockFile>,
materials: LazyHash<rpds::RedBlackTreeMapSync<Id, ProjectPathMaterial>>,
routes: Arc<FxHashMap<ImmutPath, Id>>,
cache_dir: Option<ImmutPath>,
}
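
To see how the pieces of this new module fit together, a hedged sketch (written as if it lived inside route.rs, e.g. as a test; all values are illustrative) of what calculate_routes produces: every file listed in a project's path material maps back to that project's Id, so resolve becomes a plain map lookup.

// Sketch: one project whose material lists two dependency paths.
let id = Id::new("book/main.typ".to_string());
let deps: EcoVec<ImmutPath> = [
    Path::new("/ws/book/main.typ").into(),
    Path::new("/ws/book/chapter1.typ").into(),
]
.into_iter()
.collect();
let material = ProjectPathMaterial::from_deps(id.clone(), deps);

let mut materials = RedBlackTreeMapSync::default();
materials.insert_mut(id.clone(), material);

let routes = calculate_routes(
    [ProjectRoute { id: id.clone(), priority: 0 }].into_iter().collect(),
    &LazyHash::new(materials),
);
assert_eq!(routes.get(Path::new("/ws/book/chapter1.typ")), Some(&id));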

View file

@ -19,13 +19,14 @@ use project::{watch_deps, LspPreviewState};
use project::{CompileHandlerImpl, Project, QuerySnapFut, QuerySnapWithStat, WorldSnapFut};
use reflexo_typst::Bytes;
use request::{RegisterCapability, UnregisterCapability};
use route::{ProjectResolution, ProjectRouteState};
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value as JsonValue};
use sync_lsp::*;
use task::{
ExportConfig, ExportTask, ExportUserConfig, FormatTask, FormatterConfig, UserActionTask,
};
use tinymist_project::{CompileSnapshot, EntryResolver, ProjectInsId};
use tinymist_project::{CompileSnapshot, EntryResolver, ProjectInsId, ProjectResolutionKind};
use tinymist_query::analysis::{Analysis, PeriscopeProvider};
use tinymist_query::{
to_typst_range, CompilerQueryRequest, CompilerQueryResponse, ExportKind, FoldRequestFeature,
@ -66,6 +67,8 @@ fn as_path_pos(inp: TextDocumentPositionParams) -> (PathBuf, Position) {
pub struct LanguageState {
/// The lsp client
pub client: TypedLspClient<Self>,
/// The lock state.
pub route: ProjectRouteState,
/// The project state.
pub project: Project,
@ -127,6 +130,7 @@ impl LanguageState {
Self {
client: client.clone(),
route: ProjectRouteState::default(),
project: handle,
editor_tx,
memory_changes: HashMap::new(),
@ -925,6 +929,56 @@ impl LanguageState {
}
}
fn resolve_task(&self, path: ImmutPath) -> TaskInputs {
let entry = self.entry_resolver().resolve(Some(path));
TaskInputs {
entry: Some(entry),
..Default::default()
}
}
fn resolve_task_with_state(&mut self, path: ImmutPath) -> TaskInputs {
let proj_input = matches!(
self.config.project_resolution,
ProjectResolutionKind::LockDatabase
)
.then(|| {
let resolution = self.route.resolve(&path)?;
let lock = self.route.locate(&resolution)?;
let ProjectResolution {
lock_dir,
project_id,
} = &resolution;
let input = lock.get_document(project_id)?;
let root = input
.root
.as_ref()
.and_then(|res| Some(res.to_abs_path(lock_dir)?.as_path().into()))
.unwrap_or_else(|| lock_dir.clone());
let main = input
.main
.as_ref()
.and_then(|main| Some(main.to_abs_path(lock_dir)?.as_path().into()))
.unwrap_or_else(|| path.clone());
let entry = self
.entry_resolver()
.resolve_with_root(Some(root), Some(main));
log::info!("resolved task with state: {path:?} -> {project_id:?} -> {entry:?}");
Some(TaskInputs {
entry: Some(entry),
..Default::default()
})
});
proj_input
.flatten()
.unwrap_or_else(|| self.resolve_task(path))
}
/// Snapshot the compiler thread for tasks
pub fn snapshot(&mut self) -> ZResult<WorldSnapFut> {
self.project.snapshot()
@ -1016,7 +1070,7 @@ impl LanguageState {
let world = snap.world.clone();
let doc_id = updater.compiled(&world)?;
updater.update_materials(doc_id.clone(), snap.world.depended_files());
updater.update_materials(doc_id.clone(), snap.world.depended_fs_paths());
updater.route(doc_id, PROJECT_ROUTE_USER_ACTION_PRIORITY);
updater.commit();
@ -1359,22 +1413,24 @@ impl LanguageState {
assert!(query.fold_feature() != FoldRequestFeature::ContextFreeUnique);
let fut_stat = self.query_snapshot_with_stat(&query)?;
let entry = query
let input = query
.associated_path()
.map(|path| self.entry_resolver().resolve(Some(path.into())))
.map(|path| self.resolve_task_with_state(path.into()))
.or_else(|| {
let root = self.entry_resolver().root(None)?;
Some(EntryState::new_rooted_by_id(root, *DETACHED_ENTRY))
Some(TaskInputs {
entry: Some(EntryState::new_rooted_by_id(root, *DETACHED_ENTRY)),
..Default::default()
})
});
just_future(async move {
let mut snap = fut_stat.fut.receive().await?;
// todo: whether it is safe to inherit success_doc with changed entry
if !is_pinning {
snap = snap.task(TaskInputs {
entry,
..Default::default()
});
if let Some(input) = input {
snap = snap.task(input);
}
}
fut_stat.stat.snap();

View file

@ -252,6 +252,7 @@ impl ExportConfig {
let _ = page;
let ppi = ppi.unwrap_or(144.) as f32;
let ppi = ppi.try_into().unwrap();
ProjectTask::ExportPng(ExportPngTask { export, ppi })
}
};

View file

@ -23,6 +23,7 @@ impl LockFileExt for LockFile {
.root
.as_ref()
.map(|root| ResourcePath::from_user_sys(Path::new(root)));
let main = ResourcePath::from_user_sys(Path::new(&args.id.input));
let font_paths = args
.font
@ -46,6 +47,7 @@ impl LockFileExt for LockFile {
let input = ProjectInput {
id: id.clone(),
root,
main: Some(main),
font_paths,
system_fonts: !args.font.ignore_system_fonts,
package_path,
@ -106,7 +108,7 @@ impl LockFileExt for LockFile {
}),
OutputFormat::Png => ProjectTask::ExportPng(ExportPngTask {
export,
ppi: args.ppi,
ppi: args.ppi.try_into().unwrap(),
}),
OutputFormat::Svg => ProjectTask::ExportSvg(ExportSvgTask { export }),
OutputFormat::Html => ProjectTask::ExportSvg(ExportSvgTask { export }),