From 20163b50f8eb0cb1b896e8496884eb756acd27db Mon Sep 17 00:00:00 2001
From: Josh
Date: Mon, 25 Aug 2025 10:34:57 -0500
Subject: [PATCH] wip

---
 crates/djls-workspace/src/bridge.rs  | 35 +++++++++--------
 crates/djls-workspace/src/db.rs      | 48 ++++++++++++-----------
 crates/djls-workspace/src/lib.rs     |  6 ++-
 crates/djls-workspace/src/vfs.rs     | 41 +++++++++++---------
 crates/djls-workspace/src/watcher.rs | 58 ++++++++++++----------
 5 files changed, 94 insertions(+), 94 deletions(-)

diff --git a/crates/djls-workspace/src/bridge.rs b/crates/djls-workspace/src/bridge.rs
index 45f92fc..78669ad 100644
--- a/crates/djls-workspace/src/bridge.rs
+++ b/crates/djls-workspace/src/bridge.rs
@@ -9,7 +9,10 @@ use std::{collections::HashMap, sync::Arc};
 use salsa::Setter;
 
 use super::{
-    db::{parse_template, template_errors, Database, FileKindMini, SourceFile, TemplateAst, TemplateLoaderOrder},
+    db::{
+        parse_template, template_errors, Database, FileKindMini, SourceFile, TemplateAst,
+        TemplateLoaderOrder,
+    },
     vfs::{FileKind, VfsSnapshot},
     FileId,
 };
@@ -31,6 +34,7 @@ pub struct FileStore {
 
 impl FileStore {
     /// Construct an empty store and DB.
+    #[must_use]
     pub fn new() -> Self {
         Self {
             db: Database::default(),
@@ -118,8 +122,7 @@ impl FileStore {
     pub fn get_template_errors(&self, id: FileId) -> Arc<[String]> {
         self.files
             .get(&id)
-            .map(|sf| template_errors(&self.db, *sf))
-            .unwrap_or_else(|| Arc::from(vec![]))
+            .map_or_else(|| Arc::from(vec![]), |sf| template_errors(&self.db, *sf))
     }
 }
 
@@ -139,7 +142,7 @@ mod tests {
     fn test_filestore_template_ast_caching() {
         let mut store = FileStore::new();
         let vfs = Vfs::default();
-        
+
         // Create a template file in VFS
         let url = url::Url::parse("file:///test.html").unwrap();
         let path = Utf8PathBuf::from("/test.html");
@@ -151,15 +154,15 @@ mod tests {
             TextSource::Overlay(content.clone()),
         );
         vfs.set_overlay(file_id, content.clone()).unwrap();
-        
+
         // Apply VFS snapshot to FileStore
         let snapshot = vfs.snapshot();
         store.apply_vfs_snapshot(&snapshot);
-        
+
         // Get template AST - should parse and cache
         let ast1 = store.get_template_ast(file_id);
         assert!(ast1.is_some());
-        
+
         // Get again - should return cached
         let ast2 = store.get_template_ast(file_id);
         assert!(ast2.is_some());
@@ -170,7 +173,7 @@ mod tests {
     fn test_filestore_template_errors() {
         let mut store = FileStore::new();
         let vfs = Vfs::default();
-        
+
         // Create a template with an unclosed tag
         let url = url::Url::parse("file:///error.html").unwrap();
         let path = Utf8PathBuf::from("/error.html");
@@ -182,16 +185,16 @@ mod tests {
             TextSource::Overlay(content.clone()),
         );
         vfs.set_overlay(file_id, content).unwrap();
-        
+
         // Apply VFS snapshot
         let snapshot = vfs.snapshot();
         store.apply_vfs_snapshot(&snapshot);
-        
+
         // Get errors - should contain parsing errors
         let errors = store.get_template_errors(file_id);
         // The template has unclosed tags, so there should be errors
         // We don't assert on specific error count as the parser may evolve
-        
+
         // Verify errors are cached
         let errors2 = store.get_template_errors(file_id);
         assert!(Arc::ptr_eq(&errors, &errors2));
@@ -201,7 +204,7 @@ mod tests {
     fn test_filestore_invalidation_on_content_change() {
         let mut store = FileStore::new();
         let vfs = Vfs::default();
-        
+
         // Create initial template
         let url = url::Url::parse("file:///change.html").unwrap();
         let path = Utf8PathBuf::from("/change.html");
@@ -213,20 +216,20 @@ mod tests {
             TextSource::Overlay(content1.clone()),
         );
         vfs.set_overlay(file_id, content1).unwrap();
-        
+
         // Apply snapshot and get AST
         let snapshot1 = vfs.snapshot();
         store.apply_vfs_snapshot(&snapshot1);
         let ast1 = store.get_template_ast(file_id);
-        
+
         // Change content
         let content2: Arc<str> = Arc::from("{% for item in items %}{{ item }}{% endfor %}");
         vfs.set_overlay(file_id, content2).unwrap();
-        
+
         // Apply new snapshot
         let snapshot2 = vfs.snapshot();
         store.apply_vfs_snapshot(&snapshot2);
-        
+
         // Get AST again - should be different due to content change
         let ast2 = store.get_template_ast(file_id);
         assert!(ast1.is_some() && ast2.is_some());
diff --git a/crates/djls-workspace/src/db.rs b/crates/djls-workspace/src/db.rs
index bbdf0de..a38757d 100644
--- a/crates/djls-workspace/src/db.rs
+++ b/crates/djls-workspace/src/db.rs
@@ -118,14 +118,17 @@ pub fn parse_template(db: &dyn salsa::Database, file: SourceFile) -> Option<Arc
         Ok((ast, errors)) => {
             // Convert errors to strings
             let error_strings = errors.into_iter().map(|e| e.to_string()).collect();
-            Some(Arc::new(TemplateAst { ast, errors: error_strings }))
-        },
+            Some(Arc::new(TemplateAst {
+                ast,
+                errors: error_strings,
+            }))
+        }
         Err(err) => {
             // Even on fatal errors, return an empty AST with the error
             Some(Arc::new(TemplateAst {
@@ -144,9 +147,7 @@ pub fn parse_template(db: &dyn salsa::Database, file: SourceFile) -> Option<Arc
 pub fn template_errors(db: &dyn salsa::Database, file: SourceFile) -> Arc<[String]> {
-    parse_template(db, file)
-        .map(|ast| Arc::from(ast.errors.clone()))
-        .unwrap_or_else(|| Arc::from(vec![]))
+    parse_template(db, file).map_or_else(|| Arc::from(vec![]), |ast| Arc::from(ast.errors.clone()))
 }
 
 #[cfg(test)]
@@ -157,19 +158,19 @@ mod tests {
     #[test]
     fn test_template_parsing_caches_result() {
         let db = Database::default();
-        
+
         // Create a template file
         let template_content: Arc<str> = Arc::from("{% if user %}Hello {{ user.name }}{% endif %}");
         let file = SourceFile::new(&db, FileKindMini::Template, template_content.clone());
-        
+
         // First parse - should execute the parsing
         let ast1 = parse_template(&db, file);
         assert!(ast1.is_some());
-        
+
         // Second parse - should return cached result (same Arc)
         let ast2 = parse_template(&db, file);
         assert!(ast2.is_some());
-        
+
         // Verify they're the same Arc (cached)
         assert!(Arc::ptr_eq(&ast1.unwrap(), &ast2.unwrap()));
     }
@@ -177,23 +178,24 @@ mod tests {
     #[test]
     fn test_template_parsing_invalidates_on_change() {
         let mut db = Database::default();
-        
+
         // Create a template file
         let template_content1: Arc<str> = Arc::from("{% if user %}Hello{% endif %}");
         let file = SourceFile::new(&db, FileKindMini::Template, template_content1);
-        
+
         // First parse
         let ast1 = parse_template(&db, file);
         assert!(ast1.is_some());
-        
+
         // Change the content
-        let template_content2: Arc<str> = Arc::from("{% for item in items %}{{ item }}{% endfor %}");
+        let template_content2: Arc<str> =
+            Arc::from("{% for item in items %}{{ item }}{% endfor %}");
         file.set_text(&mut db).to(template_content2);
-        
+
         // Parse again - should re-execute due to changed content
         let ast2 = parse_template(&db, file);
         assert!(ast2.is_some());
-        
+
         // Verify they're different Arcs (re-parsed)
         assert!(!Arc::ptr_eq(&ast1.unwrap(), &ast2.unwrap()));
     }
@@ -201,15 +203,15 @@ mod tests {
     #[test]
     fn test_non_template_files_return_none() {
         let db = Database::default();
-        
+
         // Create a Python file
         let python_content: Arc<str> = Arc::from("def hello():\n    print('Hello')");
         let file = SourceFile::new(&db, FileKindMini::Python, python_content);
-        
+
         // Should return None for non-template files
         let ast = parse_template(&db, file);
         assert!(ast.is_none());
-        
+
         // Errors should be empty for non-template files
         let errors = template_errors(&db, file);
         assert!(errors.is_empty());
@@ -218,15 +220,15 @@ mod tests {
     #[test]
     fn test_template_errors_tracked_separately() {
         let db = Database::default();
-        
+
         // Create a template with an error (unclosed tag)
         let template_content: Arc<str> = Arc::from("{% if user %}Hello {{ user.name }");
         let file = SourceFile::new(&db, FileKindMini::Template, template_content);
-        
+
         // Get errors
         let errors1 = template_errors(&db, file);
         let errors2 = template_errors(&db, file);
-        
+
         // Should be cached (same Arc)
         assert!(Arc::ptr_eq(&errors1, &errors2));
     }
 }
diff --git a/crates/djls-workspace/src/lib.rs b/crates/djls-workspace/src/lib.rs
index 83fc7d0..30c69a7 100644
--- a/crates/djls-workspace/src/lib.rs
+++ b/crates/djls-workspace/src/lib.rs
@@ -3,9 +3,11 @@ mod db;
 mod vfs;
 mod watcher;
 
-// Re-export public API
 pub use bridge::FileStore;
-pub use db::{parse_template, template_errors, Database, FileKindMini, SourceFile, TemplateAst, TemplateLoaderOrder};
+pub use db::{
+    parse_template, template_errors, Database, FileKindMini, SourceFile, TemplateAst,
+    TemplateLoaderOrder,
+};
 pub use vfs::{FileKind, FileMeta, FileRecord, Revision, TextSource, Vfs, VfsSnapshot};
 pub use watcher::{VfsWatcher, WatchConfig, WatchEvent};
diff --git a/crates/djls-workspace/src/vfs.rs b/crates/djls-workspace/src/vfs.rs
index be3ad18..08eb182 100644
--- a/crates/djls-workspace/src/vfs.rs
+++ b/crates/djls-workspace/src/vfs.rs
@@ -8,6 +8,7 @@ use anyhow::{anyhow, Result};
 use camino::Utf8PathBuf;
 use dashmap::DashMap;
 use std::collections::hash_map::DefaultHasher;
+use std::fs;
 use std::hash::{Hash, Hasher};
 use std::{
     collections::HashMap,
@@ -18,7 +19,10 @@ use std::{
     },
 };
 use url::Url;
-use super::{FileId, watcher::{VfsWatcher, WatchConfig, WatchEvent}};
+use super::{
+    watcher::{VfsWatcher, WatchConfig, WatchEvent},
+    FileId,
+};
 
 /// Monotonic counter representing global VFS state.
 ///
@@ -115,7 +119,7 @@ pub struct Vfs {
     head: AtomicU64,
     /// Optional file system watcher for external change detection
    watcher: std::sync::Mutex<Option<VfsWatcher>>,
-    /// Map from filesystem path to FileId for watcher events
+    /// Map from filesystem path to [`FileId`] for watcher events
     by_path: DashMap<Utf8PathBuf, FileId>,
 }
 
@@ -155,10 +159,6 @@ impl Vfs {
     /// (detected via hash comparison).
     ///
     /// Returns a tuple of (new global revision, whether content changed).
-    ///
-    /// # Errors
-    ///
-    /// Returns an error if the provided `FileId` does not exist in the VFS.
     pub fn set_overlay(&self, id: FileId, new_text: Arc<str>) -> Result<(Revision, bool)> {
         let mut rec = self
             .files
@@ -200,7 +200,10 @@ impl Vfs {
     /// Returns an error if file watching is disabled in the config or fails to start.
     pub fn enable_file_watching(&self, config: WatchConfig) -> Result<()> {
         let watcher = VfsWatcher::new(config)?;
-        *self.watcher.lock().unwrap() = Some(watcher);
+        *self
+            .watcher
+            .lock()
+            .map_err(|e| anyhow!("Failed to lock watcher mutex: {}", e))? = Some(watcher);
         Ok(())
     }
 
@@ -211,36 +214,38 @@ impl Vfs {
     pub fn process_file_events(&self) -> usize {
         // Get events from the watcher
         let events = {
-            let guard = self.watcher.lock().unwrap();
+            let Ok(guard) = self.watcher.lock() else {
+                return 0; // Return 0 if mutex is poisoned
+            };
             if let Some(watcher) = guard.as_ref() {
                 watcher.try_recv_events()
             } else {
                 return 0;
             }
         };
-        
+
         let mut updated_count = 0;
-        
+
         for event in events {
             match event {
                 WatchEvent::Modified(path) | WatchEvent::Created(path) => {
                     if let Err(e) = self.load_from_disk(&path) {
-                        eprintln!("Failed to load file from disk: {}: {}", path, e);
+                        eprintln!("Failed to load file from disk: {path}: {e}");
                    } else {
                        updated_count += 1;
                    }
                }
                WatchEvent::Deleted(path) => {
                    // For now, we don't remove deleted files from VFS
-                    // This maintains stable FileIds for consumers
-                    eprintln!("File deleted (keeping in VFS): {}", path);
+                    // This maintains stable `FileId`s for consumers
+                    eprintln!("File deleted (keeping in VFS): {path}");
                }
                WatchEvent::Renamed { from, to } => {
                    // Handle rename by updating the path mapping
                    if let Some(file_id) = self.by_path.remove(&from).map(|(_, id)| id) {
                        self.by_path.insert(to.clone(), file_id);
                        if let Err(e) = self.load_from_disk(&to) {
-                            eprintln!("Failed to load renamed file: {}: {}", to, e);
+                            eprintln!("Failed to load renamed file: {to}: {e}");
                        } else {
                            updated_count += 1;
                        }
@@ -256,17 +261,15 @@ impl Vfs {
     /// This method reads the file from the filesystem and updates the VFS entry
     /// if the content has changed. It's used by the file watcher to sync external changes.
     fn load_from_disk(&self, path: &Utf8PathBuf) -> Result<()> {
-        use std::fs;
-        
         // Check if we have this file tracked
         if let Some(file_id) = self.by_path.get(path).map(|entry| *entry.value()) {
             // Read content from disk
             let content = fs::read_to_string(path.as_std_path())
                 .map_err(|e| anyhow!("Failed to read file {}: {}", path, e))?;
-            
+
             let new_text = TextSource::Disk(Arc::from(content.as_str()));
             let new_hash = content_hash(&new_text);
-            
+
             // Update the file if content changed
             if let Some(mut record) = self.files.get_mut(&file_id) {
                 if record.hash != new_hash {
@@ -281,7 +284,7 @@ impl Vfs {
 
     /// Check if file watching is currently enabled.
     pub fn is_file_watching_enabled(&self) -> bool {
-        self.watcher.lock().unwrap().is_some()
+        self.watcher.lock().map(|g| g.is_some()).unwrap_or(false) // Return false if mutex is poisoned
     }
 }
diff --git a/crates/djls-workspace/src/watcher.rs b/crates/djls-workspace/src/watcher.rs
index cd6d958..57c798c 100644
--- a/crates/djls-workspace/src/watcher.rs
+++ b/crates/djls-workspace/src/watcher.rs
@@ -27,10 +27,7 @@ pub enum WatchEvent {
     /// A file was deleted
     Deleted(Utf8PathBuf),
     /// A file was renamed from one path to another
-    Renamed {
-        from: Utf8PathBuf,
-        to: Utf8PathBuf,
-    },
+    Renamed { from: Utf8PathBuf, to: Utf8PathBuf },
 }
 
 /// Configuration for the file watcher.
@@ -51,6 +48,7 @@ pub struct WatchConfig {
     pub exclude_patterns: Vec<String>,
 }
 
+// TODO: Allow for user config instead of hardcoding defaults
 impl Default for WatchConfig {
     fn default() -> Self {
         Self {
@@ -119,7 +117,7 @@ impl VfsWatcher {
         // Spawn background thread for event processing
         let config_clone = config.clone();
         let handle = thread::spawn(move || {
-            Self::process_events(event_rx, watch_tx, config_clone);
+            Self::process_events(&event_rx, &watch_tx, &config_clone);
         });
 
         Ok(Self {
@@ -134,23 +132,19 @@ impl VfsWatcher {
     ///
     /// This is a non-blocking operation that returns immediately. If no events
     /// are available, it returns an empty vector.
+    #[must_use]
     pub fn try_recv_events(&self) -> Vec<WatchEvent> {
-        match self.rx.try_recv() {
-            Ok(events) => events,
-            Err(_) => Vec::new(),
-        }
+        self.rx.try_recv().unwrap_or_default()
     }
-
-
 
     /// Background thread function for processing raw file system events.
     ///
     /// This function handles debouncing, filtering, and batching of events before
     /// sending them to the main thread for VFS synchronization.
     fn process_events(
-        event_rx: mpsc::Receiver<Event>,
-        watch_tx: mpsc::Sender<Vec<WatchEvent>>,
-        config: WatchConfig,
+        event_rx: &mpsc::Receiver<Event>,
+        watch_tx: &mpsc::Sender<Vec<WatchEvent>>,
+        config: &WatchConfig,
     ) {
         let mut pending_events: HashMap<Utf8PathBuf, WatchEvent> = HashMap::new();
         let mut last_batch_time = Instant::now();
@@ -161,12 +155,11 @@ impl VfsWatcher {
             match event_rx.recv_timeout(Duration::from_millis(50)) {
                 Ok(event) => {
                     // Process the raw notify event into our WatchEvent format
-                    if let Some(watch_events) = Self::convert_notify_event(event, &config) {
+                    if let Some(watch_events) = Self::convert_notify_event(event, config) {
                         for watch_event in watch_events {
-                            if let Some(path) = Self::get_event_path(&watch_event) {
-                                // Only keep the latest event for each path
-                                pending_events.insert(path.clone(), watch_event);
-                            }
+                            let path = Self::get_event_path(&watch_event);
+                            // Only keep the latest event for each path
+                            pending_events.insert(path.clone(), watch_event);
                         }
                     }
                 }
@@ -180,11 +173,9 @@ impl VfsWatcher {
             }
 
             // Check if we should flush pending events
-            if !pending_events.is_empty()
-                && last_batch_time.elapsed() >= debounce_duration
-            {
+            if !pending_events.is_empty() && last_batch_time.elapsed() >= debounce_duration {
                 let events: Vec<WatchEvent> = pending_events.values().cloned().collect();
-                if let Err(_) = watch_tx.send(events) {
+                if watch_tx.send(events).is_err() {
                     // Main thread disconnected, exit
                     break;
                 }
@@ -194,7 +185,7 @@ impl VfsWatcher {
         }
     }
 
-    /// Convert a notify Event into our WatchEvent format.
+    /// Convert a [`notify::Event`] into our [`WatchEvent`] format.
     fn convert_notify_event(event: Event, config: &WatchConfig) -> Option<Vec<WatchEvent>> {
         let mut watch_events = Vec::new();
 
@@ -236,8 +227,7 @@ impl VfsWatcher {
 
         // Check include patterns
         for pattern in &config.include_patterns {
-            if pattern.starts_with("*.") {
-                let extension = &pattern[2..];
+            if let Some(extension) = pattern.strip_prefix("*.") {
                 if path_str.ends_with(extension) {
                     return true;
                 }
@@ -249,13 +239,13 @@ impl VfsWatcher {
         false
     }
 
-    /// Extract the path from a WatchEvent.
-    fn get_event_path(event: &WatchEvent) -> Option<&Utf8PathBuf> {
+    /// Extract the path from a [`WatchEvent`].
+    fn get_event_path(event: &WatchEvent) -> &Utf8PathBuf {
         match event {
-            WatchEvent::Modified(path) => Some(path),
-            WatchEvent::Created(path) => Some(path),
-            WatchEvent::Deleted(path) => Some(path),
-            WatchEvent::Renamed { to, .. } => Some(to),
+            WatchEvent::Modified(path) | WatchEvent::Created(path) | WatchEvent::Deleted(path) => {
+                path
+            }
+            WatchEvent::Renamed { to, .. } => to,
         }
     }
 }
@@ -270,7 +260,6 @@ impl Drop for VfsWatcher {
 mod tests {
     use super::*;
 
-
     #[test]
     fn test_watch_config_default() {
         let config = WatchConfig::default();
@@ -327,4 +316,5 @@ mod tests {
         assert_ne!(created, deleted);
         assert_ne!(deleted, renamed);
     }
-}
\ No newline at end of file
+}
+
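
Reviewer note (not part of the patch): the snippet below is a rough sketch of how a consumer might drive the reworked Vfs/FileStore/watcher plumbing, based only on the signatures visible in this WIP diff. It assumes the crate re-exports from lib.rs land as shown and that the caller has anyhow, camino, and url available; the URL, path, and template content are hypothetical.

use std::sync::Arc;

use camino::Utf8PathBuf;
use djls_workspace::{FileKind, FileStore, TextSource, Vfs, WatchConfig};

fn main() -> anyhow::Result<()> {
    // Shared virtual file system plus the Salsa-backed FileStore bridge.
    let vfs = Vfs::default();
    let mut store = FileStore::new();

    // Register an in-memory overlay for a template (hypothetical path and content).
    let url = url::Url::parse("file:///app/templates/home.html")?;
    let path = Utf8PathBuf::from("/app/templates/home.html");
    let content: Arc<str> = Arc::from("{% if user %}Hello {{ user.name }}{% endif %}");
    let file_id = vfs.intern_file(url, path, FileKind::Template, TextSource::Overlay(content.clone()));
    vfs.set_overlay(file_id, content)?;

    // Watch for external edits; WatchConfig still uses the hardcoded defaults (see the TODO above).
    vfs.enable_file_watching(WatchConfig::default())?;

    // Push the current VFS state into the Salsa database.
    store.apply_vfs_snapshot(&vfs.snapshot());

    // Later, e.g. on an LSP tick: drain watcher events and re-sync only if something changed.
    if vfs.process_file_events() > 0 {
        store.apply_vfs_snapshot(&vfs.snapshot());
    }

    // Read the cached template diagnostics.
    let errors = store.get_template_errors(file_id);
    for error in errors.iter() {
        eprintln!("template error: {error}");
    }
    Ok(())
}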