move Salsa database ownership from Workspace to Session (#195)

Josh Thomas 2025-09-05 22:10:21 -05:00 committed by GitHub
parent 67c5574f37
commit 554bbe2d86
13 changed files with 494 additions and 446 deletions

Cargo.lock generated
View file

@@ -496,11 +496,14 @@ name = "djls-templates"
version = "0.0.0"
dependencies = [
"anyhow",
+"djls-workspace",
"insta",
+"salsa",
"serde",
"tempfile",
"thiserror 2.0.16",
"toml",
+"tower-lsp-server",
]

[[package]]

@@ -511,7 +514,6 @@ dependencies = [
"camino",
"dashmap",
"djls-project",
-"djls-templates",
"notify",
"percent-encoding",
"salsa",

View file

@@ -0,0 +1,115 @@
//! Concrete Salsa database implementation for the Django Language Server.
//!
//! This module provides the concrete [`DjangoDatabase`] that implements all
//! the database traits from workspace and template crates. This follows Ruff's
//! architecture pattern where the concrete database lives at the top level.
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use dashmap::DashMap;
use djls_templates::db::Db as TemplateDb;
use djls_workspace::db::Db as WorkspaceDb;
use djls_workspace::db::SourceFile;
use djls_workspace::FileKind;
use djls_workspace::FileSystem;
use salsa::Setter;
/// Concrete Salsa database for the Django Language Server.
///
/// This database implements all the traits from various crates:
/// - [`WorkspaceDb`] for file system access and core operations
/// - [`TemplateDb`] for template parsing and diagnostics
#[salsa::db]
#[derive(Clone)]
pub struct DjangoDatabase {
/// File system for reading file content (checks buffers first, then disk).
fs: Arc<dyn FileSystem>,
/// Maps paths to [`SourceFile`] entities for O(1) lookup.
files: Arc<DashMap<PathBuf, SourceFile>>,
storage: salsa::Storage<Self>,
}
impl DjangoDatabase {
/// Create a new [`DjangoDatabase`] with the given file system and file map.
pub fn new(file_system: Arc<dyn FileSystem>, files: Arc<DashMap<PathBuf, SourceFile>>) -> Self {
Self {
storage: salsa::Storage::new(None),
fs: file_system,
files,
}
}
/// Get an existing [`SourceFile`] for the given path without creating it.
///
/// Returns `Some(SourceFile)` if the file is already tracked, `None` otherwise.
pub fn get_file(&self, path: &Path) -> Option<SourceFile> {
self.files.get(path).map(|file_ref| *file_ref)
}
/// Get or create a [`SourceFile`] for the given path.
///
/// Files are created with an initial revision of 0 and tracked in the database's
/// `DashMap`. The `Arc` ensures cheap cloning while maintaining thread safety.
pub fn get_or_create_file(&mut self, path: &PathBuf) -> SourceFile {
if let Some(file_ref) = self.files.get(path) {
return *file_ref;
}
// File doesn't exist, so we need to create it
let kind = FileKind::from_path(path);
let file = SourceFile::new(self, kind, Arc::from(path.to_string_lossy().as_ref()), 0);
self.files.insert(path.clone(), file);
file
}
/// Check if a file is being tracked without creating it.
pub fn has_file(&self, path: &Path) -> bool {
self.files.contains_key(path)
}
/// Touch a file to mark it as modified, triggering re-evaluation of dependent queries.
///
/// Updates the file's revision number to signal that cached query results
/// depending on this file should be invalidated.
pub fn touch_file(&mut self, path: &Path) {
let Some(file_ref) = self.files.get(path) else {
tracing::debug!("File {} not tracked, skipping touch", path.display());
return;
};
let file = *file_ref;
drop(file_ref); // Explicitly drop to release the lock
let current_rev = file.revision(self);
let new_rev = current_rev + 1;
file.set_revision(self).to(new_rev);
tracing::debug!(
"Touched {}: revision {} -> {}",
path.display(),
current_rev,
new_rev
);
}
}
#[salsa::db]
impl salsa::Database for DjangoDatabase {}
#[salsa::db]
impl WorkspaceDb for DjangoDatabase {
fn fs(&self) -> Arc<dyn FileSystem> {
self.fs.clone()
}
fn read_file_content(&self, path: &Path) -> std::io::Result<String> {
self.fs.read_to_string(path)
}
}
#[salsa::db]
impl TemplateDb for DjangoDatabase {}
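
For orientation, a minimal usage sketch (editorial addition, not part of this commit): how a caller is expected to combine `get_or_create_file`, the existing `source_text` query from djls-workspace, and `touch_file`. The helper name is hypothetical.

use std::path::Path;
use djls_workspace::db::source_text;

fn reload_after_disk_change(db: &mut DjangoDatabase, path: &Path) {
    let file = db.get_or_create_file(&path.to_path_buf()); // tracked input; revision starts at 0
    let before = source_text(db, file);                    // cached until the revision changes
    db.touch_file(path);                                   // bump revision -> dependent queries rerun
    let after = source_text(db, file);                     // re-reads buffers first, then disk
    tracing::debug!("{}: {} -> {} bytes", path.display(), before.len(), after.len());
}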

View file

@@ -1,4 +1,5 @@
mod completions;
+mod db;
mod logging;
mod queue;
pub mod server;

View file

@@ -1,10 +1,15 @@
//! # LSP Session Management
//!
//! This module implements the LSP session abstraction that manages project-specific
-//! state and delegates workspace operations to the Workspace facade.
+//! state and the Salsa database for incremental computation.

+use std::path::PathBuf;
+use std::sync::Arc;
+
+use dashmap::DashMap;
use djls_conf::Settings;
use djls_project::DjangoProject;
+use djls_workspace::db::SourceFile;
use djls_workspace::paths;
use djls_workspace::PositionEncoding;
use djls_workspace::TextDocument;
@@ -12,16 +17,18 @@ use djls_workspace::Workspace;
use tower_lsp_server::lsp_types;
use url::Url;

-/// LSP Session managing project-specific state and workspace operations.
+use crate::db::DjangoDatabase;
+
+/// LSP Session managing project-specific state and database operations.
///
/// The Session serves as the main entry point for LSP operations, managing:
+/// - The Salsa database for incremental computation
/// - Project configuration and settings
/// - Client capabilities and position encoding
-/// - Workspace operations (delegated to the Workspace facade)
+/// - Workspace operations (buffers and file system)
///
-/// All document lifecycle and database operations are delegated to the
-/// encapsulated Workspace, which provides Salsa database management
-/// using the built-in Clone pattern for thread safety.
+/// Following Ruff's architecture, the concrete database lives at this level
+/// and is passed down to operations that need it.
pub struct Session {
/// The Django project configuration
project: Option<DjangoProject>,
@@ -29,11 +36,10 @@ pub struct Session {
/// LSP server settings
settings: Settings,

-/// Workspace facade that encapsulates all workspace-related functionality
+/// Workspace for buffer and file system management
///
-/// This includes document buffers, file system abstraction, and the Salsa database.
-/// The workspace provides a clean interface for document lifecycle management
-/// and database operations while maintaining proper isolation and thread safety.
+/// This manages document buffers and file system abstraction,
+/// but not the database (which is owned directly by Session).
workspace: Workspace,

#[allow(dead_code)]
@@ -41,6 +47,8 @@ pub struct Session {
/// Position encoding negotiated with client
position_encoding: PositionEncoding,
+
+db: DjangoDatabase,
}

impl Session {
@@ -66,10 +74,18 @@ impl Session {
(None, Settings::default())
};

+// Create workspace for buffer management
+let workspace = Workspace::new();
+
+// Create the concrete database with the workspace's file system
+let files = Arc::new(DashMap::new());
+let db = DjangoDatabase::new(workspace.file_system(), files);
+
Self {
+db,
project,
settings,
-workspace: Workspace::new(),
+workspace,
client_capabilities: params.capabilities.clone(),
position_encoding: PositionEncoding::negotiate(params),
}
@@ -98,52 +114,167 @@ impl Session {
self.position_encoding
}

-/// Handle opening a document - sets buffer and creates file.
+/// Execute a read-only operation with access to the database.
+pub fn with_db<F, R>(&self, f: F) -> R
+where
+F: FnOnce(&DjangoDatabase) -> R,
+{
+f(&self.db)
+}
+
+/// Execute a mutable operation with exclusive access to the database.
+pub fn with_db_mut<F, R>(&mut self, f: F) -> R
+where
+F: FnOnce(&mut DjangoDatabase) -> R,
+{
+f(&mut self.db)
+}
+
+/// Open a document in the session.
///
-/// Delegates to the workspace's document management.
+/// Updates both the workspace buffers and database. Creates the file in
+/// the database or invalidates it if it already exists.
pub fn open_document(&mut self, url: &Url, document: TextDocument) {
-tracing::debug!("Opening document: {}", url);
+// Add to workspace buffers
self.workspace.open_document(url, document);
+
+// Update database if it's a file URL
+if let Some(path) = paths::url_to_path(url) {
+// Check if file already exists (was previously read from disk)
+let already_exists = self.db.has_file(&path);
+let _file = self.db.get_or_create_file(&path);
+
+if already_exists {
+// File was already read - touch to invalidate cache
+self.db.touch_file(&path);
+}
+}
}

-/// Update a document with the given changes.
+/// Update a document with incremental changes.
///
-/// Delegates to the workspace's document management.
+/// Applies changes to the document and triggers database invalidation.
pub fn update_document(
&mut self,
url: &Url,
changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
-new_version: i32,
+version: i32,
) {
+// Update in workspace
self.workspace
-.update_document(url, changes, new_version, self.position_encoding);
+.update_document(url, changes, version, self.position_encoding);
+
+// Touch file in database to trigger invalidation
+if let Some(path) = paths::url_to_path(url) {
+if self.db.has_file(&path) {
+self.db.touch_file(&path);
+}
+}
}

-/// Handle closing a document - removes buffer and bumps revision.
+/// Close a document.
///
-/// Delegates to the workspace's document management.
+/// Removes from workspace buffers and triggers database invalidation to fall back to disk.
pub fn close_document(&mut self, url: &Url) -> Option<TextDocument> {
-tracing::debug!("Closing document: {}", url);
-self.workspace.close_document(url)
+let document = self.workspace.close_document(url);
+
+// Touch file in database to trigger re-read from disk
+if let Some(path) = paths::url_to_path(url) {
+if self.db.has_file(&path) {
+self.db.touch_file(&path);
+}
+}
+
+document
}

-/// Get an open document from the buffer layer, if it exists.
-///
-/// Delegates to the workspace's document management.
+/// Get a document from the buffer if it's open.
#[must_use]
pub fn get_document(&self, url: &Url) -> Option<TextDocument> {
self.workspace.get_document(url)
}
+
+/// Get or create a file in the database.
+pub fn get_or_create_file(&mut self, path: &PathBuf) -> SourceFile {
+self.db.get_or_create_file(path)
+}
}

impl Default for Session {
fn default() -> Self {
-Self {
-project: None,
-settings: Settings::default(),
-workspace: Workspace::new(),
-client_capabilities: lsp_types::ClientCapabilities::default(),
-position_encoding: PositionEncoding::default(),
-}
+Self::new(&lsp_types::InitializeParams::default())
}
}
#[cfg(test)]
mod tests {
use djls_workspace::db::source_text;
use djls_workspace::LanguageId;
use super::*;
#[test]
fn test_session_database_operations() {
let mut session = Session::default();
// Can create files in the database
let path = PathBuf::from("/test.py");
let file = session.get_or_create_file(&path);
// Can read file content through database
let content = session.with_db(|db| source_text(db, file).to_string());
assert_eq!(content, ""); // Non-existent file returns empty
}
#[test]
fn test_session_document_lifecycle() {
let mut session = Session::default();
let url = Url::parse("file:///test.py").unwrap();
// Open document
let document = TextDocument::new("print('hello')".to_string(), 1, LanguageId::Python);
session.open_document(&url, document);
// Should be in workspace buffers
assert!(session.get_document(&url).is_some());
// Should be queryable through database
let path = PathBuf::from("/test.py");
let file = session.get_or_create_file(&path);
let content = session.with_db(|db| source_text(db, file).to_string());
assert_eq!(content, "print('hello')");
// Close document
session.close_document(&url);
assert!(session.get_document(&url).is_none());
}
#[test]
fn test_session_document_update() {
let mut session = Session::default();
let url = Url::parse("file:///test.py").unwrap();
// Open with initial content
let document = TextDocument::new("initial".to_string(), 1, LanguageId::Python);
session.open_document(&url, document);
// Update content
let changes = vec![lsp_types::TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: "updated".to_string(),
}];
session.update_document(&url, changes, 2);
// Verify buffer was updated
let doc = session.get_document(&url).unwrap();
assert_eq!(doc.content(), "updated");
assert_eq!(doc.version(), 2);
// Database should also see updated content
let path = PathBuf::from("/test.py");
let file = session.get_or_create_file(&path);
let content = session.with_db(|db| source_text(db, file).to_string());
assert_eq!(content, "updated");
}
}
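
A hedged sketch of how an LSP request handler might use the new Session API together with the template queries added below in djls-templates (editorial addition, not part of this commit; the handler function is hypothetical, everything it calls is introduced here).

use djls_workspace::paths;
use tower_lsp_server::lsp_types;
use url::Url;

fn template_diagnostics_for(session: &mut Session, url: &Url) -> Vec<lsp_types::Diagnostic> {
    let Some(path) = paths::url_to_path(url) else {
        return Vec::new(); // non-file URLs are not tracked in the database
    };
    let file = session.get_or_create_file(&path);
    session.with_db(|db| {
        // DjangoDatabase implements the template Db trait, so the tracked
        // query from djls-templates runs against it directly.
        djls_templates::db::template_diagnostics(db, file)
            .as_ref()
            .clone()
    })
}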

View file

@@ -4,10 +4,14 @@ version = "0.0.0"
edition = "2021"

[dependencies]
+djls-workspace = { workspace = true }
anyhow = { workspace = true }
+salsa = { workspace = true }
serde = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
+tower-lsp-server = { workspace = true }

[dev-dependencies]
insta = { workspace = true }

View file

@@ -134,7 +134,7 @@ impl From<Token> for Span {
}
}

-#[derive(Clone, Debug, Error, Serialize)]
+#[derive(Clone, Debug, Error, PartialEq, Eq, Serialize)]
pub enum AstError {
#[error("Empty AST")]
EmptyAst,

View file

@@ -0,0 +1,151 @@
//! Template-specific database trait and queries.
//!
//! This module extends the workspace database trait with template-specific
//! functionality including parsing and diagnostic generation.
use std::sync::Arc;
use djls_workspace::db::SourceFile;
use djls_workspace::Db as WorkspaceDb;
use djls_workspace::FileKind;
use tower_lsp_server::lsp_types;
use crate::ast::LineOffsets;
use crate::ast::Span;
use crate::Ast;
use crate::TemplateError;
/// Template-specific database trait extending the workspace database
#[salsa::db]
pub trait Db: WorkspaceDb {
// Template-specific methods can be added here if needed
}
/// Container for a parsed Django template AST.
///
/// Stores both the parsed AST and any errors encountered during parsing.
/// This struct is designed to be cached by Salsa and shared across multiple consumers.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ParsedTemplate {
/// The parsed AST from djls-templates
pub ast: Ast,
/// Any errors encountered during parsing
pub errors: Vec<TemplateError>,
}
/// Parse a Django template file into an AST.
///
/// This Salsa tracked function parses template files on-demand and caches the results.
/// The parse is only re-executed when the file's content changes (detected via revision changes).
///
/// Returns `None` for non-template files.
#[salsa::tracked]
pub fn parse_template(db: &dyn Db, file: SourceFile) -> Option<Arc<ParsedTemplate>> {
// Only parse template files
if file.kind(db) != FileKind::Template {
return None;
}
let text_arc = djls_workspace::db::source_text(db, file);
let text = text_arc.as_ref();
// Call the pure parsing function
match crate::parse_template(text) {
Ok((ast, errors)) => Some(Arc::new(ParsedTemplate { ast, errors })),
Err(err) => {
// Even on fatal errors, return an empty AST with the error
Some(Arc::new(ParsedTemplate {
ast: Ast::default(),
errors: vec![err],
}))
}
}
}
/// Generate LSP diagnostics for a template file.
///
/// This Salsa tracked function computes diagnostics from template parsing errors
/// and caches the results. Diagnostics are only recomputed when the file changes.
#[salsa::tracked]
pub fn template_diagnostics(db: &dyn Db, file: SourceFile) -> Arc<Vec<lsp_types::Diagnostic>> {
// Parse the template to get errors
let Some(parsed) = parse_template(db, file) else {
return Arc::new(Vec::new());
};
if parsed.errors.is_empty() {
return Arc::new(Vec::new());
}
// Convert errors to diagnostics
let line_offsets = parsed.ast.line_offsets();
let diagnostics = parsed
.errors
.iter()
.map(|error| template_error_to_diagnostic(error, line_offsets))
.collect();
Arc::new(diagnostics)
}
/// Convert a [`TemplateError`] to an LSP [`Diagnostic`].
///
/// Maps template parsing and validation errors to LSP diagnostics with appropriate
/// severity levels, ranges, and metadata.
fn template_error_to_diagnostic(
error: &TemplateError,
line_offsets: &LineOffsets,
) -> lsp_types::Diagnostic {
let severity = severity_from_error(error);
let range = error
.span()
.map(|span| span_to_range(span, line_offsets))
.unwrap_or_default();
lsp_types::Diagnostic {
range,
severity: Some(severity),
code: Some(lsp_types::NumberOrString::String(error.code().to_string())),
code_description: None,
source: Some("Django Language Server".to_string()),
message: error.to_string(),
related_information: None,
tags: None,
data: None,
}
}
/// Map a [`TemplateError`] to appropriate diagnostic severity.
fn severity_from_error(error: &TemplateError) -> lsp_types::DiagnosticSeverity {
match error {
TemplateError::Lexer(_) | TemplateError::Parser(_) | TemplateError::Io(_) => {
lsp_types::DiagnosticSeverity::ERROR
}
TemplateError::Validation(_) | TemplateError::Config(_) => {
lsp_types::DiagnosticSeverity::WARNING
}
}
}
/// Convert a template [`Span`] to an LSP [`Range`] using line offsets.
#[allow(clippy::cast_possible_truncation)]
fn span_to_range(span: Span, line_offsets: &LineOffsets) -> lsp_types::Range {
let start_pos = span.start() as usize;
let end_pos = (span.start() + span.length()) as usize;
let (start_line, start_char) = line_offsets.position_to_line_col(start_pos);
let (end_line, end_char) = line_offsets.position_to_line_col(end_pos);
// Note: These casts are safe in practice as line numbers and character positions
// in source files won't exceed u32::MAX (4 billion lines/characters)
lsp_types::Range {
start: lsp_types::Position {
line: (start_line - 1) as u32, // LSP is 0-based, LineOffsets is 1-based
character: start_char as u32,
},
end: lsp_types::Position {
line: (end_line - 1) as u32, // LSP is 0-based, LineOffsets is 1-based
character: end_char as u32,
},
}
}
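
A hedged usage sketch of the caching contract described above, mirroring the tests that were removed from djls-workspace in this commit (editorial addition, not part of the diff). `DjangoDatabase` is the server's concrete database from the earlier file; the helper name is hypothetical.

use std::path::Path;
use std::sync::Arc;

fn reparse_after_edit(db: &mut DjangoDatabase, path: &Path) -> Option<Arc<ParsedTemplate>> {
    let file = db.get_or_create_file(&path.to_path_buf());
    let first = parse_template(db, file);  // parses and caches; None for non-template files
    let again = parse_template(db, file);  // no revision change -> served from the Salsa cache
    debug_assert_eq!(first.is_some(), again.is_some());

    db.touch_file(path);                   // revision bumped -> cached parse is invalidated
    parse_template(db, file)               // re-parses through the buffers-first file system
}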

View file

@@ -6,7 +6,7 @@ use crate::ast::Span;
use crate::lexer::LexerError;
use crate::parser::ParserError;

-#[derive(Debug, Error, Serialize)]
+#[derive(Clone, Debug, Error, PartialEq, Eq, Serialize)]
pub enum TemplateError {
#[error("Lexer error: {0}")]
Lexer(String),

View file

@@ -1,4 +1,5 @@
pub mod ast;
+pub mod db;
mod error;
mod lexer;
mod parser;

View file

@@ -4,7 +4,6 @@ version = "0.0.0"
edition = "2021"

[dependencies]
-djls-templates = { workspace = true }
djls-project = { workspace = true }

anyhow = { workspace = true }

View file

@@ -1,18 +1,15 @@
-//! Salsa database for incremental computation.
+//! Base database trait for workspace operations.
//!
-//! This module provides the [`Database`] which integrates with Salsa for
-//! incremental computation of Django template parsing and analysis.
+//! This module provides the base [`Db`] trait that defines file system access
+//! and core file tracking functionality. The concrete database implementation
+//! lives in the server crate, following Ruff's architecture pattern.
//!
//! ## Architecture
//!
-//! The system uses a two-layer approach:
-//! 1. **Buffer layer** ([`Buffers`]) - Stores open document content in memory
-//! 2. **Salsa layer** ([`Database`]) - Tracks files and computes derived queries
-//!
-//! When Salsa needs file content, it calls [`source_text`] which:
-//! 1. Creates a dependency on the file's revision (critical!)
-//! 2. Reads through [`WorkspaceFileSystem`] which checks buffers first
-//! 3. Falls back to disk if no buffer exists
+//! The system uses a layered trait approach:
+//! 1. **Base trait** ([`Db`]) - Defines file system access methods (this module)
+//! 2. **Extension traits** - Other crates (like djls-templates) extend this trait
+//! 3. **Concrete implementation** - Server crate implements all traits
//!
//! ## The Revision Dependency
//!
@@ -22,9 +19,6 @@
//! ```ignore
//! let _ = file.revision(db); // Creates the dependency chain!
//! ```
-//!
-//! [`Buffers`]: crate::buffers::Buffers
-//! [`WorkspaceFileSystem`]: crate::fs::WorkspaceFileSystem

use std::path::Path;
use std::path::PathBuf;
@@ -38,7 +32,7 @@ use salsa::Setter;
use crate::FileKind;
use crate::FileSystem;

-/// Database trait that provides file system access for Salsa queries
+/// Base database trait that provides file system access for Salsa queries
#[salsa::db]
pub trait Db: salsa::Database {
/// Get the file system for reading files.
@@ -51,11 +45,10 @@ pub trait Db: salsa::Database {
fn read_file_content(&self, path: &Path) -> std::io::Result<String>;
}

-/// Salsa database for incremental computation.
+/// Temporary concrete database for workspace.
///
-/// Tracks files and computes derived queries incrementally. Integrates with
-/// [`WorkspaceFileSystem`](crate::fs::WorkspaceFileSystem) to read file content,
-/// which checks buffers before falling back to disk.
+/// This will be moved to the server crate in the refactoring.
+/// For now, it's kept here to avoid breaking existing code.
#[salsa::db]
#[derive(Clone)]
pub struct Database {
@@ -246,181 +239,5 @@ pub struct FilePath {
pub path: Arc<str>,
}

+// Template-specific functionality has been moved to djls-templates crate
+// See djls_templates::db for template parsing and diagnostics
-/// Container for a parsed Django template AST.
-///
/// [`TemplateAst`] wraps the parsed AST from djls-templates along with any parsing errors.
/// This struct is designed to be cached by Salsa and shared across multiple consumers
/// without re-parsing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TemplateAst {
/// The parsed AST from djls-templates
pub ast: djls_templates::Ast,
/// Any errors encountered during parsing (stored as strings for simplicity)
pub errors: Vec<String>,
}
/// Parse a Django template file into an AST.
///
/// This Salsa tracked function parses template files on-demand and caches the results.
/// The parse is only re-executed when the file's content changes (detected via content changes).
///
/// Returns `None` for non-template files.
#[salsa::tracked]
pub fn parse_template(db: &dyn Db, file: SourceFile) -> Option<Arc<TemplateAst>> {
// Only parse template files
if file.kind(db) != FileKind::Template {
return None;
}
let text_arc = source_text(db, file);
let text = text_arc.as_ref();
// Call the pure parsing function from djls-templates
// TODO: Move this whole function into djls-templates
match djls_templates::parse_template(text) {
Ok((ast, errors)) => {
// Convert errors to strings
let error_strings = errors.into_iter().map(|e| e.to_string()).collect();
Some(Arc::new(TemplateAst {
ast,
errors: error_strings,
}))
}
Err(err) => {
// Even on fatal errors, return an empty AST with the error
Some(Arc::new(TemplateAst {
ast: djls_templates::Ast::default(),
errors: vec![err.to_string()],
}))
}
}
}
#[cfg(test)]
mod tests {
use dashmap::DashMap;
use salsa::Setter;
use super::*;
use crate::buffers::Buffers;
use crate::document::TextDocument;
use crate::fs::InMemoryFileSystem;
use crate::fs::WorkspaceFileSystem;
use crate::language::LanguageId;
#[test]
fn test_parse_template_with_overlay() {
// Create a memory filesystem with initial template content
let mut memory_fs = InMemoryFileSystem::new();
let template_path = PathBuf::from("/test/template.html");
memory_fs.add_file(
template_path.clone(),
"{% block content %}Original{% endblock %}".to_string(),
);
// Create overlay storage
let buffers = Buffers::new();
// Create WorkspaceFileSystem that checks overlays first
let file_system = Arc::new(WorkspaceFileSystem::new(
buffers.clone(),
Arc::new(memory_fs),
));
// Create database with the file system
let files = Arc::new(DashMap::new());
let mut db = Database::new(file_system, files);
// Create a SourceFile for the template
let file = db.get_or_create_file(&template_path);
// Parse template - should get original content from disk
let ast1 = parse_template(&db, file).expect("Should parse template");
assert!(ast1.errors.is_empty(), "Should have no errors");
// Add an overlay with updated content
let url = crate::paths::path_to_url(&template_path).unwrap();
let updated_document = TextDocument::new(
"{% block content %}Updated from overlay{% endblock %}".to_string(),
2,
LanguageId::Other,
);
buffers.open(url, updated_document);
// Bump the file revision to trigger re-parse
file.set_revision(&mut db).to(1);
// Parse again - should now get overlay content
let ast2 = parse_template(&db, file).expect("Should parse template");
assert!(ast2.errors.is_empty(), "Should have no errors");
// Verify the content changed (we can't directly check the text,
// but the AST should be different)
// The AST will have different content in the block
assert_ne!(
format!("{:?}", ast1.ast),
format!("{:?}", ast2.ast),
"AST should change when overlay is added"
);
}
#[test]
fn test_parse_template_invalidation_on_revision_change() {
// Create a memory filesystem
let mut memory_fs = InMemoryFileSystem::new();
let template_path = PathBuf::from("/test/template.html");
memory_fs.add_file(
template_path.clone(),
"{% if true %}Initial{% endif %}".to_string(),
);
// Create overlay storage
let buffers = Buffers::new();
// Create WorkspaceFileSystem
let file_system = Arc::new(WorkspaceFileSystem::new(
buffers.clone(),
Arc::new(memory_fs),
));
// Create database
let files = Arc::new(DashMap::new());
let mut db = Database::new(file_system, files);
// Create a SourceFile for the template
let file = db.get_or_create_file(&template_path);
// Parse template first time
let ast1 = parse_template(&db, file).expect("Should parse");
// Parse again without changing revision - should return same Arc (cached)
let ast2 = parse_template(&db, file).expect("Should parse");
assert!(Arc::ptr_eq(&ast1, &ast2), "Should return cached result");
// Update overlay content
let url = crate::paths::path_to_url(&template_path).unwrap();
let updated_document = TextDocument::new(
"{% if false %}Changed{% endif %}".to_string(),
2,
LanguageId::Other,
);
buffers.open(url, updated_document);
// Bump revision to trigger invalidation
file.set_revision(&mut db).to(1);
// Parse again - should get different result due to invalidation
let ast3 = parse_template(&db, file).expect("Should parse");
assert!(
!Arc::ptr_eq(&ast1, &ast3),
"Should re-execute after revision change"
);
// Content should be different
assert_ne!(
format!("{:?}", ast1.ast),
format!("{:?}", ast3.ast),
"AST should be different after content change"
);
}
}
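
The existing `source_text` query is unchanged and its body is not part of this diff. As an editorial illustration of the rule documented above, any query that reads file content through this trait takes roughly the following shape; the function name and the `path` accessor on `SourceFile` are assumptions made for the sketch.

#[salsa::tracked]
fn tracked_source(db: &dyn Db, file: SourceFile) -> Arc<str> {
    let _ = file.revision(db);                        // register the revision dependency first
    let path = PathBuf::from(file.path(db).as_ref()); // `path` accessor assumed for illustration
    match db.read_file_content(&path) {               // buffers are consulted before disk
        Ok(text) => Arc::from(text),
        Err(_) => Arc::from(""),                      // missing files read as empty
    }
}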

View file

@@ -25,6 +25,8 @@ use std::path::Path;
pub use buffers::Buffers;
pub use db::Database;
+pub use db::Db;
+pub use db::SourceFile;
pub use document::TextDocument;
pub use encoding::PositionEncoding;
pub use fs::FileSystem;

View file

@@ -1,92 +1,73 @@
-//! Workspace facade for managing all workspace components
+//! Workspace facade for managing buffer and file system components
//!
-//! This module provides the [`Workspace`] struct that encapsulates all workspace
-//! components including buffers, file system, file tracking, and database handle.
-//! This provides a clean API boundary between server and workspace layers.
+//! This module provides the [`Workspace`] struct that encapsulates buffer
+//! management and file system abstraction. The Salsa database is managed
+//! at the Session level, following Ruff's architecture pattern.

-use std::path::Path;
use std::sync::Arc;

-use dashmap::DashMap;
use tower_lsp_server::lsp_types::TextDocumentContentChangeEvent;
use url::Url;

use crate::buffers::Buffers;
-use crate::db::Database;
use crate::document::TextDocument;
+use crate::fs::FileSystem;
use crate::fs::OsFileSystem;
use crate::fs::WorkspaceFileSystem;
-use crate::paths::url_to_path;

-/// Workspace facade that encapsulates all workspace components.
+/// Workspace facade that manages buffers and file system.
///
-/// This struct provides a unified interface for managing workspace state,
-/// including in-memory buffers, file system abstraction, and the Salsa database.
+/// This struct provides a unified interface for managing document buffers
+/// and file system operations. The Salsa database is managed at a higher
+/// level (Session) and passed in when needed for operations.
pub struct Workspace {
/// Thread-safe shared buffer storage for open documents
buffers: Buffers,
-/// Salsa database for incremental computation
-db: Database,
+/// File system abstraction that checks buffers first, then disk
+file_system: Arc<WorkspaceFileSystem>,
}

impl Workspace {
-/// Create a new [`Workspace`] with all components initialized.
+/// Create a new [`Workspace`] with buffers and file system initialized.
#[must_use]
pub fn new() -> Self {
let buffers = Buffers::new();
-let files = Arc::new(DashMap::new());
let file_system = Arc::new(WorkspaceFileSystem::new(
buffers.clone(),
Arc::new(OsFileSystem),
));
-let db = Database::new(file_system, files);

-Self { buffers, db }
+Self {
+buffers,
+file_system,
+}
}

-/// Execute a read-only operation with access to the database.
-pub fn with_db<F, R>(&self, f: F) -> R
-where
-F: FnOnce(&Database) -> R,
-{
-f(&self.db)
-}
+/// Get the file system for this workspace.
+///
+/// The file system checks buffers first, then falls back to disk.
+#[must_use]
+pub fn file_system(&self) -> Arc<dyn FileSystem> {
+self.file_system.clone()
+}

-/// Execute a mutable operation with exclusive access to the database.
-pub fn with_db_mut<F, R>(&mut self, f: F) -> R
-where
-F: FnOnce(&mut Database) -> R,
-{
-f(&mut self.db)
-}
+/// Get the buffers for direct access.
+#[must_use]
+pub fn buffers(&self) -> &Buffers {
+&self.buffers
+}

/// Open a document in the workspace.
///
-/// Updates both the buffer layer and database layer. Creates the file in
-/// the database or invalidates it if it already exists.
+/// Adds the document to the buffer layer. The database should be
+/// notified separately by the caller if invalidation is needed.
pub fn open_document(&mut self, url: &Url, document: TextDocument) {
-// Layer 1: Add to buffers
self.buffers.open(url.clone(), document);
-
-// Layer 2: Create file and touch if it already exists
-if let Some(path) = url_to_path(url) {
-self.with_db_mut(|db| {
-// Check if file already exists (was previously read from disk)
-let already_exists = db.has_file(&path);
-let _file = db.get_or_create_file(&path);
-
-if already_exists {
-// File was already read - touch to invalidate cache
-db.touch_file(&path);
-}
-// Note: New files automatically start at revision 0, no additional action needed
-});
-}
}

/// Update a document with incremental changes.
///
-/// Applies changes to the existing document and triggers database invalidation.
+/// Applies changes to the existing document in buffers.
/// Falls back to full replacement if the document isn't currently open.
pub fn update_document(
&mut self,
@@ -110,25 +91,14 @@ impl Workspace {
self.buffers.open(url.clone(), document);
}
-
-// Touch file in database to trigger invalidation
-if let Some(path) = url_to_path(url) {
-self.invalidate_file_if_exists(&path);
-}
}

/// Close a document and return it.
///
-/// Removes from buffers and triggers database invalidation to fall back to disk.
+/// Removes from buffers. The database should be notified
+/// separately by the caller if invalidation is needed.
pub fn close_document(&mut self, url: &Url) -> Option<TextDocument> {
-let document = self.buffers.close(url);
-
-// Touch file in database to trigger re-read from disk
-if let Some(path) = url_to_path(url) {
-self.invalidate_file_if_exists(&path);
-}
-
-document
+self.buffers.close(url)
}
/// Get a document from the buffer if it's open. /// Get a document from the buffer if it's open.
@@ -138,17 +108,6 @@ impl Workspace {
pub fn get_document(&self, url: &Url) -> Option<TextDocument> {
self.buffers.get(url)
}
-
-/// Invalidate a file if it exists in the database.
-///
-/// Used by document lifecycle methods to trigger cache invalidation.
-fn invalidate_file_if_exists(&mut self, path: &Path) {
-self.with_db_mut(|db| {
-if db.has_file(path) {
-db.touch_file(path);
-}
-});
-}
}
impl Default for Workspace { impl Default for Workspace {
@@ -159,88 +118,14 @@ impl Default for Workspace {
#[cfg(test)]
mod tests {
-use std::path::PathBuf;
use tempfile::tempdir;

use super::*;
-use crate::db::source_text;
use crate::encoding::PositionEncoding;
use crate::LanguageId;
#[test]
fn test_with_db_read() {
// Read-only access works
let workspace = Workspace::new();
let result = workspace.with_db(|db| {
// Can perform read operations
db.has_file(&PathBuf::from("test.py"))
});
assert!(!result); // File doesn't exist yet
}
#[test]
fn test_with_db_mut() {
// Mutation access works
let mut workspace = Workspace::new();
// Create a file through mutation
workspace.with_db_mut(|db| {
let path = PathBuf::from("test.py");
let _file = db.get_or_create_file(&path);
});
// Verify it exists
let exists = workspace.with_db(|db| db.has_file(&PathBuf::from("test.py")));
assert!(exists);
}
#[test]
fn test_multiple_reads() {
// Multiple with_db calls work correctly with Clone pattern
let workspace = Workspace::new();
// Multiple reads work fine
let result1 = workspace.with_db(|db| db.has_file(&PathBuf::from("file1.py")));
let result2 = workspace.with_db(|db| db.has_file(&PathBuf::from("file2.py")));
// Both should return false since no files were created
assert!(!result1);
assert!(!result2);
}
#[test]
fn test_sequential_mutations() {
// Multiple with_db_mut calls work in sequence
let mut workspace = Workspace::new();
// First mutation
workspace.with_db_mut(|db| {
let _file = db.get_or_create_file(&PathBuf::from("first.py"));
});
// Second mutation
workspace.with_db_mut(|db| {
let _file = db.get_or_create_file(&PathBuf::from("second.py"));
});
// Both files should exist
let (has_first, has_second) = workspace.with_db(|db| {
(
db.has_file(&PathBuf::from("first.py")),
db.has_file(&PathBuf::from("second.py")),
)
});
assert!(has_first);
assert!(has_second);
}
#[test]
fn test_open_document() {
-// Open doc → appears in buffers → queryable via db
let mut workspace = Workspace::new();
let url = Url::parse("file:///test.py").unwrap();
@@ -250,19 +135,10 @@ mod tests {
// Should be in buffers
assert!(workspace.buffers.get(&url).is_some());
-
-// Should be queryable through database
-let content = workspace.with_db_mut(|db| {
-let file = db.get_or_create_file(&PathBuf::from("/test.py"));
-source_text(db, file).to_string()
-});
-assert_eq!(content, "print('hello')");
}
#[test]
fn test_update_document() {
-// Update changes buffer content
let mut workspace = Workspace::new();
let url = Url::parse("file:///test.py").unwrap();
@@ -286,7 +162,6 @@ mod tests {
#[test]
fn test_close_document() {
-// Close removes from buffers
let mut workspace = Workspace::new();
let url = Url::parse("file:///test.py").unwrap();
@@ -303,8 +178,7 @@ mod tests {
}

#[test]
-fn test_buffer_takes_precedence_over_disk() {
-// Open doc content overrides file system
+fn test_file_system_checks_buffers_first() {
let temp_dir = tempdir().unwrap();
let file_path = temp_dir.path().join("test.py");
std::fs::write(&file_path, "disk content").unwrap();

@@ -316,57 +190,8 @@ mod tests {
let document = TextDocument::new("buffer content".to_string(), 1, LanguageId::Python);
workspace.open_document(&url, document);

-// Database should return buffer content, not disk content
-let content = workspace.with_db_mut(|db| {
-let file = db.get_or_create_file(&file_path);
-source_text(db, file).to_string()
-});
+// File system should return buffer content, not disk content
+let content = workspace.file_system().read_to_string(&file_path).unwrap();
assert_eq!(content, "buffer content");
}
#[test]
fn test_missing_file_returns_empty() {
// Non-existent files return "" not error
let mut workspace = Workspace::new();
let content = workspace.with_db_mut(|db| {
let file = db.get_or_create_file(&PathBuf::from("/nonexistent.py"));
source_text(db, file).to_string()
});
assert_eq!(content, "");
}
#[test]
fn test_file_invalidation_on_touch() {
// touch_file triggers Salsa recomputation
let temp_dir = tempdir().unwrap();
let file_path = temp_dir.path().join("test.py");
std::fs::write(&file_path, "version 1").unwrap();
let mut workspace = Workspace::new();
// First read
let content1 = workspace.with_db_mut(|db| {
let file = db.get_or_create_file(&file_path);
source_text(db, file).to_string()
});
assert_eq!(content1, "version 1");
// Update file on disk
std::fs::write(&file_path, "version 2").unwrap();
// Touch to invalidate
workspace.with_db_mut(|db| {
db.touch_file(&file_path);
});
// Should read new content
let content2 = workspace.with_db_mut(|db| {
let file = db.get_or_create_file(&file_path);
source_text(db, file).to_string()
});
assert_eq!(content2, "version 2");
}
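
Finally, an editorial sketch (not part of this commit) of the composition the change enables: a concrete Salsa database is built from `Workspace::file_system()` instead of living inside the Workspace. It is written as if inside the djls-workspace crate, using the temporary `Database` kept there; it assumes `Database::new` accepts the `Arc<dyn FileSystem>` returned here, as the server's `DjangoDatabase::new` does, and the path and content are illustrative.

use std::path::PathBuf;
use std::sync::Arc;
use dashmap::DashMap;
use url::Url;

fn compose_workspace_and_db() {
    let mut workspace = Workspace::new();
    // Database is constructed outside the Workspace, from its buffers-first file system.
    let mut db = crate::db::Database::new(workspace.file_system(), Arc::new(DashMap::new()));

    let url = Url::parse("file:///proj/templates/index.html").unwrap();
    let doc = TextDocument::new("{% block a %}{% endblock %}".to_string(), 1, crate::LanguageId::Other);
    workspace.open_document(&url, doc);

    // The database sees the buffer content because reads go through the workspace's file system.
    let file = db.get_or_create_file(&PathBuf::from("/proj/templates/index.html"));
    let text = crate::db::source_text(&db, file);
    assert_eq!(text.as_ref(), "{% block a %}{% endblock %}");
}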
} }