mirror of
https://github.com/joshuadavidthomas/django-language-server.git
synced 2025-09-11 04:46:38 +00:00
wip
This commit is contained in:
parent
541200cbb1
commit
3131470cce
19 changed files with 761 additions and 430 deletions
|
@ -5,6 +5,7 @@ edition = "2021"
|
|||
|
||||
[dependencies]
|
||||
djls-templates = { workspace = true }
|
||||
djls-project = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
camino = { workspace = true }
|
||||
|
@ -12,6 +13,7 @@ dashmap = { workspace = true }
|
|||
notify = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tower-lsp-server = { workspace = true }
|
||||
url = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
|
|
|
@ -4,15 +4,20 @@
|
|||
//! It ensures we only touch Salsa when content or classification changes, maximizing
|
||||
//! incremental performance.
|
||||
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
use salsa::Setter;
|
||||
|
||||
use super::{
|
||||
db::{parse_template, template_errors, Database, SourceFile, TemplateAst, TemplateLoaderOrder},
|
||||
vfs::{FileKind, VfsSnapshot},
|
||||
FileId,
|
||||
};
|
||||
use super::db::parse_template;
|
||||
use super::db::template_errors;
|
||||
use super::db::Database;
|
||||
use super::db::SourceFile;
|
||||
use super::db::TemplateAst;
|
||||
use super::db::TemplateLoaderOrder;
|
||||
use super::vfs::FileKind;
|
||||
use super::vfs::VfsSnapshot;
|
||||
use super::FileId;
|
||||
|
||||
/// Owner of the Salsa [`Database`] plus the handles for updating inputs.
|
||||
///
|
||||
|
@ -63,7 +68,7 @@ impl FileStore {
|
|||
///
|
||||
/// The method is idempotent and minimizes Salsa invalidations by checking for
|
||||
/// actual changes before updating inputs.
|
||||
pub fn apply_vfs_snapshot(&mut self, snap: &VfsSnapshot) {
|
||||
pub(crate) fn apply_vfs_snapshot(&mut self, snap: &VfsSnapshot) {
|
||||
for (id, rec) in &snap.files {
|
||||
let new_text = snap.get_text(*id).unwrap_or_else(|| Arc::<str>::from(""));
|
||||
let new_kind = rec.meta.kind;
|
||||
|
@ -86,14 +91,14 @@ impl FileStore {
|
|||
/// Get the text content of a file by its [`FileId`].
|
||||
///
|
||||
/// Returns `None` if the file is not tracked in the [`FileStore`].
|
||||
pub fn file_text(&self, id: FileId) -> Option<Arc<str>> {
|
||||
pub(crate) fn file_text(&self, id: FileId) -> Option<Arc<str>> {
|
||||
self.files.get(&id).map(|sf| sf.text(&self.db).clone())
|
||||
}
|
||||
|
||||
/// Get the file kind classification by its [`FileId`].
|
||||
///
|
||||
/// Returns `None` if the file is not tracked in the [`FileStore`].
|
||||
pub fn file_kind(&self, id: FileId) -> Option<FileKind> {
|
||||
pub(crate) fn file_kind(&self, id: FileId) -> Option<FileKind> {
|
||||
self.files.get(&id).map(|sf| sf.kind(&self.db))
|
||||
}
|
||||
|
||||
|
@ -102,7 +107,7 @@ impl FileStore {
|
|||
/// This method leverages Salsa's incremental computation to cache parsed ASTs.
|
||||
/// The AST is only re-parsed when the file's content changes in the VFS.
|
||||
/// Returns `None` if the file is not tracked or is not a template file.
|
||||
pub fn get_template_ast(&self, id: FileId) -> Option<Arc<TemplateAst>> {
|
||||
pub(crate) fn get_template_ast(&self, id: FileId) -> Option<Arc<TemplateAst>> {
|
||||
let source_file = self.files.get(&id)?;
|
||||
parse_template(&self.db, *source_file)
|
||||
}
|
||||
|
@ -112,7 +117,7 @@ impl FileStore {
|
|||
/// This method provides quick access to template errors without needing the full AST.
|
||||
/// Useful for diagnostics and error reporting. Returns an empty slice for
|
||||
/// non-template files or files not tracked in the store.
|
||||
pub fn get_template_errors(&self, id: FileId) -> Arc<[String]> {
|
||||
pub(crate) fn get_template_errors(&self, id: FileId) -> Arc<[String]> {
|
||||
self.files
|
||||
.get(&id)
|
||||
.map_or_else(|| Arc::from(vec![]), |sf| template_errors(&self.db, *sf))
|
||||
|
@ -127,10 +132,12 @@ impl Default for FileStore {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::vfs::{TextSource, Vfs};
|
||||
use camino::Utf8PathBuf;
|
||||
|
||||
use super::*;
|
||||
use crate::vfs::TextSource;
|
||||
use crate::vfs::Vfs;
|
||||
|
||||
#[test]
|
||||
fn test_filestore_template_ast_caching() {
|
||||
let mut store = FileStore::new();
|
||||
|
|
|
@ -7,6 +7,8 @@ use std::sync::Arc;
|
|||
#[cfg(test)]
|
||||
use std::sync::Mutex;
|
||||
|
||||
use djls_templates::Ast;
|
||||
|
||||
use crate::vfs::FileKind;
|
||||
|
||||
/// Salsa database root for workspace
|
||||
|
@ -139,9 +141,10 @@ pub fn template_errors(db: &dyn salsa::Database, file: SourceFile) -> Arc<[Strin
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use salsa::Setter;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_template_parsing_caches_result() {
|
||||
let db = Database::default();
|
||||
|
|
34
crates/djls-workspace/src/document/language.rs
Normal file
34
crates/djls-workspace/src/document/language.rs
Normal file
|
@ -0,0 +1,34 @@
|
|||
use crate::vfs::FileKind;
|
||||
|
||||
/// LSP `textDocument.languageId` values that the server distinguishes.
///
/// Anything that is not Python or a Django/HTML template collapses into
/// [`LanguageId::Other`].
#[derive(Clone, Debug, PartialEq)]
pub enum LanguageId {
    HtmlDjango,
    Other,
    Python,
}

impl From<&str> for LanguageId {
    /// Map a raw LSP language identifier string onto a known variant.
    fn from(id: &str) -> Self {
        if id == "python" {
            Self::Python
        } else if id == "django-html" || id == "htmldjango" {
            Self::HtmlDjango
        } else {
            Self::Other
        }
    }
}

impl From<String> for LanguageId {
    /// Owned-string convenience; delegates to the `&str` conversion.
    fn from(id: String) -> Self {
        id.as_str().into()
    }
}
|
||||
|
||||
impl From<LanguageId> for FileKind {
|
||||
fn from(language_id: LanguageId) -> Self {
|
||||
match language_id {
|
||||
LanguageId::Python => Self::Python,
|
||||
LanguageId::HtmlDjango => Self::Template,
|
||||
LanguageId::Other => Self::Other,
|
||||
}
|
||||
}
|
||||
}
|
87
crates/djls-workspace/src/document/line_index.rs
Normal file
87
crates/djls-workspace/src/document/line_index.rs
Normal file
|
@ -0,0 +1,87 @@
|
|||
use tower_lsp_server::lsp_types::Position;
|
||||
|
||||
/// Precomputed line-start tables for a document, used to convert between
/// LSP `Position`s (line + character) and flat offsets into the text.
///
/// Two parallel tables are kept — UTF-8 byte offsets and UTF-16 code-unit
/// offsets — because LSP positions are expressed in UTF-16 by default.
#[derive(Clone, Debug)]
pub struct LineIndex {
    /// Byte offset of the start of each line; entry 0 is always 0.
    pub line_starts: Vec<u32>,
    /// UTF-16 code-unit offset of the start of each line; parallel to `line_starts`.
    pub line_starts_utf16: Vec<u32>,
    /// Total length of the indexed text in bytes.
    pub length: u32,
    /// Total length of the indexed text in UTF-16 code units.
    pub length_utf16: u32,
}
|
||||
|
||||
impl LineIndex {
    /// Build the index by scanning `text` once, recording the UTF-8 and
    /// UTF-16 offset of the position just after every `'\n'` (i.e. the start
    /// of the following line).
    pub fn new(text: &str) -> Self {
        let mut line_starts = vec![0];
        let mut line_starts_utf16 = vec![0];
        let mut pos_utf8 = 0;
        let mut pos_utf16 = 0;

        for c in text.chars() {
            // len_utf8() <= 4 and len_utf16() <= 2, so these conversions can
            // never actually fail; unwrap_or(0) is purely defensive.
            pos_utf8 += u32::try_from(c.len_utf8()).unwrap_or(0);
            pos_utf16 += u32::try_from(c.len_utf16()).unwrap_or(0);
            if c == '\n' {
                line_starts.push(pos_utf8);
                line_starts_utf16.push(pos_utf16);
            }
        }

        Self {
            line_starts,
            line_starts_utf16,
            length: pos_utf8,
            length_utf16: pos_utf16,
        }
    }

    /// Convert a position to a byte offset: line's byte start plus the
    /// character column.
    ///
    /// NOTE(review): this adds `position.character` (a UTF-16 column in LSP
    /// terms) directly to a byte offset, which is only correct for ASCII
    /// lines — use [`Self::offset_utf16`] for client-supplied positions.
    /// Returns `None` if the line number is out of range; the column is not
    /// bounds-checked against the line length.
    pub fn offset(&self, position: Position) -> Option<u32> {
        let line_start = self.line_starts.get(position.line as usize)?;

        Some(line_start + position.character)
    }

    /// Convert UTF-16 LSP position to UTF-8 byte offset
    ///
    /// Walks the characters of the target line, accumulating UTF-16 and
    /// UTF-8 widths in lockstep until the requested UTF-16 column is reached.
    /// Returns `None` if the line is out of range or the line's byte span is
    /// not valid in `text` (e.g. a stale index against newer text).
    pub fn offset_utf16(&self, position: Position, text: &str) -> Option<u32> {
        let line_start_utf8 = self.line_starts.get(position.line as usize)?;
        // Looked up only to validate the line exists in the UTF-16 table.
        let _line_start_utf16 = self.line_starts_utf16.get(position.line as usize)?;

        // If position is at start of line, return UTF-8 line start
        if position.character == 0 {
            return Some(*line_start_utf8);
        }

        // Find the line text
        let next_line_start = self
            .line_starts
            .get(position.line as usize + 1)
            .copied()
            .unwrap_or(self.length);

        // Checked slice: returns None instead of panicking on bad bounds.
        let line_text = text.get(*line_start_utf8 as usize..next_line_start as usize)?;

        // Convert UTF-16 character offset to UTF-8 byte offset within the line
        let mut utf16_pos = 0;
        let mut utf8_pos = 0;

        for c in line_text.chars() {
            if utf16_pos >= position.character {
                break;
            }
            utf16_pos += u32::try_from(c.len_utf16()).unwrap_or(0);
            utf8_pos += u32::try_from(c.len_utf8()).unwrap_or(0);
        }

        Some(line_start_utf8 + utf8_pos)
    }

    /// Convert a byte offset back to a (line, character) position.
    ///
    /// The character column is computed in bytes from the line start, so it
    /// mirrors [`Self::offset`] (ASCII-accurate only).
    #[allow(dead_code)]
    pub fn position(&self, offset: u32) -> Position {
        // Ok(i): offset is exactly the start of line i.
        // Err(i): offset falls inside line i-1 (line_starts[0] == 0 guarantees
        // i >= 1 for any offset > 0, so the subtraction cannot underflow).
        let line = match self.line_starts.binary_search(&offset) {
            Ok(line) => line,
            Err(line) => line - 1,
        };

        let line_start = self.line_starts[line];
        let character = offset - line_start;

        Position::new(u32::try_from(line).unwrap_or(0), character)
    }
}
|
132
crates/djls-workspace/src/document/mod.rs
Normal file
132
crates/djls-workspace/src/document/mod.rs
Normal file
|
@ -0,0 +1,132 @@
|
|||
mod language;
|
||||
mod line_index;
|
||||
mod store;
|
||||
mod template;
|
||||
|
||||
pub use language::LanguageId;
|
||||
pub use line_index::LineIndex;
|
||||
pub use store::DocumentStore;
|
||||
pub use template::ClosingBrace;
|
||||
pub use template::TemplateTagContext;
|
||||
use tower_lsp_server::lsp_types::Position;
|
||||
use tower_lsp_server::lsp_types::Range;
|
||||
|
||||
use crate::FileId;
|
||||
|
||||
/// Per-document LSP metadata for an open file.
///
/// The text itself lives in the VFS (looked up via `file_id`); this type
/// carries the client-facing identifiers plus a [`LineIndex`] for
/// position/offset mapping.
#[derive(Clone, Debug)]
pub struct TextDocument {
    /// Document URI exactly as sent by the client, stored as a string key.
    pub uri: String,
    /// Last version number received from the client.
    pub version: i32,
    /// Editor language id mapped onto a known variant.
    pub language_id: LanguageId,
    /// Handle into the VFS where the actual text content is stored.
    pub(crate) file_id: FileId,
    /// Line-start tables; must be rebuilt whenever the content changes.
    line_index: LineIndex,
}
|
||||
|
||||
impl TextDocument {
    /// Construct document metadata for freshly opened content, building the
    /// [`LineIndex`] from `content` up front.
    pub(crate) fn new(
        uri: String,
        version: i32,
        language_id: LanguageId,
        file_id: FileId,
        content: &str,
    ) -> Self {
        let line_index = LineIndex::new(content);
        Self {
            uri,
            version,
            language_id,
            file_id,
            line_index,
        }
    }

    /// VFS handle for this document's text.
    pub(crate) fn file_id(&self) -> FileId {
        self.file_id
    }

    /// Borrow the line index for position mapping.
    pub fn line_index(&self) -> &LineIndex {
        &self.line_index
    }

    /// Identity pass-through: the document does not own its text, so callers
    /// supply it and get the same slice back.
    pub fn get_content<'a>(&self, content: &'a str) -> &'a str {
        content
    }

    /// Extract line `line` (including its trailing newline, if any) from
    /// `content` using the cached line index.
    ///
    /// NOTE(review): slices `content` with index ranges from the line index —
    /// assumes `content` is the same text the index was built from; a stale
    /// index can panic here.
    pub fn get_line(&self, content: &str, line: u32) -> Option<String> {
        let line_start = *self.line_index.line_starts.get(line as usize)?;
        // Last line has no successor entry; fall back to end-of-text.
        let line_end = self
            .line_index
            .line_starts
            .get(line as usize + 1)
            .copied()
            .unwrap_or(self.line_index.length);

        Some(content[line_start as usize..line_end as usize].to_string())
    }

    /// Extract the text covered by an LSP `Range`.
    ///
    /// Uses the byte-naive `offset()` conversion, so columns are treated as
    /// bytes (ASCII-accurate only) — see `LineIndex::offset`.
    pub fn get_text_range(&self, content: &str, range: Range) -> Option<String> {
        let start_offset = self.line_index.offset(range.start)? as usize;
        let end_offset = self.line_index.offset(range.end)? as usize;

        Some(content[start_offset..end_offset].to_string())
    }

    /// Inspect the line at `position` for an unclosed `{%` template tag and,
    /// if found, describe the partial tag text and what closing characters
    /// already follow the cursor.
    ///
    /// Returns `None` when the cursor's line is out of range or no `{%`
    /// precedes the cursor on that line.
    ///
    /// NOTE(review): `char_pos` is the UTF-16 column used directly as a byte
    /// index into the line; on lines containing multibyte characters this can
    /// slice mid-character and panic — confirm callers only hit ASCII lines.
    pub fn get_template_tag_context(
        &self,
        content: &str,
        position: Position,
    ) -> Option<TemplateTagContext> {
        let start = self.line_index.line_starts.get(position.line as usize)?;
        let end = self
            .line_index
            .line_starts
            .get(position.line as usize + 1)
            .copied()
            .unwrap_or(self.line_index.length);

        let line = &content[*start as usize..end as usize];
        let char_pos: usize = position.character.try_into().ok()?;
        let prefix = &line[..char_pos];
        let rest_of_line = &line[char_pos..];
        let rest_trimmed = rest_of_line.trim_start();

        prefix.rfind("{%").map(|tag_start| {
            // Check if we're immediately after {% with no space
            let needs_leading_space = prefix.ends_with("{%");

            // Classify what already follows the cursor so completion can
            // insert only the missing part of the closing delimiter.
            let closing_brace = if rest_trimmed.starts_with("%}") {
                ClosingBrace::FullClose
            } else if rest_trimmed.starts_with('}') {
                ClosingBrace::PartialClose
            } else {
                ClosingBrace::None
            };

            TemplateTagContext {
                // Text between "{%" and the cursor, whitespace-trimmed.
                partial_tag: prefix[tag_start + 2..].trim().to_string(),
                closing_brace,
                needs_leading_space,
            }
        })
    }

    /// Position → byte offset via the (byte-naive) line index.
    pub fn position_to_offset(&self, position: Position) -> Option<u32> {
        self.line_index.offset(position)
    }

    /// Byte offset → position via the line index.
    pub fn offset_to_position(&self, offset: u32) -> Position {
        self.line_index.position(offset)
    }

    /// Rebuild the line index after the document's text changed.
    /// (The text itself is stored in the VFS, not here.)
    pub fn update_content(&mut self, content: &str) {
        self.line_index = LineIndex::new(content);
    }

    /// Last version number received from the client.
    pub fn version(&self) -> i32 {
        self.version
    }

    /// Clone of the document's language id.
    pub fn language_id(&self) -> LanguageId {
        self.language_id.clone()
    }
}
|
722
crates/djls-workspace/src/document/store.rs
Normal file
722
crates/djls-workspace/src/document/store.rs
Normal file
|
@ -0,0 +1,722 @@
|
|||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use camino::Utf8PathBuf;
|
||||
use djls_project::TemplateTags;
|
||||
use tower_lsp_server::lsp_types::CompletionItem;
|
||||
use tower_lsp_server::lsp_types::CompletionItemKind;
|
||||
use tower_lsp_server::lsp_types::CompletionResponse;
|
||||
use tower_lsp_server::lsp_types::Diagnostic;
|
||||
use tower_lsp_server::lsp_types::DiagnosticSeverity;
|
||||
use tower_lsp_server::lsp_types::DidChangeTextDocumentParams;
|
||||
use tower_lsp_server::lsp_types::DidCloseTextDocumentParams;
|
||||
use tower_lsp_server::lsp_types::DidOpenTextDocumentParams;
|
||||
use tower_lsp_server::lsp_types::Documentation;
|
||||
use tower_lsp_server::lsp_types::InsertTextFormat;
|
||||
use tower_lsp_server::lsp_types::MarkupContent;
|
||||
use tower_lsp_server::lsp_types::MarkupKind;
|
||||
use tower_lsp_server::lsp_types::Position;
|
||||
use tower_lsp_server::lsp_types::Range;
|
||||
use tower_lsp_server::lsp_types::TextDocumentContentChangeEvent;
|
||||
|
||||
use crate::bridge::FileStore;
|
||||
use crate::db::TemplateAst;
|
||||
use crate::vfs::FileKind;
|
||||
use crate::vfs::TextSource;
|
||||
use crate::vfs::Vfs;
|
||||
use crate::ClosingBrace;
|
||||
use crate::LanguageId;
|
||||
use crate::LineIndex;
|
||||
use crate::TextDocument;
|
||||
|
||||
/// Central registry of open editor documents.
///
/// Bridges three layers: the [`Vfs`] (authoritative text content), the
/// [`FileStore`] (Salsa inputs for incremental template analysis), and
/// per-document LSP metadata ([`TextDocument`]) keyed by URI string.
pub struct DocumentStore {
    /// Shared virtual file system holding the current text of every file.
    vfs: Arc<Vfs>,
    /// Salsa-backed mirror of the VFS; mutex-guarded because applying a VFS
    /// snapshot mutates it.
    file_store: Arc<Mutex<FileStore>>,
    /// Open documents (metadata + line index), keyed by stringified URI.
    documents: HashMap<String, TextDocument>,
}
|
||||
|
||||
impl Default for DocumentStore {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
vfs: Arc::new(Vfs::default()),
|
||||
file_store: Arc::new(Mutex::new(FileStore::new())),
|
||||
documents: HashMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DocumentStore {
    /// Equivalent to [`Default::default`].
    pub fn new() -> Self {
        Self::default()
    }

    /// Open a document with the given URI, version, language, and text content.
    /// This creates a new TextDocument and stores it internally, hiding VFS details.
    ///
    /// Sequence: intern + overlay in the VFS, sync a snapshot into the Salsa
    /// [`FileStore`], then record the [`TextDocument`] metadata.
    ///
    /// NOTE(review): `.lock().unwrap()` panics if the `FileStore` mutex was
    /// poisoned by a panic on another thread.
    pub fn open_document(
        &mut self,
        uri: url::Url,
        version: i32,
        language_id: LanguageId,
        text: String,
    ) -> Result<()> {
        let uri_str = uri.to_string();
        let kind = FileKind::from(language_id.clone());

        // Convert URI to path - simplified for now, just use URI string
        let path = Utf8PathBuf::from(uri.as_str());

        // Store content in VFS
        let text_source = TextSource::Overlay(Arc::from(text.as_str()));
        let file_id = self.vfs.intern_file(uri, path, kind, text_source);

        // Set overlay content in VFS
        self.vfs.set_overlay(file_id, Arc::from(text.as_str()))?;

        // Sync VFS snapshot to FileStore for Salsa tracking
        let snapshot = self.vfs.snapshot();
        let mut file_store = self.file_store.lock().unwrap();
        file_store.apply_vfs_snapshot(&snapshot);

        // Create TextDocument with LineIndex
        let document = TextDocument::new(uri_str.clone(), version, language_id, file_id, &text);
        self.documents.insert(uri_str, document);

        Ok(())
    }

    /// Update a document with the given URI, version, and text changes.
    /// This applies changes to the document and updates the VFS accordingly.
    ///
    /// Errors if the document is not open or its content is missing from the
    /// VFS snapshot.
    pub fn update_document(
        &mut self,
        uri: &url::Url,
        version: i32,
        changes: Vec<TextDocumentContentChangeEvent>,
    ) -> Result<()> {
        let uri_str = uri.to_string();

        // Get document and file_id from the documents HashMap
        let document = self
            .documents
            .get(&uri_str)
            .ok_or_else(|| anyhow!("Document not found: {}", uri_str))?;
        let file_id = document.file_id();

        // Get current content from VFS
        let snapshot = self.vfs.snapshot();
        let current_content = snapshot
            .get_text(file_id)
            .ok_or_else(|| anyhow!("File content not found: {}", uri_str))?;

        // Get line index from the document
        let line_index = document.line_index();

        // Apply text changes using the existing function
        let new_content = apply_text_changes(&current_content, &changes, line_index)?;

        // Update TextDocument version and content
        if let Some(document) = self.documents.get_mut(&uri_str) {
            document.version = version;
            document.update_content(&new_content);
        }

        // Update VFS with new content
        self.vfs
            .set_overlay(file_id, Arc::from(new_content.as_str()))?;

        // Sync VFS snapshot to FileStore for Salsa tracking
        let snapshot = self.vfs.snapshot();
        let mut file_store = self.file_store.lock().unwrap();
        file_store.apply_vfs_snapshot(&snapshot);

        Ok(())
    }

    /// `textDocument/didOpen` handler.
    ///
    /// NOTE(review): largely duplicates [`Self::open_document`] (plus a
    /// redundant re-parse of the already-parsed URI) — candidate for
    /// delegation.
    pub fn handle_did_open(&mut self, params: &DidOpenTextDocumentParams) -> Result<()> {
        let uri_str = params.text_document.uri.to_string();
        let uri = params.text_document.uri.clone();
        let version = params.text_document.version;
        let content = params.text_document.text.clone();
        let language_id = LanguageId::from(params.text_document.language_id.as_str());
        let kind = FileKind::from(language_id.clone());

        // Convert URI to Url for VFS
        let vfs_url =
            url::Url::parse(&uri.to_string()).map_err(|e| anyhow!("Invalid URI: {}", e))?;

        // Convert to path - simplified for now, just use URI string
        let path = Utf8PathBuf::from(uri.as_str());

        // Store content in VFS
        let text_source = TextSource::Overlay(Arc::from(content.as_str()));
        let file_id = self.vfs.intern_file(vfs_url, path, kind, text_source);

        // Set overlay content in VFS
        self.vfs.set_overlay(file_id, Arc::from(content.as_str()))?;

        // Sync VFS snapshot to FileStore for Salsa tracking
        let snapshot = self.vfs.snapshot();
        let mut file_store = self.file_store.lock().unwrap();
        file_store.apply_vfs_snapshot(&snapshot);

        // Create TextDocument metadata
        let document = TextDocument::new(
            uri_str.clone(),
            version,
            language_id.clone(),
            file_id,
            &content,
        );
        self.documents.insert(uri_str, document);

        Ok(())
    }

    /// `textDocument/didChange` handler.
    ///
    /// NOTE(review): same body as [`Self::update_document`] modulo parameter
    /// unpacking — candidate for delegation.
    pub fn handle_did_change(&mut self, params: &DidChangeTextDocumentParams) -> Result<()> {
        let uri_str = params.text_document.uri.as_str().to_string();
        let version = params.text_document.version;

        // Get document and file_id from the documents HashMap
        let document = self
            .documents
            .get(&uri_str)
            .ok_or_else(|| anyhow!("Document not found: {}", uri_str))?;
        let file_id = document.file_id();

        // Get current content from VFS
        let snapshot = self.vfs.snapshot();
        let current_content = snapshot
            .get_text(file_id)
            .ok_or_else(|| anyhow!("File content not found: {}", uri_str))?;

        // Get line index from the document (TextDocument now stores its own LineIndex)
        let line_index = document.line_index();

        // Apply text changes using the new function
        let new_content =
            apply_text_changes(&current_content, &params.content_changes, line_index)?;

        // Update TextDocument version and content
        if let Some(document) = self.documents.get_mut(&uri_str) {
            document.version = version;
            document.update_content(&new_content);
        }

        // Update VFS with new content
        self.vfs
            .set_overlay(file_id, Arc::from(new_content.as_str()))?;

        // Sync VFS snapshot to FileStore for Salsa tracking
        let snapshot = self.vfs.snapshot();
        let mut file_store = self.file_store.lock().unwrap();
        file_store.apply_vfs_snapshot(&snapshot);

        Ok(())
    }

    /// Close a document with the given URI.
    /// This removes the document from internal storage and cleans up resources.
    pub fn close_document(&mut self, uri: &url::Url) {
        let uri_str = uri.as_str();

        // Remove TextDocument metadata
        self.documents.remove(uri_str);

        // Note: We don't remove from VFS as it might be useful for caching
        // The VFS will handle cleanup internally
    }

    /// `textDocument/didClose` handler; mirrors [`Self::close_document`].
    pub fn handle_did_close(&mut self, params: &DidCloseTextDocumentParams) {
        let uri_str = params.text_document.uri.as_str();

        // Remove TextDocument metadata
        self.documents.remove(uri_str);

        // Note: We don't remove from VFS as it might be useful for caching
        // The VFS will handle cleanup internally
    }

    /// Line index of an open document, or `None` if not open.
    pub fn get_line_index(&self, uri: &str) -> Option<&LineIndex> {
        self.documents.get(uri).map(|doc| doc.line_index())
    }

    /// Last known client version of an open document.
    #[allow(dead_code)]
    pub fn get_version(&self, uri: &str) -> Option<i32> {
        self.documents.get(uri).map(|doc| doc.version())
    }

    /// True iff the document is open and its version matches `version`.
    #[allow(dead_code)]
    pub fn is_version_valid(&self, uri: &str, version: i32) -> bool {
        self.get_version(uri) == Some(version)
    }

    // TextDocument helper methods
    /// Look up an open document by stringified URI.
    pub fn get_document(&self, uri: &str) -> Option<&TextDocument> {
        self.documents.get(uri)
    }

    /// Mutable variant of [`Self::get_document`].
    pub fn get_document_mut(&mut self, uri: &str) -> Option<&mut TextDocument> {
        self.documents.get_mut(uri)
    }

    // URI-based query methods (new API)
    /// Look up an open document by parsed `Url`.
    pub fn get_document_by_url(&self, uri: &url::Url) -> Option<&TextDocument> {
        self.get_document(uri.as_str())
    }

    /// Current text of an open document, fetched from a fresh VFS snapshot.
    pub fn get_document_text(&self, uri: &url::Url) -> Option<Arc<str>> {
        let document = self.get_document_by_url(uri)?;
        let file_id = document.file_id();
        let snapshot = self.vfs.snapshot();
        snapshot.get_text(file_id)
    }

    /// A single line of an open document's current text.
    pub fn get_line_text(&self, uri: &url::Url, line: u32) -> Option<String> {
        let document = self.get_document_by_url(uri)?;
        let snapshot = self.vfs.snapshot();
        let content = snapshot.get_text(document.file_id())?;
        document.get_line(content.as_ref(), line)
    }

    /// The identifier-style word (ASCII alphanumerics and `_`) under the
    /// cursor, or `None` if the position is past the line end or not on a
    /// word character boundary.
    pub fn get_word_at_position(&self, uri: &url::Url, position: Position) -> Option<String> {
        // This is a simplified implementation - get the line and extract word at position
        let line_text = self.get_line_text(uri, position.line)?;
        let char_pos: usize = position.character.try_into().ok()?;

        if char_pos >= line_text.len() {
            return None;
        }

        // Find word boundaries (simplified - considers alphanumeric and underscore as word chars)
        // NOTE(review): byte-based scan; treats the UTF-16 column as a byte
        // index, so multibyte lines may misbehave — confirm acceptable.
        let line_bytes = line_text.as_bytes();
        let mut start = char_pos;
        let mut end = char_pos;

        // Find start of word
        while start > 0 && is_word_char(line_bytes[start - 1]) {
            start -= 1;
        }

        // Find end of word
        while end < line_text.len() && is_word_char(line_bytes[end]) {
            end += 1;
        }

        if start < end {
            Some(line_text[start..end].to_string())
        } else {
            None
        }
    }

    // Position mapping methods
    /// Byte offset → LSP position for an open document.
    pub fn offset_to_position(&self, uri: &url::Url, offset: usize) -> Option<Position> {
        let document = self.get_document_by_url(uri)?;
        // NOTE(review): `offset as u32` silently truncates for files > 4 GiB.
        Some(document.offset_to_position(offset as u32))
    }

    /// LSP position → byte offset for an open document.
    pub fn position_to_offset(&self, uri: &url::Url, position: Position) -> Option<usize> {
        let document = self.get_document_by_url(uri)?;
        document
            .position_to_offset(position)
            .map(|offset| offset as usize)
    }

    // Template-specific methods
    /// Salsa-cached template AST for an open document, if it is a tracked
    /// template file.
    pub fn get_template_ast(&self, uri: &url::Url) -> Option<Arc<TemplateAst>> {
        let document = self.get_document_by_url(uri)?;
        let file_id = document.file_id();
        let file_store = self.file_store.lock().unwrap();
        file_store.get_template_ast(file_id)
    }

    /// Salsa-cached template parse errors; empty for unknown/non-template files.
    pub fn get_template_errors(&self, uri: &url::Url) -> Vec<String> {
        let document = match self.get_document_by_url(uri) {
            Some(doc) => doc,
            None => return vec![],
        };
        let file_id = document.file_id();
        let file_store = self.file_store.lock().unwrap();
        let errors = file_store.get_template_errors(file_id);
        errors.to_vec()
    }

    /// Template-tag context (partial tag text, closing state) at a cursor
    /// position, computed from the document's current VFS text.
    pub fn get_template_context(
        &self,
        uri: &url::Url,
        position: Position,
    ) -> Option<crate::TemplateTagContext> {
        let document = self.get_document_by_url(uri)?;
        let snapshot = self.vfs.snapshot();
        let content = snapshot.get_text(document.file_id())?;
        document.get_template_tag_context(content.as_ref(), position)
    }

    /// Completion items for Django template tags at `position`.
    ///
    /// Returns `None` for non-template documents, positions without a `{%`
    /// context, or when no tag matches the typed prefix. Inserted text adapts
    /// to whatever closing characters already follow the cursor.
    pub fn get_completions(
        &self,
        uri: &str,
        position: Position,
        tags: &TemplateTags,
    ) -> Option<CompletionResponse> {
        // Check if this is a Django template using TextDocument metadata
        let document = self.get_document(uri)?;
        if document.language_id() != LanguageId::HtmlDjango {
            return None;
        }

        // Try to get cached AST from FileStore for better context analysis
        // This demonstrates using the cached AST, though we still fall back to string parsing
        let file_id = document.file_id();
        let file_store = self.file_store.lock().unwrap();
        if let Some(_ast) = file_store.get_template_ast(file_id) {
            // TODO: In a future enhancement, we could use the AST to provide
            // more intelligent completions based on the current node context
            // For now, we continue with the existing string-based approach
        }

        // Get template tag context from document
        let vfs_snapshot = self.vfs.snapshot();
        let text_content = vfs_snapshot.get_text(file_id)?;
        let content = text_content.as_ref();
        let context = document.get_template_tag_context(content, position)?;

        let mut completions: Vec<CompletionItem> = tags
            .iter()
            .filter(|tag| {
                context.partial_tag.is_empty() || tag.name().starts_with(&context.partial_tag)
            })
            .map(|tag| {
                let leading_space = if context.needs_leading_space { " " } else { "" };
                CompletionItem {
                    label: tag.name().to_string(),
                    kind: Some(CompletionItemKind::KEYWORD),
                    detail: Some(format!("Template tag from {}", tag.library())),
                    documentation: tag.doc().as_ref().map(|doc| {
                        Documentation::MarkupContent(MarkupContent {
                            kind: MarkupKind::Markdown,
                            value: (*doc).to_string(),
                        })
                    }),
                    // Complete only what is missing: full " %}", bare "%", or
                    // just the tag name, depending on what follows the cursor.
                    insert_text: Some(match context.closing_brace {
                        ClosingBrace::None => format!("{}{} %}}", leading_space, tag.name()),
                        ClosingBrace::PartialClose => format!("{}{} %", leading_space, tag.name()),
                        ClosingBrace::FullClose => format!("{}{} ", leading_space, tag.name()),
                    }),
                    insert_text_format: Some(InsertTextFormat::PLAIN_TEXT),
                    ..Default::default()
                }
            })
            .collect();

        if completions.is_empty() {
            None
        } else {
            completions.sort_by(|a, b| a.label.cmp(&b.label));
            Some(CompletionResponse::Array(completions))
        }
    }

    /// Get template parsing diagnostics for a file.
    ///
    /// This method uses the cached template errors from Salsa to generate LSP diagnostics.
    /// The errors are only re-computed when the file content changes, providing efficient
    /// incremental error reporting.
    pub fn get_template_diagnostics(&self, uri: &str) -> Vec<Diagnostic> {
        let Some(document) = self.get_document(uri) else {
            return vec![];
        };

        // Only process template files
        if document.language_id() != LanguageId::HtmlDjango {
            return vec![];
        }

        let file_id = document.file_id();
        // Fetched only as an existence check; the index itself is unused.
        let Some(_line_index) = self.get_line_index(uri) else {
            return vec![];
        };

        // Get cached template errors from FileStore
        let file_store = self.file_store.lock().unwrap();
        let errors = file_store.get_template_errors(file_id);

        // Convert template errors to LSP diagnostics
        errors
            .iter()
            .map(|error| {
                // For now, we'll place all errors at the start of the file
                // In a future enhancement, we could use error spans for precise locations
                let range = Range {
                    start: Position {
                        line: 0,
                        character: 0,
                    },
                    end: Position {
                        line: 0,
                        character: 0,
                    },
                };

                Diagnostic {
                    range,
                    severity: Some(DiagnosticSeverity::ERROR),
                    source: Some("djls-templates".to_string()),
                    message: error.clone(),
                    ..Default::default()
                }
            })
            .collect()
    }
}
|
||||
|
||||
/// Word characters for identifier-style word extraction: ASCII letters,
/// ASCII digits, and underscore.
fn is_word_char(byte: u8) -> bool {
    matches!(byte, b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_')
}
|
||||
|
||||
/// Apply text changes to content, handling multiple changes correctly
///
/// Incremental changes are applied in reverse document order so that each
/// edit does not shift the offsets of edits earlier in the document.
///
/// NOTE(review): if any change lacks a range, the FIRST such change's text is
/// returned and all other changes (including ones after it) are discarded —
/// the LSP spec applies changes sequentially, so a full replacement followed
/// by incremental edits would be mishandled here. Also, `line_index` was
/// built from the pre-change content while offsets are resolved against the
/// mutating `result`; the reverse-order application is what keeps earlier
/// offsets valid. Confirm callers never send overlapping or post-replacement
/// changes.
fn apply_text_changes(
    content: &str,
    changes: &[TextDocumentContentChangeEvent],
    line_index: &LineIndex,
) -> Result<String> {
    if changes.is_empty() {
        return Ok(content.to_string());
    }

    // Check for full document replacement first
    for change in changes {
        if change.range.is_none() {
            return Ok(change.text.clone());
        }
    }

    // Sort changes by start position in reverse order (end to start)
    let mut sorted_changes = changes.to_vec();
    sorted_changes.sort_by(|a, b| {
        match (a.range, b.range) {
            (Some(range_a), Some(range_b)) => {
                // Primary sort: by line (reverse)
                let line_cmp = range_b.start.line.cmp(&range_a.start.line);
                if line_cmp == std::cmp::Ordering::Equal {
                    // Secondary sort: by character (reverse)
                    range_b.start.character.cmp(&range_a.start.character)
                } else {
                    line_cmp
                }
            }
            // Unreachable in practice: rangeless changes returned above.
            _ => std::cmp::Ordering::Equal,
        }
    });

    let mut result = content.to_string();

    for change in &sorted_changes {
        if let Some(range) = change.range {
            // Convert UTF-16 positions to UTF-8 offsets
            let start_offset = line_index
                .offset_utf16(range.start, &result)
                .ok_or_else(|| anyhow!("Invalid start position: {:?}", range.start))?;
            let end_offset = line_index
                .offset_utf16(range.end, &result)
                .ok_or_else(|| anyhow!("Invalid end position: {:?}", range.end))?;

            // Guard against out-of-range offsets before replace_range panics.
            if start_offset as usize > result.len() || end_offset as usize > result.len() {
                return Err(anyhow!(
                    "Offset out of bounds: start={}, end={}, len={}",
                    start_offset,
                    end_offset,
                    result.len()
                ));
            }

            // Apply the change
            result.replace_range(start_offset as usize..end_offset as usize, &change.text);
        }
    }

    Ok(result)
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use tower_lsp_server::lsp_types::Range;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_apply_single_character_insertion() {
|
||||
let content = "Hello world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 6), Position::new(0, 6))),
|
||||
range_length: None,
|
||||
text: "beautiful ".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "Hello beautiful world");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_single_character_deletion() {
|
||||
let content = "Hello world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 5), Position::new(0, 6))),
|
||||
range_length: None,
|
||||
text: String::new(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "Helloworld");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_multiple_changes_in_reverse_order() {
|
||||
let content = "line 1\nline 2\nline 3";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// Insert "new " at position (1, 0) and "another " at position (0, 0)
|
||||
let changes = vec![
|
||||
TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 0), Position::new(0, 0))),
|
||||
range_length: None,
|
||||
text: "another ".to_string(),
|
||||
},
|
||||
TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(1, 0), Position::new(1, 0))),
|
||||
range_length: None,
|
||||
text: "new ".to_string(),
|
||||
},
|
||||
];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "another line 1\nnew line 2\nline 3");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_multiline_replacement() {
|
||||
let content = "line 1\nline 2\nline 3";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 0), Position::new(2, 6))),
|
||||
range_length: None,
|
||||
text: "completely new content".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "completely new content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_full_document_replacement() {
|
||||
let content = "old content";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: "brand new content".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "brand new content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_utf16_line_index_basic() {
|
||||
let content = "hello world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// ASCII characters should have 1:1 UTF-8:UTF-16 mapping
|
||||
let pos = Position::new(0, 6);
|
||||
let offset = line_index.offset_utf16(pos, content).unwrap();
|
||||
assert_eq!(offset, 6);
|
||||
assert_eq!(&content[6..7], "w");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_utf16_line_index_with_emoji() {
|
||||
let content = "hello 👋 world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// 👋 is 2 UTF-16 code units but 4 UTF-8 bytes
|
||||
let pos_after_emoji = Position::new(0, 8); // UTF-16 position after "hello 👋"
|
||||
let offset = line_index.offset_utf16(pos_after_emoji, content).unwrap();
|
||||
|
||||
// Should point to the space before "world"
|
||||
assert_eq!(offset, 10); // UTF-8 byte offset
|
||||
assert_eq!(&content[10..11], " ");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_utf16_line_index_multiline() {
|
||||
let content = "first line\nsecond line";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let pos = Position::new(1, 7); // Position at 'l' in "line" on second line
|
||||
let offset = line_index.offset_utf16(pos, content).unwrap();
|
||||
assert_eq!(offset, 18); // 11 (first line + \n) + 7
|
||||
assert_eq!(&content[18..19], "l");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_changes_with_emoji() {
|
||||
let content = "hello 👋 world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// Insert text after the space following the emoji (UTF-16 position 9)
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 9), Position::new(0, 9))),
|
||||
range_length: None,
|
||||
text: "beautiful ".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "hello 👋 beautiful world");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_line_index_utf16_tracking() {
|
||||
let content = "a👋b";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// Check UTF-16 line starts are tracked correctly
|
||||
assert_eq!(line_index.line_starts_utf16, vec![0]);
|
||||
assert_eq!(line_index.length_utf16, 4); // 'a' (1) + 👋 (2) + 'b' (1) = 4 UTF-16 units
|
||||
assert_eq!(line_index.length, 6); // 'a' (1) + 👋 (4) + 'b' (1) = 6 UTF-8 bytes
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_edge_case_changes_at_boundaries() {
|
||||
let content = "abc";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// Insert at beginning
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 0), Position::new(0, 0))),
|
||||
range_length: None,
|
||||
text: "start".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "startabc");
|
||||
|
||||
// Insert at end
|
||||
let line_index = LineIndex::new(content);
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 3), Position::new(0, 3))),
|
||||
range_length: None,
|
||||
text: "end".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "abcend");
|
||||
}
|
||||
}
|
13
crates/djls-workspace/src/document/template.rs
Normal file
13
crates/djls-workspace/src/document/template.rs
Normal file
|
@ -0,0 +1,13 @@
|
|||
#[derive(Debug)]
|
||||
pub enum ClosingBrace {
|
||||
None,
|
||||
PartialClose, // just }
|
||||
FullClose, // %}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TemplateTagContext {
|
||||
pub partial_tag: String,
|
||||
pub closing_brace: ClosingBrace,
|
||||
pub needs_leading_space: bool,
|
||||
}
|
|
@ -1,14 +1,14 @@
|
|||
mod bridge;
|
||||
mod db;
|
||||
mod document;
|
||||
mod vfs;
|
||||
mod watcher;
|
||||
|
||||
pub use bridge::FileStore;
|
||||
pub use db::{
|
||||
parse_template, template_errors, Database, SourceFile, TemplateAst, TemplateLoaderOrder,
|
||||
};
|
||||
pub use vfs::{FileKind, FileMeta, FileRecord, Revision, TextSource, Vfs, VfsSnapshot};
|
||||
pub use watcher::{VfsWatcher, WatchConfig, WatchEvent};
|
||||
pub use document::ClosingBrace;
|
||||
pub use document::DocumentStore;
|
||||
pub use document::LanguageId;
|
||||
pub use document::LineIndex;
|
||||
pub use document::TemplateTagContext;
|
||||
pub use document::TextDocument;
|
||||
|
||||
/// Stable, compact identifier for files across the subsystem.
|
||||
///
|
||||
|
@ -16,7 +16,7 @@ pub use watcher::{VfsWatcher, WatchConfig, WatchEvent};
|
|||
/// Salsa inputs. Once assigned to a file (via its URI), a [`FileId`] remains stable for the
|
||||
/// lifetime of the VFS, even if the file's content or metadata changes.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
|
||||
pub struct FileId(u32);
|
||||
pub(crate) struct FileId(u32);
|
||||
|
||||
impl FileId {
|
||||
/// Create a [`FileId`] from a raw u32 value.
|
||||
|
|
|
@ -4,25 +4,28 @@
|
|||
//! and snapshotting. Downstream systems consume snapshots to avoid locking and to
|
||||
//! batch updates.
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
mod watcher;
|
||||
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::hash::Hash;
|
||||
use std::hash::Hasher;
|
||||
use std::sync::atomic::AtomicU32;
|
||||
use std::sync::atomic::AtomicU64;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use camino::Utf8PathBuf;
|
||||
use dashmap::DashMap;
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::fs;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{
|
||||
atomic::{AtomicU32, AtomicU64, Ordering},
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use url::Url;
|
||||
use watcher::VfsWatcher;
|
||||
use watcher::WatchConfig;
|
||||
use watcher::WatchEvent;
|
||||
|
||||
use super::{
|
||||
watcher::{VfsWatcher, WatchConfig, WatchEvent},
|
||||
FileId,
|
||||
};
|
||||
use super::FileId;
|
||||
|
||||
/// Monotonic counter representing global VFS state.
|
||||
///
|
||||
|
@ -30,18 +33,18 @@ use super::{
|
|||
/// This provides a cheap way to detect if any changes have occurred since
|
||||
/// a previous snapshot was taken.
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default, PartialOrd, Ord)]
|
||||
pub struct Revision(u64);
|
||||
pub(crate) struct Revision(u64);
|
||||
|
||||
impl Revision {
|
||||
/// Create a [`Revision`] from a raw u64 value.
|
||||
#[must_use]
|
||||
pub fn from_raw(raw: u64) -> Self {
|
||||
fn from_raw(raw: u64) -> Self {
|
||||
Revision(raw)
|
||||
}
|
||||
|
||||
/// Get the underlying u64 value.
|
||||
#[must_use]
|
||||
pub fn value(self) -> u64 {
|
||||
fn value(self) -> u64 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
@ -65,11 +68,11 @@ pub enum FileKind {
|
|||
/// [`FileMeta`] contains all non-content information about a file, including its
|
||||
/// identity (URI), filesystem path, and classification.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct FileMeta {
|
||||
pub(crate) struct FileMeta {
|
||||
/// The file's URI (typically file:// scheme)
|
||||
pub uri: Url,
|
||||
uri: Url,
|
||||
/// The file's path in the filesystem
|
||||
pub path: Utf8PathBuf,
|
||||
path: Utf8PathBuf,
|
||||
/// Classification for routing to analyzers
|
||||
pub kind: FileKind,
|
||||
}
|
||||
|
@ -80,7 +83,7 @@ pub struct FileMeta {
|
|||
/// debugging and understanding the current state of the VFS. All variants hold
|
||||
/// `Arc<str>` for efficient sharing.
|
||||
#[derive(Clone)]
|
||||
pub enum TextSource {
|
||||
pub(crate) enum TextSource {
|
||||
/// Content loaded from disk
|
||||
Disk(Arc<str>),
|
||||
/// Content from LSP client overlay (in-memory edits)
|
||||
|
@ -89,18 +92,47 @@ pub enum TextSource {
|
|||
Generated(Arc<str>),
|
||||
}
|
||||
|
||||
/// Content hash for efficient change detection.
|
||||
///
|
||||
/// [`FileHash`] encapsulates the hashing logic used to detect when file content
|
||||
/// has changed, avoiding unnecessary recomputation in downstream systems like Salsa.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
struct FileHash(u64);
|
||||
|
||||
impl FileHash {
|
||||
/// Compute hash from text source content.
|
||||
fn from_text_source(src: &TextSource) -> Self {
|
||||
let s: &str = match src {
|
||||
TextSource::Disk(s) | TextSource::Overlay(s) | TextSource::Generated(s) => s,
|
||||
};
|
||||
let mut h = DefaultHasher::new();
|
||||
s.hash(&mut h);
|
||||
Self(h.finish())
|
||||
}
|
||||
|
||||
/// Check if this hash differs from another, indicating content changed.
|
||||
fn differs_from(self, other: Self) -> bool {
|
||||
self.0 != other.0
|
||||
}
|
||||
|
||||
/// Get raw hash value (for debugging/logging).
|
||||
fn raw(self) -> u64 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// Complete record of a file in the VFS.
|
||||
///
|
||||
/// [`FileRecord`] combines metadata, current text content, and a content hash
|
||||
/// for efficient change detection.
|
||||
#[derive(Clone)]
|
||||
pub struct FileRecord {
|
||||
pub(crate) struct FileRecord {
|
||||
/// File metadata (URI, path, kind, version)
|
||||
pub meta: FileMeta,
|
||||
/// Current text content and its source
|
||||
pub text: TextSource,
|
||||
text: TextSource,
|
||||
/// Hash of current content for change detection
|
||||
pub hash: u64,
|
||||
hash: FileHash,
|
||||
}
|
||||
|
||||
/// Thread-safe virtual file system with change tracking.
|
||||
|
@ -129,7 +161,7 @@ impl Vfs {
|
|||
/// Returns the existing [`FileId`] if the URI is already known, or creates a new
|
||||
/// [`FileRecord`] with the provided metadata and text. This method computes and
|
||||
/// stores a content hash for change detection.
|
||||
pub fn intern_file(
|
||||
pub(crate) fn intern_file(
|
||||
&self,
|
||||
uri: Url,
|
||||
path: Utf8PathBuf,
|
||||
|
@ -145,7 +177,7 @@ impl Vfs {
|
|||
path: path.clone(),
|
||||
kind,
|
||||
};
|
||||
let hash = content_hash(&text);
|
||||
let hash = FileHash::from_text_source(&text);
|
||||
self.by_uri.insert(uri, id);
|
||||
self.by_path.insert(path, id);
|
||||
self.files.insert(id, FileRecord { meta, text, hash });
|
||||
|
@ -159,14 +191,14 @@ impl Vfs {
|
|||
/// (detected via hash comparison).
|
||||
///
|
||||
/// Returns a tuple of (new global revision, whether content changed).
|
||||
pub fn set_overlay(&self, id: FileId, new_text: Arc<str>) -> Result<(Revision, bool)> {
|
||||
pub(crate) fn set_overlay(&self, id: FileId, new_text: Arc<str>) -> Result<(Revision, bool)> {
|
||||
let mut rec = self
|
||||
.files
|
||||
.get_mut(&id)
|
||||
.ok_or_else(|| anyhow!("unknown file: {:?}", id))?;
|
||||
let next = TextSource::Overlay(new_text);
|
||||
let new_hash = content_hash(&next);
|
||||
let changed = new_hash != rec.hash;
|
||||
let new_hash = FileHash::from_text_source(&next);
|
||||
let changed = new_hash.differs_from(rec.hash);
|
||||
if changed {
|
||||
rec.text = next;
|
||||
rec.hash = new_hash;
|
||||
|
@ -183,7 +215,7 @@ impl Vfs {
|
|||
/// Materializes a consistent view of all files for downstream consumers.
|
||||
/// The snapshot includes the current revision and a clone of all file records.
|
||||
/// This operation is relatively cheap due to `Arc` sharing of text content.
|
||||
pub fn snapshot(&self) -> VfsSnapshot {
|
||||
pub(crate) fn snapshot(&self) -> VfsSnapshot {
|
||||
VfsSnapshot {
|
||||
revision: Revision::from_raw(self.head.load(Ordering::SeqCst)),
|
||||
files: self
|
||||
|
@ -268,11 +300,11 @@ impl Vfs {
|
|||
.map_err(|e| anyhow!("Failed to read file {}: {}", path, e))?;
|
||||
|
||||
let new_text = TextSource::Disk(Arc::from(content.as_str()));
|
||||
let new_hash = content_hash(&new_text);
|
||||
let new_hash = FileHash::from_text_source(&new_text);
|
||||
|
||||
// Update the file if content changed
|
||||
if let Some(mut record) = self.files.get_mut(&file_id) {
|
||||
if record.hash != new_hash {
|
||||
if new_hash.differs_from(record.hash) {
|
||||
record.text = new_text;
|
||||
record.hash = new_hash;
|
||||
self.head.fetch_add(1, Ordering::SeqCst);
|
||||
|
@ -301,28 +333,15 @@ impl Default for Vfs {
|
|||
}
|
||||
}
|
||||
|
||||
/// Compute a stable hash over file content.
|
||||
///
|
||||
/// Used for efficient change detection - if the hash hasn't changed,
|
||||
/// the content hasn't changed, avoiding unnecessary Salsa invalidations.
|
||||
fn content_hash(src: &TextSource) -> u64 {
|
||||
let s: &str = match src {
|
||||
TextSource::Disk(s) | TextSource::Overlay(s) | TextSource::Generated(s) => s,
|
||||
};
|
||||
let mut h = DefaultHasher::new();
|
||||
s.hash(&mut h);
|
||||
h.finish()
|
||||
}
|
||||
|
||||
/// Immutable snapshot view of the VFS at a specific revision.
|
||||
///
|
||||
/// [`VfsSnapshot`] provides a consistent view of all files for downstream consumers,
|
||||
/// avoiding the need for locking during processing. Snapshots are created atomically
|
||||
/// and can be safely shared across threads.
|
||||
#[derive(Clone)]
|
||||
pub struct VfsSnapshot {
|
||||
pub(crate) struct VfsSnapshot {
|
||||
/// The global revision at the time of snapshot
|
||||
pub revision: Revision,
|
||||
revision: Revision,
|
||||
/// All files in the VFS at snapshot time
|
||||
pub files: HashMap<FileId, FileRecord>,
|
||||
}
|
|
@ -4,15 +4,21 @@
|
|||
//! and synchronize them with the VFS. It uses cross-platform file watching with
|
||||
//! debouncing to handle rapid changes efficiently.
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::mpsc;
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
use std::time::Instant;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use camino::Utf8PathBuf;
|
||||
use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::mpsc,
|
||||
thread,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use notify::Config;
|
||||
use notify::Event;
|
||||
use notify::EventKind;
|
||||
use notify::RecommendedWatcher;
|
||||
use notify::RecursiveMode;
|
||||
use notify::Watcher;
|
||||
|
||||
/// Event types that can occur in the file system.
|
||||
///
|
Loading…
Add table
Add a link
Reference in a new issue