mirror of https://github.com/joshuadavidthomas/django-language-server.git
synced 2025-09-04 17:30:37 +00:00

Commit 3131470cce (parent 541200cbb1): wip

19 changed files with 761 additions and 430 deletions
Cargo.lock (generated, 2 changes)

@@ -509,11 +509,13 @@ dependencies = [
"anyhow",
"camino",
"dashmap",
"djls-project",
"djls-templates",
"notify",
"salsa",
"tempfile",
"tokio",
"tower-lsp-server",
"url",
]
@@ -3,7 +3,6 @@ mod logging;
mod queue;
mod server;
mod session;
mod workspace;

use std::io::IsTerminal;

@@ -218,8 +218,23 @@ impl LanguageServer for DjangoLanguageServer {
tracing::info!("Opened document: {:?}", params.text_document.uri);

self.with_session_mut(|session| {
if let Err(e) = session.documents_mut().handle_did_open(&params) {
tracing::error!("Failed to handle did_open: {}", e);
let uri = params.text_document.uri.clone();
let version = params.text_document.version;
let language_id =
djls_workspace::LanguageId::from(params.text_document.language_id.as_str());
let text = params.text_document.text.clone();

// Convert LSP Uri to url::Url
if let Ok(url) = url::Url::parse(&uri.to_string()) {
if let Err(e) =
session
.documents_mut()
.open_document(url, version, language_id, text)
{
tracing::error!("Failed to handle did_open: {}", e);
}
} else {
tracing::error!("Invalid URI: {:?}", uri);
}
})
.await;

@@ -229,7 +244,21 @@ impl LanguageServer for DjangoLanguageServer {
tracing::info!("Changed document: {:?}", params.text_document.uri);

self.with_session_mut(|session| {
let _ = session.documents_mut().handle_did_change(&params);
let uri = &params.text_document.uri;
let version = params.text_document.version;
let changes = params.content_changes.clone();

// Convert LSP Uri to url::Url
if let Ok(url) = url::Url::parse(&uri.to_string()) {
if let Err(e) = session
.documents_mut()
.update_document(&url, version, changes)
{
tracing::error!("Failed to handle did_change: {}", e);
}
} else {
tracing::error!("Invalid URI: {:?}", uri);
}
})
.await;
}

@@ -238,7 +267,14 @@ impl LanguageServer for DjangoLanguageServer {
tracing::info!("Closed document: {:?}", params.text_document.uri);

self.with_session_mut(|session| {
session.documents_mut().handle_did_close(&params);
let uri = &params.text_document.uri;

// Convert LSP Uri to url::Url
if let Ok(url) = url::Url::parse(&uri.to_string()) {
session.documents_mut().close_document(&url);
} else {
tracing::error!("Invalid URI: {:?}", uri);
}
})
.await;
}

@@ -248,14 +284,61 @@ impl LanguageServer for DjangoLanguageServer {
.with_session(|session| {
if let Some(project) = session.project() {
if let Some(tags) = project.template_tags() {
return session.documents().get_completions(
params.text_document_position.text_document.uri.as_str(),
params.text_document_position.position,
tags,
);
let uri = &params.text_document_position.text_document.uri;
let position = params.text_document_position.position;

// Convert LSP Uri to url::Url
if let Ok(url) = url::Url::parse(&uri.to_string()) {
if let Some(context) = session.documents().get_template_context(&url, position) {
// Use the context to generate completions
let mut completions: Vec<tower_lsp_server::lsp_types::CompletionItem> = tags
.iter()
.filter(|tag| {
context.partial_tag.is_empty() || tag.name().starts_with(&context.partial_tag)
})
.map(|tag| {
let leading_space = if context.needs_leading_space { " " } else { "" };
tower_lsp_server::lsp_types::CompletionItem {
label: tag.name().to_string(),
kind: Some(tower_lsp_server::lsp_types::CompletionItemKind::KEYWORD),
detail: Some(format!("Template tag from {}", tag.library())),
documentation: tag.doc().as_ref().map(|doc| {
tower_lsp_server::lsp_types::Documentation::MarkupContent(
tower_lsp_server::lsp_types::MarkupContent {
kind: tower_lsp_server::lsp_types::MarkupKind::Markdown,
value: (*doc).to_string(),
}
)
}),
insert_text: Some(match context.closing_brace {
djls_workspace::ClosingBrace::None => format!("{}{} %}}", leading_space, tag.name()),
djls_workspace::ClosingBrace::PartialClose => format!("{}{} %", leading_space, tag.name()),
djls_workspace::ClosingBrace::FullClose => format!("{}{} ", leading_space, tag.name()),
}),
insert_text_format: Some(tower_lsp_server::lsp_types::InsertTextFormat::PLAIN_TEXT),
..Default::default()
}
})
.collect();

if completions.is_empty() {
None
} else {
completions.sort_by(|a, b| a.label.cmp(&b.label));
Some(tower_lsp_server::lsp_types::CompletionResponse::Array(completions))
}
} else {
None
}
} else {
None
}
} else {
None
}
} else {
None
}
None
})
.await)
}
@@ -1,14 +1,17 @@
use std::path::PathBuf;

use djls_conf::Settings;
use djls_project::DjangoProject;
use djls_workspace::DocumentStore;
use percent_encoding::percent_decode_str;
use tower_lsp_server::lsp_types::ClientCapabilities;
use tower_lsp_server::lsp_types::InitializeParams;

use crate::workspace::Store;
use tower_lsp_server::lsp_types::Uri;

#[derive(Default)]
pub struct Session {
project: Option<DjangoProject>,
documents: Store,
documents: DocumentStore,
settings: Settings,

#[allow(dead_code)]

@@ -16,8 +19,46 @@ pub struct Session {
}

impl Session {
/// Determines the project root path from initialization parameters.
///
/// Tries the current directory first, then falls back to the first workspace folder.
fn get_project_path(params: &InitializeParams) -> Option<PathBuf> {
// Try current directory first
std::env::current_dir().ok().or_else(|| {
// Fall back to the first workspace folder URI
params
.workspace_folders
.as_ref()
.and_then(|folders| folders.first())
.and_then(|folder| Self::uri_to_pathbuf(&folder.uri))
})
}

/// Converts a `file:` URI into an absolute `PathBuf`.
fn uri_to_pathbuf(uri: &Uri) -> Option<PathBuf> {
// Check if the scheme is "file"
if uri.scheme().is_none_or(|s| s.as_str() != "file") {
return None;
}

// Get the path part as a string
let encoded_path_str = uri.path().as_str();

// Decode the percent-encoded path string
let decoded_path_cow = percent_decode_str(encoded_path_str).decode_utf8_lossy();
let path_str = decoded_path_cow.as_ref();

#[cfg(windows)]
let path_str = {
// Remove leading '/' for paths like /C:/...
path_str.strip_prefix('/').unwrap_or(path_str)
};

Some(PathBuf::from(path_str))
}

pub fn new(params: &InitializeParams) -> Self {
let project_path = crate::workspace::get_project_path(params);
let project_path = Self::get_project_path(params);

let (project, settings) = if let Some(path) = &project_path {
let settings =

@@ -33,7 +74,7 @@ impl Session {
Self {
client_capabilities: params.capabilities.clone(),
project,
documents: Store::default(),
documents: DocumentStore::new(),
settings,
}
}

@@ -46,11 +87,11 @@ impl Session {
&mut self.project
}

pub fn documents(&self) -> &Store {
pub fn documents(&self) -> &DocumentStore {
&self.documents
}

pub fn documents_mut(&mut self) -> &mut Store {
pub fn documents_mut(&mut self) -> &mut DocumentStore {
&mut self.documents
}
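Note (illustrative, not part of the commit): the path handling that `Session::uri_to_pathbuf` performs can be sketched standalone as below, assuming the URI's path component has already been extracted as a `&str`; the function name is hypothetical.

// Sketch only: percent-decode a file-URI path component and normalize the
// Windows "/C:/..." form, mirroring the logic in Session::uri_to_pathbuf.
use std::path::PathBuf;

fn file_uri_path_to_pathbuf(encoded_path: &str) -> PathBuf {
    // "%20" becomes " ", and so on.
    let decoded = percent_encoding::percent_decode_str(encoded_path)
        .decode_utf8_lossy()
        .into_owned();

    // On Windows the URI path arrives as "/C:/...", so the leading '/' is dropped.
    #[cfg(windows)]
    let decoded = decoded.strip_prefix('/').unwrap_or(&decoded).to_string();

    PathBuf::from(decoded)
}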
@@ -1,231 +0,0 @@
use djls_workspace::{FileId, VfsSnapshot};
use std::sync::Arc;
use tower_lsp_server::lsp_types::{Position, Range};

#[derive(Clone, Debug)]
pub struct TextDocument {
pub uri: String,
pub version: i32,
pub language_id: LanguageId,
file_id: FileId,
}

impl TextDocument {
pub fn new(uri: String, version: i32, language_id: LanguageId, file_id: FileId) -> Self {
Self {
uri,
version,
language_id,
file_id,
}
}

pub fn file_id(&self) -> FileId {
self.file_id
}

pub fn get_content(&self, vfs: &VfsSnapshot) -> Option<Arc<str>> {
vfs.get_text(self.file_id)
}

pub fn get_line(&self, vfs: &VfsSnapshot, line_index: &LineIndex, line: u32) -> Option<String> {
let content = self.get_content(vfs)?;

let line_start = *line_index.line_starts.get(line as usize)?;
let line_end = line_index
.line_starts
.get(line as usize + 1)
.copied()
.unwrap_or(line_index.length);

Some(content[line_start as usize..line_end as usize].to_string())
}

pub fn get_text_range(
&self,
vfs: &VfsSnapshot,
line_index: &LineIndex,
range: Range,
) -> Option<String> {
let content = self.get_content(vfs)?;

let start_offset = line_index.offset(range.start)? as usize;
let end_offset = line_index.offset(range.end)? as usize;

Some(content[start_offset..end_offset].to_string())
}

pub fn get_template_tag_context(
&self,
vfs: &VfsSnapshot,
line_index: &LineIndex,
position: Position,
) -> Option<TemplateTagContext> {
let content = self.get_content(vfs)?;

let start = line_index.line_starts.get(position.line as usize)?;
let end = line_index
.line_starts
.get(position.line as usize + 1)
.copied()
.unwrap_or(line_index.length);

let line = &content[*start as usize..end as usize];
let char_pos: usize = position.character.try_into().ok()?;
let prefix = &line[..char_pos];
let rest_of_line = &line[char_pos..];
let rest_trimmed = rest_of_line.trim_start();

prefix.rfind("{%").map(|tag_start| {
// Check if we're immediately after {% with no space
let needs_leading_space = prefix.ends_with("{%");

let closing_brace = if rest_trimmed.starts_with("%}") {
ClosingBrace::FullClose
} else if rest_trimmed.starts_with('}') {
ClosingBrace::PartialClose
} else {
ClosingBrace::None
};

TemplateTagContext {
partial_tag: prefix[tag_start + 2..].trim().to_string(),
closing_brace,
needs_leading_space,
}
})
}
}

#[derive(Clone, Debug)]
pub struct LineIndex {
pub line_starts: Vec<u32>,
pub line_starts_utf16: Vec<u32>,
pub length: u32,
pub length_utf16: u32,
}

impl LineIndex {
pub fn new(text: &str) -> Self {
let mut line_starts = vec![0];
let mut line_starts_utf16 = vec![0];
let mut pos_utf8 = 0;
let mut pos_utf16 = 0;

for c in text.chars() {
pos_utf8 += u32::try_from(c.len_utf8()).unwrap_or(0);
pos_utf16 += u32::try_from(c.len_utf16()).unwrap_or(0);
if c == '\n' {
line_starts.push(pos_utf8);
line_starts_utf16.push(pos_utf16);
}
}

Self {
line_starts,
line_starts_utf16,
length: pos_utf8,
length_utf16: pos_utf16,
}
}

pub fn offset(&self, position: Position) -> Option<u32> {
let line_start = self.line_starts.get(position.line as usize)?;

Some(line_start + position.character)
}

/// Convert UTF-16 LSP position to UTF-8 byte offset
pub fn offset_utf16(&self, position: Position, text: &str) -> Option<u32> {
let line_start_utf8 = self.line_starts.get(position.line as usize)?;
let _line_start_utf16 = self.line_starts_utf16.get(position.line as usize)?;

// If position is at start of line, return UTF-8 line start
if position.character == 0 {
return Some(*line_start_utf8);
}

// Find the line text
let next_line_start = self
.line_starts
.get(position.line as usize + 1)
.copied()
.unwrap_or(self.length);

let line_text = text.get(*line_start_utf8 as usize..next_line_start as usize)?;

// Convert UTF-16 character offset to UTF-8 byte offset within the line
let mut utf16_pos = 0;
let mut utf8_pos = 0;

for c in line_text.chars() {
if utf16_pos >= position.character {
break;
}
utf16_pos += u32::try_from(c.len_utf16()).unwrap_or(0);
utf8_pos += u32::try_from(c.len_utf8()).unwrap_or(0);
}

Some(line_start_utf8 + utf8_pos)
}

#[allow(dead_code)]
pub fn position(&self, offset: u32) -> Position {
let line = match self.line_starts.binary_search(&offset) {
Ok(line) => line,
Err(line) => line - 1,
};

let line_start = self.line_starts[line];
let character = offset - line_start;

Position::new(u32::try_from(line).unwrap_or(0), character)
}
}

#[derive(Clone, Debug, PartialEq)]
pub enum LanguageId {
HtmlDjango,
Other,
Python,
}

impl From<&str> for LanguageId {
fn from(language_id: &str) -> Self {
match language_id {
"django-html" | "htmldjango" => Self::HtmlDjango,
"python" => Self::Python,
_ => Self::Other,
}
}
}

impl From<String> for LanguageId {
fn from(language_id: String) -> Self {
Self::from(language_id.as_str())
}
}

impl From<LanguageId> for djls_workspace::FileKind {
fn from(language_id: LanguageId) -> Self {
match language_id {
LanguageId::Python => Self::Python,
LanguageId::HtmlDjango => Self::Template,
LanguageId::Other => Self::Other,
}
}
}

#[derive(Debug)]
pub enum ClosingBrace {
None,
PartialClose, // just }
FullClose, // %}
}

#[derive(Debug)]
pub struct TemplateTagContext {
pub partial_tag: String,
pub closing_brace: ClosingBrace,
pub needs_leading_space: bool,
}
@@ -1,6 +0,0 @@
mod document;
mod store;
mod utils;

pub use store::Store;
pub use utils::get_project_path;
@@ -1,43 +0,0 @@
use std::path::PathBuf;

use percent_encoding::percent_decode_str;
use tower_lsp_server::lsp_types::InitializeParams;
use tower_lsp_server::lsp_types::Uri;

/// Determines the project root path from initialization parameters.
///
/// Tries the current directory first, then falls back to the first workspace folder.
pub fn get_project_path(params: &InitializeParams) -> Option<PathBuf> {
// Try current directory first
std::env::current_dir().ok().or_else(|| {
// Fall back to the first workspace folder URI
params
.workspace_folders
.as_ref()
.and_then(|folders| folders.first())
.and_then(|folder| uri_to_pathbuf(&folder.uri))
})
}

/// Converts a `file:` URI into an absolute `PathBuf`.
fn uri_to_pathbuf(uri: &Uri) -> Option<PathBuf> {
// Check if the scheme is "file"
if uri.scheme().is_none_or(|s| s.as_str() != "file") {
return None;
}

// Get the path part as a string
let encoded_path_str = uri.path().as_str();

// Decode the percent-encoded path string
let decoded_path_cow = percent_decode_str(encoded_path_str).decode_utf8_lossy();
let path_str = decoded_path_cow.as_ref();

#[cfg(windows)]
let path_str = {
// Remove leading '/' for paths like /C:/...
path_str.strip_prefix('/').unwrap_or(path_str)
};

Some(PathBuf::from(path_str))
}
@@ -1,4 +1,4 @@
mod ast;
pub mod ast;
mod error;
mod lexer;
mod parser;

@@ -5,6 +5,7 @@ edition = "2021"

[dependencies]
djls-templates = { workspace = true }
djls-project = { workspace = true }

anyhow = { workspace = true }
camino = { workspace = true }

@@ -12,6 +13,7 @@ dashmap = { workspace = true }
notify = { workspace = true }
salsa = { workspace = true }
tokio = { workspace = true }
tower-lsp-server = { workspace = true }
url = { workspace = true }

[dev-dependencies]
@@ -4,15 +4,20 @@
//! It ensures we only touch Salsa when content or classification changes, maximizing
//! incremental performance.

use std::{collections::HashMap, sync::Arc};
use std::collections::HashMap;
use std::sync::Arc;

use salsa::Setter;

use super::{
db::{parse_template, template_errors, Database, SourceFile, TemplateAst, TemplateLoaderOrder},
vfs::{FileKind, VfsSnapshot},
FileId,
};
use super::db::parse_template;
use super::db::template_errors;
use super::db::Database;
use super::db::SourceFile;
use super::db::TemplateAst;
use super::db::TemplateLoaderOrder;
use super::vfs::FileKind;
use super::vfs::VfsSnapshot;
use super::FileId;

/// Owner of the Salsa [`Database`] plus the handles for updating inputs.
///

@@ -63,7 +68,7 @@ impl FileStore {
///
/// The method is idempotent and minimizes Salsa invalidations by checking for
/// actual changes before updating inputs.
pub fn apply_vfs_snapshot(&mut self, snap: &VfsSnapshot) {
pub(crate) fn apply_vfs_snapshot(&mut self, snap: &VfsSnapshot) {
for (id, rec) in &snap.files {
let new_text = snap.get_text(*id).unwrap_or_else(|| Arc::<str>::from(""));
let new_kind = rec.meta.kind;

@@ -86,14 +91,14 @@ impl FileStore {
/// Get the text content of a file by its [`FileId`].
///
/// Returns `None` if the file is not tracked in the [`FileStore`].
pub fn file_text(&self, id: FileId) -> Option<Arc<str>> {
pub(crate) fn file_text(&self, id: FileId) -> Option<Arc<str>> {
self.files.get(&id).map(|sf| sf.text(&self.db).clone())
}

/// Get the file kind classification by its [`FileId`].
///
/// Returns `None` if the file is not tracked in the [`FileStore`].
pub fn file_kind(&self, id: FileId) -> Option<FileKind> {
pub(crate) fn file_kind(&self, id: FileId) -> Option<FileKind> {
self.files.get(&id).map(|sf| sf.kind(&self.db))
}

@@ -102,7 +107,7 @@ impl FileStore {
/// This method leverages Salsa's incremental computation to cache parsed ASTs.
/// The AST is only re-parsed when the file's content changes in the VFS.
/// Returns `None` if the file is not tracked or is not a template file.
pub fn get_template_ast(&self, id: FileId) -> Option<Arc<TemplateAst>> {
pub(crate) fn get_template_ast(&self, id: FileId) -> Option<Arc<TemplateAst>> {
let source_file = self.files.get(&id)?;
parse_template(&self.db, *source_file)
}

@@ -112,7 +117,7 @@ impl FileStore {
/// This method provides quick access to template errors without needing the full AST.
/// Useful for diagnostics and error reporting. Returns an empty slice for
/// non-template files or files not tracked in the store.
pub fn get_template_errors(&self, id: FileId) -> Arc<[String]> {
pub(crate) fn get_template_errors(&self, id: FileId) -> Arc<[String]> {
self.files
.get(&id)
.map_or_else(|| Arc::from(vec![]), |sf| template_errors(&self.db, *sf))

@@ -127,10 +132,12 @@ impl Default for FileStore {

#[cfg(test)]
mod tests {
use super::*;
use crate::vfs::{TextSource, Vfs};
use camino::Utf8PathBuf;

use super::*;
use crate::vfs::TextSource;
use crate::vfs::Vfs;

#[test]
fn test_filestore_template_ast_caching() {
let mut store = FileStore::new();

@@ -7,6 +7,8 @@ use std::sync::Arc;
#[cfg(test)]
use std::sync::Mutex;

use djls_templates::Ast;

use crate::vfs::FileKind;

/// Salsa database root for workspace

@@ -139,9 +141,10 @@ pub fn template_errors(db: &dyn salsa::Database, file: SourceFile) -> Arc<[Strin

#[cfg(test)]
mod tests {
use super::*;
use salsa::Setter;

use super::*;

#[test]
fn test_template_parsing_caches_result() {
let db = Database::default();
crates/djls-workspace/src/document/language.rs (new file, 34 additions)

@@ -0,0 +1,34 @@
use crate::vfs::FileKind;

#[derive(Clone, Debug, PartialEq)]
pub enum LanguageId {
HtmlDjango,
Other,
Python,
}

impl From<&str> for LanguageId {
fn from(language_id: &str) -> Self {
match language_id {
"django-html" | "htmldjango" => Self::HtmlDjango,
"python" => Self::Python,
_ => Self::Other,
}
}
}

impl From<String> for LanguageId {
fn from(language_id: String) -> Self {
Self::from(language_id.as_str())
}
}

impl From<LanguageId> for FileKind {
fn from(language_id: LanguageId) -> Self {
match language_id {
LanguageId::Python => Self::Python,
LanguageId::HtmlDjango => Self::Template,
LanguageId::Other => Self::Other,
}
}
}
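Note (illustrative, not part of the commit): a quick sanity check of the LanguageId mapping above, assuming the type is in scope.

// Sketch only: editors report Django templates under either "django-html" or "htmldjango".
fn demo_language_id_mapping() {
    assert_eq!(LanguageId::from("django-html"), LanguageId::HtmlDjango);
    assert_eq!(LanguageId::from("htmldjango"), LanguageId::HtmlDjango);
    assert_eq!(LanguageId::from("rust"), LanguageId::Other);
}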
crates/djls-workspace/src/document/line_index.rs (new file, 87 additions)

@@ -0,0 +1,87 @@
use tower_lsp_server::lsp_types::Position;

#[derive(Clone, Debug)]
pub struct LineIndex {
pub line_starts: Vec<u32>,
pub line_starts_utf16: Vec<u32>,
pub length: u32,
pub length_utf16: u32,
}

impl LineIndex {
pub fn new(text: &str) -> Self {
let mut line_starts = vec![0];
let mut line_starts_utf16 = vec![0];
let mut pos_utf8 = 0;
let mut pos_utf16 = 0;

for c in text.chars() {
pos_utf8 += u32::try_from(c.len_utf8()).unwrap_or(0);
pos_utf16 += u32::try_from(c.len_utf16()).unwrap_or(0);
if c == '\n' {
line_starts.push(pos_utf8);
line_starts_utf16.push(pos_utf16);
}
}

Self {
line_starts,
line_starts_utf16,
length: pos_utf8,
length_utf16: pos_utf16,
}
}

pub fn offset(&self, position: Position) -> Option<u32> {
let line_start = self.line_starts.get(position.line as usize)?;

Some(line_start + position.character)
}

/// Convert UTF-16 LSP position to UTF-8 byte offset
pub fn offset_utf16(&self, position: Position, text: &str) -> Option<u32> {
let line_start_utf8 = self.line_starts.get(position.line as usize)?;
let _line_start_utf16 = self.line_starts_utf16.get(position.line as usize)?;

// If position is at start of line, return UTF-8 line start
if position.character == 0 {
return Some(*line_start_utf8);
}

// Find the line text
let next_line_start = self
.line_starts
.get(position.line as usize + 1)
.copied()
.unwrap_or(self.length);

let line_text = text.get(*line_start_utf8 as usize..next_line_start as usize)?;

// Convert UTF-16 character offset to UTF-8 byte offset within the line
let mut utf16_pos = 0;
let mut utf8_pos = 0;

for c in line_text.chars() {
if utf16_pos >= position.character {
break;
}
utf16_pos += u32::try_from(c.len_utf16()).unwrap_or(0);
utf8_pos += u32::try_from(c.len_utf8()).unwrap_or(0);
}

Some(line_start_utf8 + utf8_pos)
}

#[allow(dead_code)]
pub fn position(&self, offset: u32) -> Position {
let line = match self.line_starts.binary_search(&offset) {
Ok(line) => line,
Err(line) => line - 1,
};

let line_start = self.line_starts[line];
let character = offset - line_start;

Position::new(u32::try_from(line).unwrap_or(0), character)
}
}
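Note (illustrative, not part of the commit): a minimal usage sketch of the LineIndex above, assuming the type is in scope. It shows the UTF-16 to UTF-8 conversion that offset_utf16 performs when a line contains a multi-byte character.

// Sketch only: 'é' is 2 bytes in UTF-8 but 1 code unit in UTF-16.
use tower_lsp_server::lsp_types::Position;

fn demo_line_index() {
    let text = "héllo\nworld\n";
    let index = LineIndex::new(text);

    // UTF-16 column 2 (after "hé") corresponds to UTF-8 byte offset 3.
    assert_eq!(index.offset_utf16(Position::new(0, 2), text), Some(3));

    // The second line starts after the 7-byte "héllo\n".
    assert_eq!(index.line_starts[1], 7);
}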
crates/djls-workspace/src/document/mod.rs (new file, 132 additions)

@@ -0,0 +1,132 @@
mod language;
mod line_index;
mod store;
mod template;

pub use language::LanguageId;
pub use line_index::LineIndex;
pub use store::DocumentStore;
pub use template::ClosingBrace;
pub use template::TemplateTagContext;
use tower_lsp_server::lsp_types::Position;
use tower_lsp_server::lsp_types::Range;

use crate::FileId;

#[derive(Clone, Debug)]
pub struct TextDocument {
pub uri: String,
pub version: i32,
pub language_id: LanguageId,
pub(crate) file_id: FileId,
line_index: LineIndex,
}

impl TextDocument {
pub(crate) fn new(
uri: String,
version: i32,
language_id: LanguageId,
file_id: FileId,
content: &str,
) -> Self {
let line_index = LineIndex::new(content);
Self {
uri,
version,
language_id,
file_id,
line_index,
}
}

pub(crate) fn file_id(&self) -> FileId {
self.file_id
}

pub fn line_index(&self) -> &LineIndex {
&self.line_index
}

pub fn get_content<'a>(&self, content: &'a str) -> &'a str {
content
}

pub fn get_line(&self, content: &str, line: u32) -> Option<String> {
let line_start = *self.line_index.line_starts.get(line as usize)?;
let line_end = self
.line_index
.line_starts
.get(line as usize + 1)
.copied()
.unwrap_or(self.line_index.length);

Some(content[line_start as usize..line_end as usize].to_string())
}

pub fn get_text_range(&self, content: &str, range: Range) -> Option<String> {
let start_offset = self.line_index.offset(range.start)? as usize;
let end_offset = self.line_index.offset(range.end)? as usize;

Some(content[start_offset..end_offset].to_string())
}

pub fn get_template_tag_context(
&self,
content: &str,
position: Position,
) -> Option<TemplateTagContext> {
let start = self.line_index.line_starts.get(position.line as usize)?;
let end = self
.line_index
.line_starts
.get(position.line as usize + 1)
.copied()
.unwrap_or(self.line_index.length);

let line = &content[*start as usize..end as usize];
let char_pos: usize = position.character.try_into().ok()?;
let prefix = &line[..char_pos];
let rest_of_line = &line[char_pos..];
let rest_trimmed = rest_of_line.trim_start();

prefix.rfind("{%").map(|tag_start| {
// Check if we're immediately after {% with no space
let needs_leading_space = prefix.ends_with("{%");

let closing_brace = if rest_trimmed.starts_with("%}") {
ClosingBrace::FullClose
} else if rest_trimmed.starts_with('}') {
ClosingBrace::PartialClose
} else {
ClosingBrace::None
};

TemplateTagContext {
partial_tag: prefix[tag_start + 2..].trim().to_string(),
closing_brace,
needs_leading_space,
}
})
}

pub fn position_to_offset(&self, position: Position) -> Option<u32> {
self.line_index.offset(position)
}

pub fn offset_to_position(&self, offset: u32) -> Position {
self.line_index.position(offset)
}

pub fn update_content(&mut self, content: &str) {
self.line_index = LineIndex::new(content);
}

pub fn version(&self) -> i32 {
self.version
}

pub fn language_id(&self) -> LanguageId {
self.language_id.clone()
}
}
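Note (illustrative, not part of the commit): a sketch of what get_template_tag_context returns, assuming `doc` is a TextDocument whose backing content is the single line "{% loa" and the cursor sits at column 6.

// Sketch only: the partially typed tag "loa" is extracted, a space already
// follows "{%", and no closing "%}" exists yet on the line.
use tower_lsp_server::lsp_types::Position;

fn demo_template_tag_context(doc: &TextDocument) {
    let content = "{% loa";
    let ctx = doc
        .get_template_tag_context(content, Position::new(0, 6))
        .expect("cursor is inside a {% ... tag");

    assert_eq!(ctx.partial_tag, "loa");
    assert!(!ctx.needs_leading_space);
    assert!(matches!(ctx.closing_brace, ClosingBrace::None));
}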
@@ -1,11 +1,11 @@
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::sync::Arc;
use std::sync::Mutex;

use anyhow::anyhow;
use anyhow::Result;
use camino::Utf8PathBuf;
use djls_project::TemplateTags;
use djls_workspace::{FileId, FileKind, FileStore, TextSource, Vfs};
use tower_lsp_server::lsp_types::CompletionItem;
use tower_lsp_server::lsp_types::CompletionItemKind;
use tower_lsp_server::lsp_types::CompletionResponse;

@@ -22,31 +22,118 @@ use tower_lsp_server::lsp_types::Position;
use tower_lsp_server::lsp_types::Range;
use tower_lsp_server::lsp_types::TextDocumentContentChangeEvent;

use super::document::{ClosingBrace, LanguageId, LineIndex, TextDocument};
use crate::bridge::FileStore;
use crate::db::TemplateAst;
use crate::vfs::FileKind;
use crate::vfs::TextSource;
use crate::vfs::Vfs;
use crate::ClosingBrace;
use crate::LanguageId;
use crate::LineIndex;
use crate::TextDocument;

pub struct Store {
pub struct DocumentStore {
vfs: Arc<Vfs>,
file_store: Arc<Mutex<FileStore>>,
file_ids: HashMap<String, FileId>,
line_indices: HashMap<FileId, LineIndex>,
versions: HashMap<String, i32>,
documents: HashMap<String, TextDocument>,
}

impl Default for Store {
impl Default for DocumentStore {
fn default() -> Self {
Self {
vfs: Arc::new(Vfs::default()),
file_store: Arc::new(Mutex::new(FileStore::new())),
file_ids: HashMap::new(),
line_indices: HashMap::new(),
versions: HashMap::new(),
documents: HashMap::new(),
}
}
}

impl Store {
impl DocumentStore {
pub fn new() -> Self {
Self::default()
}

/// Open a document with the given URI, version, language, and text content.
/// This creates a new TextDocument and stores it internally, hiding VFS details.
pub fn open_document(
&mut self,
uri: url::Url,
version: i32,
language_id: LanguageId,
text: String,
) -> Result<()> {
let uri_str = uri.to_string();
let kind = FileKind::from(language_id.clone());

// Convert URI to path - simplified for now, just use URI string
let path = Utf8PathBuf::from(uri.as_str());

// Store content in VFS
let text_source = TextSource::Overlay(Arc::from(text.as_str()));
let file_id = self.vfs.intern_file(uri, path, kind, text_source);

// Set overlay content in VFS
self.vfs.set_overlay(file_id, Arc::from(text.as_str()))?;

// Sync VFS snapshot to FileStore for Salsa tracking
let snapshot = self.vfs.snapshot();
let mut file_store = self.file_store.lock().unwrap();
file_store.apply_vfs_snapshot(&snapshot);

// Create TextDocument with LineIndex
let document = TextDocument::new(uri_str.clone(), version, language_id, file_id, &text);
self.documents.insert(uri_str, document);

Ok(())
}

/// Update a document with the given URI, version, and text changes.
/// This applies changes to the document and updates the VFS accordingly.
pub fn update_document(
&mut self,
uri: &url::Url,
version: i32,
changes: Vec<TextDocumentContentChangeEvent>,
) -> Result<()> {
let uri_str = uri.to_string();

// Get document and file_id from the documents HashMap
let document = self
.documents
.get(&uri_str)
.ok_or_else(|| anyhow!("Document not found: {}", uri_str))?;
let file_id = document.file_id();

// Get current content from VFS
let snapshot = self.vfs.snapshot();
let current_content = snapshot
.get_text(file_id)
.ok_or_else(|| anyhow!("File content not found: {}", uri_str))?;

// Get line index from the document
let line_index = document.line_index();

// Apply text changes using the existing function
let new_content = apply_text_changes(&current_content, &changes, line_index)?;

// Update TextDocument version and content
if let Some(document) = self.documents.get_mut(&uri_str) {
document.version = version;
document.update_content(&new_content);
}

// Update VFS with new content
self.vfs
.set_overlay(file_id, Arc::from(new_content.as_str()))?;

// Sync VFS snapshot to FileStore for Salsa tracking
let snapshot = self.vfs.snapshot();
let mut file_store = self.file_store.lock().unwrap();
file_store.apply_vfs_snapshot(&snapshot);

Ok(())
}

pub fn handle_did_open(&mut self, params: &DidOpenTextDocumentParams) -> Result<()> {
let uri_str = params.text_document.uri.to_string();
let uri = params.text_document.uri.clone();

@@ -75,13 +162,14 @@ impl Store {
file_store.apply_vfs_snapshot(&snapshot);

// Create TextDocument metadata
let document = TextDocument::new(uri_str.clone(), version, language_id.clone(), file_id);
self.documents.insert(uri_str.clone(), document);

// Cache mappings and indices
self.file_ids.insert(uri_str.clone(), file_id);
self.line_indices.insert(file_id, LineIndex::new(&content));
self.versions.insert(uri_str, version);
let document = TextDocument::new(
uri_str.clone(),
version,
language_id.clone(),
file_id,
&content,
);
self.documents.insert(uri_str, document);

Ok(())
}

@@ -90,12 +178,12 @@ impl Store {
let uri_str = params.text_document.uri.as_str().to_string();
let version = params.text_document.version;

// Look up FileId
let file_id = self
.file_ids
// Get document and file_id from the documents HashMap
let document = self
.documents
.get(&uri_str)
.copied()
.ok_or_else(|| anyhow!("Document not found: {}", uri_str))?;
let file_id = document.file_id();

// Get current content from VFS
let snapshot = self.vfs.snapshot();

@@ -103,19 +191,17 @@ impl Store {
.get_text(file_id)
.ok_or_else(|| anyhow!("File content not found: {}", uri_str))?;

// Get current line index for position calculations
let line_index = self
.line_indices
.get(&file_id)
.ok_or_else(|| anyhow!("Line index not found for: {}", uri_str))?;
// Get line index from the document (TextDocument now stores its own LineIndex)
let line_index = document.line_index();

// Apply text changes using the new function
let new_content =
apply_text_changes(&current_content, &params.content_changes, line_index)?;

// Update TextDocument version
// Update TextDocument version and content
if let Some(document) = self.documents.get_mut(&uri_str) {
document.version = version;
document.update_content(&new_content);
}

// Update VFS with new content

@@ -127,41 +213,38 @@ impl Store {
let mut file_store = self.file_store.lock().unwrap();
file_store.apply_vfs_snapshot(&snapshot);

// Update cached line index and version
self.line_indices
.insert(file_id, LineIndex::new(&new_content));
self.versions.insert(uri_str, version);

Ok(())
}

/// Close a document with the given URI.
/// This removes the document from internal storage and cleans up resources.
pub fn close_document(&mut self, uri: &url::Url) {
let uri_str = uri.as_str();

// Remove TextDocument metadata
self.documents.remove(uri_str);

// Note: We don't remove from VFS as it might be useful for caching
// The VFS will handle cleanup internally
}

pub fn handle_did_close(&mut self, params: &DidCloseTextDocumentParams) {
let uri_str = params.text_document.uri.as_str();

// Remove TextDocument metadata
self.documents.remove(uri_str);

// Look up FileId and remove mappings
if let Some(file_id) = self.file_ids.remove(uri_str) {
self.line_indices.remove(&file_id);
}
self.versions.remove(uri_str);

// Note: We don't remove from VFS as it might be useful for caching
// The VFS will handle cleanup internally
}

pub fn get_file_id(&self, uri: &str) -> Option<FileId> {
self.file_ids.get(uri).copied()
}

pub fn get_line_index(&self, file_id: FileId) -> Option<&LineIndex> {
self.line_indices.get(&file_id)
pub fn get_line_index(&self, uri: &str) -> Option<&LineIndex> {
self.documents.get(uri).map(|doc| doc.line_index())
}

#[allow(dead_code)]
pub fn get_version(&self, uri: &str) -> Option<i32> {
self.versions.get(uri).copied()
self.documents.get(uri).map(|doc| doc.version())
}

#[allow(dead_code)]

@@ -178,6 +261,99 @@ impl Store {
self.documents.get_mut(uri)
}

// URI-based query methods (new API)
pub fn get_document_by_url(&self, uri: &url::Url) -> Option<&TextDocument> {
self.get_document(uri.as_str())
}

pub fn get_document_text(&self, uri: &url::Url) -> Option<Arc<str>> {
let document = self.get_document_by_url(uri)?;
let file_id = document.file_id();
let snapshot = self.vfs.snapshot();
snapshot.get_text(file_id)
}

pub fn get_line_text(&self, uri: &url::Url, line: u32) -> Option<String> {
let document = self.get_document_by_url(uri)?;
let snapshot = self.vfs.snapshot();
let content = snapshot.get_text(document.file_id())?;
document.get_line(content.as_ref(), line)
}

pub fn get_word_at_position(&self, uri: &url::Url, position: Position) -> Option<String> {
// This is a simplified implementation - get the line and extract word at position
let line_text = self.get_line_text(uri, position.line)?;
let char_pos: usize = position.character.try_into().ok()?;

if char_pos >= line_text.len() {
return None;
}

// Find word boundaries (simplified - considers alphanumeric and underscore as word chars)
let line_bytes = line_text.as_bytes();
let mut start = char_pos;
let mut end = char_pos;

// Find start of word
while start > 0 && is_word_char(line_bytes[start - 1]) {
start -= 1;
}

// Find end of word
while end < line_text.len() && is_word_char(line_bytes[end]) {
end += 1;
}

if start < end {
Some(line_text[start..end].to_string())
} else {
None
}
}

// Position mapping methods
pub fn offset_to_position(&self, uri: &url::Url, offset: usize) -> Option<Position> {
let document = self.get_document_by_url(uri)?;
Some(document.offset_to_position(offset as u32))
}

pub fn position_to_offset(&self, uri: &url::Url, position: Position) -> Option<usize> {
let document = self.get_document_by_url(uri)?;
document
.position_to_offset(position)
.map(|offset| offset as usize)
}

// Template-specific methods
pub fn get_template_ast(&self, uri: &url::Url) -> Option<Arc<TemplateAst>> {
let document = self.get_document_by_url(uri)?;
let file_id = document.file_id();
let file_store = self.file_store.lock().unwrap();
file_store.get_template_ast(file_id)
}

pub fn get_template_errors(&self, uri: &url::Url) -> Vec<String> {
let document = match self.get_document_by_url(uri) {
Some(doc) => doc,
None => return vec![],
};
let file_id = document.file_id();
let file_store = self.file_store.lock().unwrap();
let errors = file_store.get_template_errors(file_id);
errors.to_vec()
}

pub fn get_template_context(
&self,
uri: &url::Url,
position: Position,
) -> Option<crate::TemplateTagContext> {
let document = self.get_document_by_url(uri)?;
let snapshot = self.vfs.snapshot();
let content = snapshot.get_text(document.file_id())?;
document.get_template_tag_context(content.as_ref(), position)
}

pub fn get_completions(
&self,
uri: &str,

@@ -186,7 +362,7 @@ impl Store {
) -> Option<CompletionResponse> {
// Check if this is a Django template using TextDocument metadata
let document = self.get_document(uri)?;
if document.language_id != LanguageId::HtmlDjango {
if document.language_id() != LanguageId::HtmlDjango {
return None;
}

@@ -202,8 +378,9 @@ impl Store {

// Get template tag context from document
let vfs_snapshot = self.vfs.snapshot();
let line_index = self.get_line_index(file_id)?;
let context = document.get_template_tag_context(&vfs_snapshot, line_index, position)?;
let text_content = vfs_snapshot.get_text(file_id)?;
let content = text_content.as_ref();
let context = document.get_template_tag_context(content, position)?;

let mut completions: Vec<CompletionItem> = tags
.iter()

@@ -252,12 +429,12 @@ impl Store {
};

// Only process template files
if document.language_id != LanguageId::HtmlDjango {
if document.language_id() != LanguageId::HtmlDjango {
return vec![];
}

let file_id = document.file_id();
let Some(_line_index) = self.get_line_index(file_id) else {
let Some(_line_index) = self.get_line_index(uri) else {
return vec![];
};

@@ -294,6 +471,11 @@ impl Store {
}
}

/// Check if a byte represents a word character (alphanumeric or underscore)
fn is_word_char(byte: u8) -> bool {
byte.is_ascii_alphanumeric() || byte == b'_'
}

/// Apply text changes to content, handling multiple changes correctly
fn apply_text_changes(
content: &str,

@@ -360,9 +542,10 @@ fn apply_text_changes(

#[cfg(test)]
mod tests {
use super::*;
use tower_lsp_server::lsp_types::Range;

use super::*;

#[test]
fn test_apply_single_character_insertion() {
let content = "Hello world";
crates/djls-workspace/src/document/template.rs (new file, 13 additions)

@@ -0,0 +1,13 @@
#[derive(Debug)]
pub enum ClosingBrace {
None,
PartialClose, // just }
FullClose, // %}
}

#[derive(Debug)]
pub struct TemplateTagContext {
pub partial_tag: String,
pub closing_brace: ClosingBrace,
pub needs_leading_space: bool,
}
@@ -1,14 +1,14 @@
mod bridge;
mod db;
mod document;
mod vfs;
mod watcher;

pub use bridge::FileStore;
pub use db::{
parse_template, template_errors, Database, SourceFile, TemplateAst, TemplateLoaderOrder,
};
pub use vfs::{FileKind, FileMeta, FileRecord, Revision, TextSource, Vfs, VfsSnapshot};
pub use watcher::{VfsWatcher, WatchConfig, WatchEvent};
pub use document::ClosingBrace;
pub use document::DocumentStore;
pub use document::LanguageId;
pub use document::LineIndex;
pub use document::TemplateTagContext;
pub use document::TextDocument;

/// Stable, compact identifier for files across the subsystem.
///

@@ -16,7 +16,7 @@ pub use watcher::{VfsWatcher, WatchConfig, WatchEvent};
/// Salsa inputs. Once assigned to a file (via its URI), a [`FileId`] remains stable for the
/// lifetime of the VFS, even if the file's content or metadata changes.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct FileId(u32);
pub(crate) struct FileId(u32);

impl FileId {
/// Create a [`FileId`] from a raw u32 value.
@@ -4,25 +4,28 @@
//! and snapshotting. Downstream systems consume snapshots to avoid locking and to
//! batch updates.

use anyhow::{anyhow, Result};
mod watcher;

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::fs;
use std::hash::Hash;
use std::hash::Hasher;
use std::sync::atomic::AtomicU32;
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
use std::sync::Arc;

use anyhow::anyhow;
use anyhow::Result;
use camino::Utf8PathBuf;
use dashmap::DashMap;
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
use std::{
collections::HashMap,
sync::{
atomic::{AtomicU32, AtomicU64, Ordering},
Arc,
},
};
use url::Url;
use watcher::VfsWatcher;
use watcher::WatchConfig;
use watcher::WatchEvent;

use super::{
watcher::{VfsWatcher, WatchConfig, WatchEvent},
FileId,
};
use super::FileId;

/// Monotonic counter representing global VFS state.
///

@@ -30,18 +33,18 @@ use super::{
/// This provides a cheap way to detect if any changes have occurred since
/// a previous snapshot was taken.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default, PartialOrd, Ord)]
pub struct Revision(u64);
pub(crate) struct Revision(u64);

impl Revision {
/// Create a [`Revision`] from a raw u64 value.
#[must_use]
pub fn from_raw(raw: u64) -> Self {
fn from_raw(raw: u64) -> Self {
Revision(raw)
}

/// Get the underlying u64 value.
#[must_use]
pub fn value(self) -> u64 {
fn value(self) -> u64 {
self.0
}
}

@@ -65,11 +68,11 @@ pub enum FileKind {
/// [`FileMeta`] contains all non-content information about a file, including its
/// identity (URI), filesystem path, and classification.
#[derive(Clone, Debug)]
pub struct FileMeta {
pub(crate) struct FileMeta {
/// The file's URI (typically file:// scheme)
pub uri: Url,
uri: Url,
/// The file's path in the filesystem
pub path: Utf8PathBuf,
path: Utf8PathBuf,
/// Classification for routing to analyzers
pub kind: FileKind,
}

@@ -80,7 +83,7 @@ pub struct FileMeta {
/// debugging and understanding the current state of the VFS. All variants hold
/// `Arc<str>` for efficient sharing.
#[derive(Clone)]
pub enum TextSource {
pub(crate) enum TextSource {
/// Content loaded from disk
Disk(Arc<str>),
/// Content from LSP client overlay (in-memory edits)

@@ -89,18 +92,47 @@ pub enum TextSource {
Generated(Arc<str>),
}

/// Content hash for efficient change detection.
///
/// [`FileHash`] encapsulates the hashing logic used to detect when file content
/// has changed, avoiding unnecessary recomputation in downstream systems like Salsa.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct FileHash(u64);

impl FileHash {
/// Compute hash from text source content.
fn from_text_source(src: &TextSource) -> Self {
let s: &str = match src {
TextSource::Disk(s) | TextSource::Overlay(s) | TextSource::Generated(s) => s,
};
let mut h = DefaultHasher::new();
s.hash(&mut h);
Self(h.finish())
}

/// Check if this hash differs from another, indicating content changed.
fn differs_from(self, other: Self) -> bool {
self.0 != other.0
}

/// Get raw hash value (for debugging/logging).
fn raw(self) -> u64 {
self.0
}
}

/// Complete record of a file in the VFS.
///
/// [`FileRecord`] combines metadata, current text content, and a content hash
/// for efficient change detection.
#[derive(Clone)]
pub struct FileRecord {
pub(crate) struct FileRecord {
/// File metadata (URI, path, kind, version)
pub meta: FileMeta,
/// Current text content and its source
pub text: TextSource,
text: TextSource,
/// Hash of current content for change detection
pub hash: u64,
hash: FileHash,
}

/// Thread-safe virtual file system with change tracking.

@@ -129,7 +161,7 @@ impl Vfs {
/// Returns the existing [`FileId`] if the URI is already known, or creates a new
/// [`FileRecord`] with the provided metadata and text. This method computes and
/// stores a content hash for change detection.
pub fn intern_file(
pub(crate) fn intern_file(
&self,
uri: Url,
path: Utf8PathBuf,

@@ -145,7 +177,7 @@ impl Vfs {
path: path.clone(),
kind,
};
let hash = content_hash(&text);
let hash = FileHash::from_text_source(&text);
self.by_uri.insert(uri, id);
self.by_path.insert(path, id);
self.files.insert(id, FileRecord { meta, text, hash });

@@ -159,14 +191,14 @@ impl Vfs {
/// (detected via hash comparison).
///
/// Returns a tuple of (new global revision, whether content changed).
pub fn set_overlay(&self, id: FileId, new_text: Arc<str>) -> Result<(Revision, bool)> {
pub(crate) fn set_overlay(&self, id: FileId, new_text: Arc<str>) -> Result<(Revision, bool)> {
let mut rec = self
.files
.get_mut(&id)
.ok_or_else(|| anyhow!("unknown file: {:?}", id))?;
let next = TextSource::Overlay(new_text);
let new_hash = content_hash(&next);
let changed = new_hash != rec.hash;
let new_hash = FileHash::from_text_source(&next);
let changed = new_hash.differs_from(rec.hash);
if changed {
rec.text = next;
rec.hash = new_hash;

@@ -183,7 +215,7 @@ impl Vfs {
/// Materializes a consistent view of all files for downstream consumers.
/// The snapshot includes the current revision and a clone of all file records.
/// This operation is relatively cheap due to `Arc` sharing of text content.
pub fn snapshot(&self) -> VfsSnapshot {
pub(crate) fn snapshot(&self) -> VfsSnapshot {
VfsSnapshot {
revision: Revision::from_raw(self.head.load(Ordering::SeqCst)),
files: self

@@ -268,11 +300,11 @@ impl Vfs {
.map_err(|e| anyhow!("Failed to read file {}: {}", path, e))?;

let new_text = TextSource::Disk(Arc::from(content.as_str()));
let new_hash = content_hash(&new_text);
let new_hash = FileHash::from_text_source(&new_text);

// Update the file if content changed
if let Some(mut record) = self.files.get_mut(&file_id) {
if record.hash != new_hash {
if new_hash.differs_from(record.hash) {
record.text = new_text;
record.hash = new_hash;
self.head.fetch_add(1, Ordering::SeqCst);

@@ -301,28 +333,15 @@ impl Default for Vfs {
}
}

/// Compute a stable hash over file content.
///
/// Used for efficient change detection - if the hash hasn't changed,
/// the content hasn't changed, avoiding unnecessary Salsa invalidations.
fn content_hash(src: &TextSource) -> u64 {
let s: &str = match src {
TextSource::Disk(s) | TextSource::Overlay(s) | TextSource::Generated(s) => s,
};
let mut h = DefaultHasher::new();
s.hash(&mut h);
h.finish()
}

/// Immutable snapshot view of the VFS at a specific revision.
///
/// [`VfsSnapshot`] provides a consistent view of all files for downstream consumers,
/// avoiding the need for locking during processing. Snapshots are created atomically
/// and can be safely shared across threads.
#[derive(Clone)]
pub struct VfsSnapshot {
pub(crate) struct VfsSnapshot {
/// The global revision at the time of snapshot
pub revision: Revision,
revision: Revision,
/// All files in the VFS at snapshot time
pub files: HashMap<FileId, FileRecord>,
}
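Note (illustrative, not part of the commit): the change-detection idea behind FileHash, restated as a standalone sketch; the function name is hypothetical.

// Sketch only: identical content hashes identically, so the VFS can skip
// bumping its revision and avoid invalidating Salsa inputs.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn content_fingerprint(text: &str) -> u64 {
    let mut hasher = DefaultHasher::new();
    text.hash(&mut hasher);
    hasher.finish()
}

fn demo_change_detection() {
    let before = content_fingerprint("{% block content %}");
    let after = content_fingerprint("{% block content %}");
    assert_eq!(before, after); // unchanged content, no invalidation needed
}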
@@ -4,15 +4,21 @@
//! and synchronize them with the VFS. It uses cross-platform file watching with
//! debouncing to handle rapid changes efficiently.

use anyhow::{anyhow, Result};
use std::collections::HashMap;
use std::sync::mpsc;
use std::thread;
use std::time::Duration;
use std::time::Instant;

use anyhow::anyhow;
use anyhow::Result;
use camino::Utf8PathBuf;
use notify::{Config, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
use std::{
collections::HashMap,
sync::mpsc,
thread,
time::{Duration, Instant},
};
use notify::Config;
use notify::Event;
use notify::EventKind;
use notify::RecommendedWatcher;
use notify::RecursiveMode;
use notify::Watcher;

/// Event types that can occur in the file system.
///