mirror of
https://github.com/joshuadavidthomas/django-language-server.git
synced 2025-09-11 04:46:38 +00:00
wip
This commit is contained in:
parent
541200cbb1
commit
3131470cce
19 changed files with 761 additions and 430 deletions
|
@ -3,7 +3,6 @@ mod logging;
|
|||
mod queue;
|
||||
mod server;
|
||||
mod session;
|
||||
mod workspace;
|
||||
|
||||
use std::io::IsTerminal;
|
||||
|
||||
|
|
|
@ -218,8 +218,23 @@ impl LanguageServer for DjangoLanguageServer {
|
|||
tracing::info!("Opened document: {:?}", params.text_document.uri);
|
||||
|
||||
self.with_session_mut(|session| {
|
||||
if let Err(e) = session.documents_mut().handle_did_open(¶ms) {
|
||||
tracing::error!("Failed to handle did_open: {}", e);
|
||||
let uri = params.text_document.uri.clone();
|
||||
let version = params.text_document.version;
|
||||
let language_id =
|
||||
djls_workspace::LanguageId::from(params.text_document.language_id.as_str());
|
||||
let text = params.text_document.text.clone();
|
||||
|
||||
// Convert LSP Uri to url::Url
|
||||
if let Ok(url) = url::Url::parse(&uri.to_string()) {
|
||||
if let Err(e) =
|
||||
session
|
||||
.documents_mut()
|
||||
.open_document(url, version, language_id, text)
|
||||
{
|
||||
tracing::error!("Failed to handle did_open: {}", e);
|
||||
}
|
||||
} else {
|
||||
tracing::error!("Invalid URI: {:?}", uri);
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
@ -229,7 +244,21 @@ impl LanguageServer for DjangoLanguageServer {
|
|||
tracing::info!("Changed document: {:?}", params.text_document.uri);
|
||||
|
||||
self.with_session_mut(|session| {
|
||||
let _ = session.documents_mut().handle_did_change(¶ms);
|
||||
let uri = ¶ms.text_document.uri;
|
||||
let version = params.text_document.version;
|
||||
let changes = params.content_changes.clone();
|
||||
|
||||
// Convert LSP Uri to url::Url
|
||||
if let Ok(url) = url::Url::parse(&uri.to_string()) {
|
||||
if let Err(e) = session
|
||||
.documents_mut()
|
||||
.update_document(&url, version, changes)
|
||||
{
|
||||
tracing::error!("Failed to handle did_change: {}", e);
|
||||
}
|
||||
} else {
|
||||
tracing::error!("Invalid URI: {:?}", uri);
|
||||
}
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
@ -238,7 +267,14 @@ impl LanguageServer for DjangoLanguageServer {
|
|||
tracing::info!("Closed document: {:?}", params.text_document.uri);
|
||||
|
||||
self.with_session_mut(|session| {
|
||||
session.documents_mut().handle_did_close(¶ms);
|
||||
let uri = ¶ms.text_document.uri;
|
||||
|
||||
// Convert LSP Uri to url::Url
|
||||
if let Ok(url) = url::Url::parse(&uri.to_string()) {
|
||||
session.documents_mut().close_document(&url);
|
||||
} else {
|
||||
tracing::error!("Invalid URI: {:?}", uri);
|
||||
}
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
@ -248,14 +284,61 @@ impl LanguageServer for DjangoLanguageServer {
|
|||
.with_session(|session| {
|
||||
if let Some(project) = session.project() {
|
||||
if let Some(tags) = project.template_tags() {
|
||||
return session.documents().get_completions(
|
||||
params.text_document_position.text_document.uri.as_str(),
|
||||
params.text_document_position.position,
|
||||
tags,
|
||||
);
|
||||
let uri = ¶ms.text_document_position.text_document.uri;
|
||||
let position = params.text_document_position.position;
|
||||
|
||||
// Convert LSP Uri to url::Url
|
||||
if let Ok(url) = url::Url::parse(&uri.to_string()) {
|
||||
if let Some(context) = session.documents().get_template_context(&url, position) {
|
||||
// Use the context to generate completions
|
||||
let mut completions: Vec<tower_lsp_server::lsp_types::CompletionItem> = tags
|
||||
.iter()
|
||||
.filter(|tag| {
|
||||
context.partial_tag.is_empty() || tag.name().starts_with(&context.partial_tag)
|
||||
})
|
||||
.map(|tag| {
|
||||
let leading_space = if context.needs_leading_space { " " } else { "" };
|
||||
tower_lsp_server::lsp_types::CompletionItem {
|
||||
label: tag.name().to_string(),
|
||||
kind: Some(tower_lsp_server::lsp_types::CompletionItemKind::KEYWORD),
|
||||
detail: Some(format!("Template tag from {}", tag.library())),
|
||||
documentation: tag.doc().as_ref().map(|doc| {
|
||||
tower_lsp_server::lsp_types::Documentation::MarkupContent(
|
||||
tower_lsp_server::lsp_types::MarkupContent {
|
||||
kind: tower_lsp_server::lsp_types::MarkupKind::Markdown,
|
||||
value: (*doc).to_string(),
|
||||
}
|
||||
)
|
||||
}),
|
||||
insert_text: Some(match context.closing_brace {
|
||||
djls_workspace::ClosingBrace::None => format!("{}{} %}}", leading_space, tag.name()),
|
||||
djls_workspace::ClosingBrace::PartialClose => format!("{}{} %", leading_space, tag.name()),
|
||||
djls_workspace::ClosingBrace::FullClose => format!("{}{} ", leading_space, tag.name()),
|
||||
}),
|
||||
insert_text_format: Some(tower_lsp_server::lsp_types::InsertTextFormat::PLAIN_TEXT),
|
||||
..Default::default()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
if completions.is_empty() {
|
||||
None
|
||||
} else {
|
||||
completions.sort_by(|a, b| a.label.cmp(&b.label));
|
||||
Some(tower_lsp_server::lsp_types::CompletionResponse::Array(completions))
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
})
|
||||
.await)
|
||||
}
|
||||
|
|
|
@ -1,14 +1,17 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use djls_conf::Settings;
|
||||
use djls_project::DjangoProject;
|
||||
use djls_workspace::DocumentStore;
|
||||
use percent_encoding::percent_decode_str;
|
||||
use tower_lsp_server::lsp_types::ClientCapabilities;
|
||||
use tower_lsp_server::lsp_types::InitializeParams;
|
||||
|
||||
use crate::workspace::Store;
|
||||
use tower_lsp_server::lsp_types::Uri;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Session {
|
||||
project: Option<DjangoProject>,
|
||||
documents: Store,
|
||||
documents: DocumentStore,
|
||||
settings: Settings,
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
@ -16,8 +19,46 @@ pub struct Session {
|
|||
}
|
||||
|
||||
impl Session {
|
||||
/// Determines the project root path from initialization parameters.
|
||||
///
|
||||
/// Tries the current directory first, then falls back to the first workspace folder.
|
||||
fn get_project_path(params: &InitializeParams) -> Option<PathBuf> {
|
||||
// Try current directory first
|
||||
std::env::current_dir().ok().or_else(|| {
|
||||
// Fall back to the first workspace folder URI
|
||||
params
|
||||
.workspace_folders
|
||||
.as_ref()
|
||||
.and_then(|folders| folders.first())
|
||||
.and_then(|folder| Self::uri_to_pathbuf(&folder.uri))
|
||||
})
|
||||
}
|
||||
|
||||
/// Converts a `file:` URI into an absolute `PathBuf`.
|
||||
fn uri_to_pathbuf(uri: &Uri) -> Option<PathBuf> {
|
||||
// Check if the scheme is "file"
|
||||
if uri.scheme().is_none_or(|s| s.as_str() != "file") {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Get the path part as a string
|
||||
let encoded_path_str = uri.path().as_str();
|
||||
|
||||
// Decode the percent-encoded path string
|
||||
let decoded_path_cow = percent_decode_str(encoded_path_str).decode_utf8_lossy();
|
||||
let path_str = decoded_path_cow.as_ref();
|
||||
|
||||
#[cfg(windows)]
|
||||
let path_str = {
|
||||
// Remove leading '/' for paths like /C:/...
|
||||
path_str.strip_prefix('/').unwrap_or(path_str)
|
||||
};
|
||||
|
||||
Some(PathBuf::from(path_str))
|
||||
}
|
||||
|
||||
pub fn new(params: &InitializeParams) -> Self {
|
||||
let project_path = crate::workspace::get_project_path(params);
|
||||
let project_path = Self::get_project_path(params);
|
||||
|
||||
let (project, settings) = if let Some(path) = &project_path {
|
||||
let settings =
|
||||
|
@ -33,7 +74,7 @@ impl Session {
|
|||
Self {
|
||||
client_capabilities: params.capabilities.clone(),
|
||||
project,
|
||||
documents: Store::default(),
|
||||
documents: DocumentStore::new(),
|
||||
settings,
|
||||
}
|
||||
}
|
||||
|
@ -46,11 +87,11 @@ impl Session {
|
|||
&mut self.project
|
||||
}
|
||||
|
||||
pub fn documents(&self) -> &Store {
|
||||
pub fn documents(&self) -> &DocumentStore {
|
||||
&self.documents
|
||||
}
|
||||
|
||||
pub fn documents_mut(&mut self) -> &mut Store {
|
||||
pub fn documents_mut(&mut self) -> &mut DocumentStore {
|
||||
&mut self.documents
|
||||
}
|
||||
|
||||
|
|
|
@ -1,231 +0,0 @@
|
|||
use djls_workspace::{FileId, VfsSnapshot};
|
||||
use std::sync::Arc;
|
||||
use tower_lsp_server::lsp_types::{Position, Range};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TextDocument {
|
||||
pub uri: String,
|
||||
pub version: i32,
|
||||
pub language_id: LanguageId,
|
||||
file_id: FileId,
|
||||
}
|
||||
|
||||
impl TextDocument {
|
||||
pub fn new(uri: String, version: i32, language_id: LanguageId, file_id: FileId) -> Self {
|
||||
Self {
|
||||
uri,
|
||||
version,
|
||||
language_id,
|
||||
file_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn file_id(&self) -> FileId {
|
||||
self.file_id
|
||||
}
|
||||
|
||||
pub fn get_content(&self, vfs: &VfsSnapshot) -> Option<Arc<str>> {
|
||||
vfs.get_text(self.file_id)
|
||||
}
|
||||
|
||||
pub fn get_line(&self, vfs: &VfsSnapshot, line_index: &LineIndex, line: u32) -> Option<String> {
|
||||
let content = self.get_content(vfs)?;
|
||||
|
||||
let line_start = *line_index.line_starts.get(line as usize)?;
|
||||
let line_end = line_index
|
||||
.line_starts
|
||||
.get(line as usize + 1)
|
||||
.copied()
|
||||
.unwrap_or(line_index.length);
|
||||
|
||||
Some(content[line_start as usize..line_end as usize].to_string())
|
||||
}
|
||||
|
||||
pub fn get_text_range(
|
||||
&self,
|
||||
vfs: &VfsSnapshot,
|
||||
line_index: &LineIndex,
|
||||
range: Range,
|
||||
) -> Option<String> {
|
||||
let content = self.get_content(vfs)?;
|
||||
|
||||
let start_offset = line_index.offset(range.start)? as usize;
|
||||
let end_offset = line_index.offset(range.end)? as usize;
|
||||
|
||||
Some(content[start_offset..end_offset].to_string())
|
||||
}
|
||||
|
||||
pub fn get_template_tag_context(
|
||||
&self,
|
||||
vfs: &VfsSnapshot,
|
||||
line_index: &LineIndex,
|
||||
position: Position,
|
||||
) -> Option<TemplateTagContext> {
|
||||
let content = self.get_content(vfs)?;
|
||||
|
||||
let start = line_index.line_starts.get(position.line as usize)?;
|
||||
let end = line_index
|
||||
.line_starts
|
||||
.get(position.line as usize + 1)
|
||||
.copied()
|
||||
.unwrap_or(line_index.length);
|
||||
|
||||
let line = &content[*start as usize..end as usize];
|
||||
let char_pos: usize = position.character.try_into().ok()?;
|
||||
let prefix = &line[..char_pos];
|
||||
let rest_of_line = &line[char_pos..];
|
||||
let rest_trimmed = rest_of_line.trim_start();
|
||||
|
||||
prefix.rfind("{%").map(|tag_start| {
|
||||
// Check if we're immediately after {% with no space
|
||||
let needs_leading_space = prefix.ends_with("{%");
|
||||
|
||||
let closing_brace = if rest_trimmed.starts_with("%}") {
|
||||
ClosingBrace::FullClose
|
||||
} else if rest_trimmed.starts_with('}') {
|
||||
ClosingBrace::PartialClose
|
||||
} else {
|
||||
ClosingBrace::None
|
||||
};
|
||||
|
||||
TemplateTagContext {
|
||||
partial_tag: prefix[tag_start + 2..].trim().to_string(),
|
||||
closing_brace,
|
||||
needs_leading_space,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct LineIndex {
|
||||
pub line_starts: Vec<u32>,
|
||||
pub line_starts_utf16: Vec<u32>,
|
||||
pub length: u32,
|
||||
pub length_utf16: u32,
|
||||
}
|
||||
|
||||
impl LineIndex {
|
||||
pub fn new(text: &str) -> Self {
|
||||
let mut line_starts = vec![0];
|
||||
let mut line_starts_utf16 = vec![0];
|
||||
let mut pos_utf8 = 0;
|
||||
let mut pos_utf16 = 0;
|
||||
|
||||
for c in text.chars() {
|
||||
pos_utf8 += u32::try_from(c.len_utf8()).unwrap_or(0);
|
||||
pos_utf16 += u32::try_from(c.len_utf16()).unwrap_or(0);
|
||||
if c == '\n' {
|
||||
line_starts.push(pos_utf8);
|
||||
line_starts_utf16.push(pos_utf16);
|
||||
}
|
||||
}
|
||||
|
||||
Self {
|
||||
line_starts,
|
||||
line_starts_utf16,
|
||||
length: pos_utf8,
|
||||
length_utf16: pos_utf16,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn offset(&self, position: Position) -> Option<u32> {
|
||||
let line_start = self.line_starts.get(position.line as usize)?;
|
||||
|
||||
Some(line_start + position.character)
|
||||
}
|
||||
|
||||
/// Convert UTF-16 LSP position to UTF-8 byte offset
|
||||
pub fn offset_utf16(&self, position: Position, text: &str) -> Option<u32> {
|
||||
let line_start_utf8 = self.line_starts.get(position.line as usize)?;
|
||||
let _line_start_utf16 = self.line_starts_utf16.get(position.line as usize)?;
|
||||
|
||||
// If position is at start of line, return UTF-8 line start
|
||||
if position.character == 0 {
|
||||
return Some(*line_start_utf8);
|
||||
}
|
||||
|
||||
// Find the line text
|
||||
let next_line_start = self
|
||||
.line_starts
|
||||
.get(position.line as usize + 1)
|
||||
.copied()
|
||||
.unwrap_or(self.length);
|
||||
|
||||
let line_text = text.get(*line_start_utf8 as usize..next_line_start as usize)?;
|
||||
|
||||
// Convert UTF-16 character offset to UTF-8 byte offset within the line
|
||||
let mut utf16_pos = 0;
|
||||
let mut utf8_pos = 0;
|
||||
|
||||
for c in line_text.chars() {
|
||||
if utf16_pos >= position.character {
|
||||
break;
|
||||
}
|
||||
utf16_pos += u32::try_from(c.len_utf16()).unwrap_or(0);
|
||||
utf8_pos += u32::try_from(c.len_utf8()).unwrap_or(0);
|
||||
}
|
||||
|
||||
Some(line_start_utf8 + utf8_pos)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn position(&self, offset: u32) -> Position {
|
||||
let line = match self.line_starts.binary_search(&offset) {
|
||||
Ok(line) => line,
|
||||
Err(line) => line - 1,
|
||||
};
|
||||
|
||||
let line_start = self.line_starts[line];
|
||||
let character = offset - line_start;
|
||||
|
||||
Position::new(u32::try_from(line).unwrap_or(0), character)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum LanguageId {
|
||||
HtmlDjango,
|
||||
Other,
|
||||
Python,
|
||||
}
|
||||
|
||||
impl From<&str> for LanguageId {
|
||||
fn from(language_id: &str) -> Self {
|
||||
match language_id {
|
||||
"django-html" | "htmldjango" => Self::HtmlDjango,
|
||||
"python" => Self::Python,
|
||||
_ => Self::Other,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for LanguageId {
|
||||
fn from(language_id: String) -> Self {
|
||||
Self::from(language_id.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LanguageId> for djls_workspace::FileKind {
|
||||
fn from(language_id: LanguageId) -> Self {
|
||||
match language_id {
|
||||
LanguageId::Python => Self::Python,
|
||||
LanguageId::HtmlDjango => Self::Template,
|
||||
LanguageId::Other => Self::Other,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ClosingBrace {
|
||||
None,
|
||||
PartialClose, // just }
|
||||
FullClose, // %}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TemplateTagContext {
|
||||
pub partial_tag: String,
|
||||
pub closing_brace: ClosingBrace,
|
||||
pub needs_leading_space: bool,
|
||||
}
|
|
@ -1,6 +0,0 @@
|
|||
mod document;
|
||||
mod store;
|
||||
mod utils;
|
||||
|
||||
pub use store::Store;
|
||||
pub use utils::get_project_path;
|
|
@ -1,539 +0,0 @@
|
|||
use std::collections::HashMap;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Result;
|
||||
use camino::Utf8PathBuf;
|
||||
use djls_project::TemplateTags;
|
||||
use djls_workspace::{FileId, FileKind, FileStore, TextSource, Vfs};
|
||||
use tower_lsp_server::lsp_types::CompletionItem;
|
||||
use tower_lsp_server::lsp_types::CompletionItemKind;
|
||||
use tower_lsp_server::lsp_types::CompletionResponse;
|
||||
use tower_lsp_server::lsp_types::Diagnostic;
|
||||
use tower_lsp_server::lsp_types::DiagnosticSeverity;
|
||||
use tower_lsp_server::lsp_types::DidChangeTextDocumentParams;
|
||||
use tower_lsp_server::lsp_types::DidCloseTextDocumentParams;
|
||||
use tower_lsp_server::lsp_types::DidOpenTextDocumentParams;
|
||||
use tower_lsp_server::lsp_types::Documentation;
|
||||
use tower_lsp_server::lsp_types::InsertTextFormat;
|
||||
use tower_lsp_server::lsp_types::MarkupContent;
|
||||
use tower_lsp_server::lsp_types::MarkupKind;
|
||||
use tower_lsp_server::lsp_types::Position;
|
||||
use tower_lsp_server::lsp_types::Range;
|
||||
use tower_lsp_server::lsp_types::TextDocumentContentChangeEvent;
|
||||
|
||||
use super::document::{ClosingBrace, LanguageId, LineIndex, TextDocument};
|
||||
|
||||
pub struct Store {
|
||||
vfs: Arc<Vfs>,
|
||||
file_store: Arc<Mutex<FileStore>>,
|
||||
file_ids: HashMap<String, FileId>,
|
||||
line_indices: HashMap<FileId, LineIndex>,
|
||||
versions: HashMap<String, i32>,
|
||||
documents: HashMap<String, TextDocument>,
|
||||
}
|
||||
|
||||
impl Default for Store {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
vfs: Arc::new(Vfs::default()),
|
||||
file_store: Arc::new(Mutex::new(FileStore::new())),
|
||||
file_ids: HashMap::new(),
|
||||
line_indices: HashMap::new(),
|
||||
versions: HashMap::new(),
|
||||
documents: HashMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Store {
|
||||
pub fn handle_did_open(&mut self, params: &DidOpenTextDocumentParams) -> Result<()> {
|
||||
let uri_str = params.text_document.uri.to_string();
|
||||
let uri = params.text_document.uri.clone();
|
||||
let version = params.text_document.version;
|
||||
let content = params.text_document.text.clone();
|
||||
let language_id = LanguageId::from(params.text_document.language_id.as_str());
|
||||
let kind = FileKind::from(language_id.clone());
|
||||
|
||||
// Convert URI to Url for VFS
|
||||
let vfs_url =
|
||||
url::Url::parse(&uri.to_string()).map_err(|e| anyhow!("Invalid URI: {}", e))?;
|
||||
|
||||
// Convert to path - simplified for now, just use URI string
|
||||
let path = Utf8PathBuf::from(uri.as_str());
|
||||
|
||||
// Store content in VFS
|
||||
let text_source = TextSource::Overlay(Arc::from(content.as_str()));
|
||||
let file_id = self.vfs.intern_file(vfs_url, path, kind, text_source);
|
||||
|
||||
// Set overlay content in VFS
|
||||
self.vfs.set_overlay(file_id, Arc::from(content.as_str()))?;
|
||||
|
||||
// Sync VFS snapshot to FileStore for Salsa tracking
|
||||
let snapshot = self.vfs.snapshot();
|
||||
let mut file_store = self.file_store.lock().unwrap();
|
||||
file_store.apply_vfs_snapshot(&snapshot);
|
||||
|
||||
// Create TextDocument metadata
|
||||
let document = TextDocument::new(uri_str.clone(), version, language_id.clone(), file_id);
|
||||
self.documents.insert(uri_str.clone(), document);
|
||||
|
||||
// Cache mappings and indices
|
||||
self.file_ids.insert(uri_str.clone(), file_id);
|
||||
self.line_indices.insert(file_id, LineIndex::new(&content));
|
||||
self.versions.insert(uri_str, version);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn handle_did_change(&mut self, params: &DidChangeTextDocumentParams) -> Result<()> {
|
||||
let uri_str = params.text_document.uri.as_str().to_string();
|
||||
let version = params.text_document.version;
|
||||
|
||||
// Look up FileId
|
||||
let file_id = self
|
||||
.file_ids
|
||||
.get(&uri_str)
|
||||
.copied()
|
||||
.ok_or_else(|| anyhow!("Document not found: {}", uri_str))?;
|
||||
|
||||
// Get current content from VFS
|
||||
let snapshot = self.vfs.snapshot();
|
||||
let current_content = snapshot
|
||||
.get_text(file_id)
|
||||
.ok_or_else(|| anyhow!("File content not found: {}", uri_str))?;
|
||||
|
||||
// Get current line index for position calculations
|
||||
let line_index = self
|
||||
.line_indices
|
||||
.get(&file_id)
|
||||
.ok_or_else(|| anyhow!("Line index not found for: {}", uri_str))?;
|
||||
|
||||
// Apply text changes using the new function
|
||||
let new_content =
|
||||
apply_text_changes(¤t_content, ¶ms.content_changes, line_index)?;
|
||||
|
||||
// Update TextDocument version
|
||||
if let Some(document) = self.documents.get_mut(&uri_str) {
|
||||
document.version = version;
|
||||
}
|
||||
|
||||
// Update VFS with new content
|
||||
self.vfs
|
||||
.set_overlay(file_id, Arc::from(new_content.as_str()))?;
|
||||
|
||||
// Sync VFS snapshot to FileStore for Salsa tracking
|
||||
let snapshot = self.vfs.snapshot();
|
||||
let mut file_store = self.file_store.lock().unwrap();
|
||||
file_store.apply_vfs_snapshot(&snapshot);
|
||||
|
||||
// Update cached line index and version
|
||||
self.line_indices
|
||||
.insert(file_id, LineIndex::new(&new_content));
|
||||
self.versions.insert(uri_str, version);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn handle_did_close(&mut self, params: &DidCloseTextDocumentParams) {
|
||||
let uri_str = params.text_document.uri.as_str();
|
||||
|
||||
// Remove TextDocument metadata
|
||||
self.documents.remove(uri_str);
|
||||
|
||||
// Look up FileId and remove mappings
|
||||
if let Some(file_id) = self.file_ids.remove(uri_str) {
|
||||
self.line_indices.remove(&file_id);
|
||||
}
|
||||
self.versions.remove(uri_str);
|
||||
|
||||
// Note: We don't remove from VFS as it might be useful for caching
|
||||
// The VFS will handle cleanup internally
|
||||
}
|
||||
|
||||
pub fn get_file_id(&self, uri: &str) -> Option<FileId> {
|
||||
self.file_ids.get(uri).copied()
|
||||
}
|
||||
|
||||
pub fn get_line_index(&self, file_id: FileId) -> Option<&LineIndex> {
|
||||
self.line_indices.get(&file_id)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn get_version(&self, uri: &str) -> Option<i32> {
|
||||
self.versions.get(uri).copied()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn is_version_valid(&self, uri: &str, version: i32) -> bool {
|
||||
self.get_version(uri) == Some(version)
|
||||
}
|
||||
|
||||
// TextDocument helper methods
|
||||
pub fn get_document(&self, uri: &str) -> Option<&TextDocument> {
|
||||
self.documents.get(uri)
|
||||
}
|
||||
|
||||
pub fn get_document_mut(&mut self, uri: &str) -> Option<&mut TextDocument> {
|
||||
self.documents.get_mut(uri)
|
||||
}
|
||||
|
||||
pub fn get_completions(
|
||||
&self,
|
||||
uri: &str,
|
||||
position: Position,
|
||||
tags: &TemplateTags,
|
||||
) -> Option<CompletionResponse> {
|
||||
// Check if this is a Django template using TextDocument metadata
|
||||
let document = self.get_document(uri)?;
|
||||
if document.language_id != LanguageId::HtmlDjango {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Try to get cached AST from FileStore for better context analysis
|
||||
// This demonstrates using the cached AST, though we still fall back to string parsing
|
||||
let file_id = document.file_id();
|
||||
let file_store = self.file_store.lock().unwrap();
|
||||
if let Some(_ast) = file_store.get_template_ast(file_id) {
|
||||
// TODO: In a future enhancement, we could use the AST to provide
|
||||
// more intelligent completions based on the current node context
|
||||
// For now, we continue with the existing string-based approach
|
||||
}
|
||||
|
||||
// Get template tag context from document
|
||||
let vfs_snapshot = self.vfs.snapshot();
|
||||
let line_index = self.get_line_index(file_id)?;
|
||||
let context = document.get_template_tag_context(&vfs_snapshot, line_index, position)?;
|
||||
|
||||
let mut completions: Vec<CompletionItem> = tags
|
||||
.iter()
|
||||
.filter(|tag| {
|
||||
context.partial_tag.is_empty() || tag.name().starts_with(&context.partial_tag)
|
||||
})
|
||||
.map(|tag| {
|
||||
let leading_space = if context.needs_leading_space { " " } else { "" };
|
||||
CompletionItem {
|
||||
label: tag.name().to_string(),
|
||||
kind: Some(CompletionItemKind::KEYWORD),
|
||||
detail: Some(format!("Template tag from {}", tag.library())),
|
||||
documentation: tag.doc().as_ref().map(|doc| {
|
||||
Documentation::MarkupContent(MarkupContent {
|
||||
kind: MarkupKind::Markdown,
|
||||
value: (*doc).to_string(),
|
||||
})
|
||||
}),
|
||||
insert_text: Some(match context.closing_brace {
|
||||
ClosingBrace::None => format!("{}{} %}}", leading_space, tag.name()),
|
||||
ClosingBrace::PartialClose => format!("{}{} %", leading_space, tag.name()),
|
||||
ClosingBrace::FullClose => format!("{}{} ", leading_space, tag.name()),
|
||||
}),
|
||||
insert_text_format: Some(InsertTextFormat::PLAIN_TEXT),
|
||||
..Default::default()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
if completions.is_empty() {
|
||||
None
|
||||
} else {
|
||||
completions.sort_by(|a, b| a.label.cmp(&b.label));
|
||||
Some(CompletionResponse::Array(completions))
|
||||
}
|
||||
}
|
||||
|
||||
/// Get template parsing diagnostics for a file.
|
||||
///
|
||||
/// This method uses the cached template errors from Salsa to generate LSP diagnostics.
|
||||
/// The errors are only re-computed when the file content changes, providing efficient
|
||||
/// incremental error reporting.
|
||||
pub fn get_template_diagnostics(&self, uri: &str) -> Vec<Diagnostic> {
|
||||
let Some(document) = self.get_document(uri) else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
// Only process template files
|
||||
if document.language_id != LanguageId::HtmlDjango {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let file_id = document.file_id();
|
||||
let Some(_line_index) = self.get_line_index(file_id) else {
|
||||
return vec![];
|
||||
};
|
||||
|
||||
// Get cached template errors from FileStore
|
||||
let file_store = self.file_store.lock().unwrap();
|
||||
let errors = file_store.get_template_errors(file_id);
|
||||
|
||||
// Convert template errors to LSP diagnostics
|
||||
errors
|
||||
.iter()
|
||||
.map(|error| {
|
||||
// For now, we'll place all errors at the start of the file
|
||||
// In a future enhancement, we could use error spans for precise locations
|
||||
let range = Range {
|
||||
start: Position {
|
||||
line: 0,
|
||||
character: 0,
|
||||
},
|
||||
end: Position {
|
||||
line: 0,
|
||||
character: 0,
|
||||
},
|
||||
};
|
||||
|
||||
Diagnostic {
|
||||
range,
|
||||
severity: Some(DiagnosticSeverity::ERROR),
|
||||
source: Some("djls-templates".to_string()),
|
||||
message: error.clone(),
|
||||
..Default::default()
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply text changes to content, handling multiple changes correctly
|
||||
fn apply_text_changes(
|
||||
content: &str,
|
||||
changes: &[TextDocumentContentChangeEvent],
|
||||
line_index: &LineIndex,
|
||||
) -> Result<String> {
|
||||
if changes.is_empty() {
|
||||
return Ok(content.to_string());
|
||||
}
|
||||
|
||||
// Check for full document replacement first
|
||||
for change in changes {
|
||||
if change.range.is_none() {
|
||||
return Ok(change.text.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Sort changes by start position in reverse order (end to start)
|
||||
let mut sorted_changes = changes.to_vec();
|
||||
sorted_changes.sort_by(|a, b| {
|
||||
match (a.range, b.range) {
|
||||
(Some(range_a), Some(range_b)) => {
|
||||
// Primary sort: by line (reverse)
|
||||
let line_cmp = range_b.start.line.cmp(&range_a.start.line);
|
||||
if line_cmp == std::cmp::Ordering::Equal {
|
||||
// Secondary sort: by character (reverse)
|
||||
range_b.start.character.cmp(&range_a.start.character)
|
||||
} else {
|
||||
line_cmp
|
||||
}
|
||||
}
|
||||
_ => std::cmp::Ordering::Equal,
|
||||
}
|
||||
});
|
||||
|
||||
let mut result = content.to_string();
|
||||
|
||||
for change in &sorted_changes {
|
||||
if let Some(range) = change.range {
|
||||
// Convert UTF-16 positions to UTF-8 offsets
|
||||
let start_offset = line_index
|
||||
.offset_utf16(range.start, &result)
|
||||
.ok_or_else(|| anyhow!("Invalid start position: {:?}", range.start))?;
|
||||
let end_offset = line_index
|
||||
.offset_utf16(range.end, &result)
|
||||
.ok_or_else(|| anyhow!("Invalid end position: {:?}", range.end))?;
|
||||
|
||||
if start_offset as usize > result.len() || end_offset as usize > result.len() {
|
||||
return Err(anyhow!(
|
||||
"Offset out of bounds: start={}, end={}, len={}",
|
||||
start_offset,
|
||||
end_offset,
|
||||
result.len()
|
||||
));
|
||||
}
|
||||
|
||||
// Apply the change
|
||||
result.replace_range(start_offset as usize..end_offset as usize, &change.text);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tower_lsp_server::lsp_types::Range;
|
||||
|
||||
#[test]
|
||||
fn test_apply_single_character_insertion() {
|
||||
let content = "Hello world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 6), Position::new(0, 6))),
|
||||
range_length: None,
|
||||
text: "beautiful ".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "Hello beautiful world");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_single_character_deletion() {
|
||||
let content = "Hello world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 5), Position::new(0, 6))),
|
||||
range_length: None,
|
||||
text: String::new(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "Helloworld");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_multiple_changes_in_reverse_order() {
|
||||
let content = "line 1\nline 2\nline 3";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// Insert "new " at position (1, 0) and "another " at position (0, 0)
|
||||
let changes = vec![
|
||||
TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 0), Position::new(0, 0))),
|
||||
range_length: None,
|
||||
text: "another ".to_string(),
|
||||
},
|
||||
TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(1, 0), Position::new(1, 0))),
|
||||
range_length: None,
|
||||
text: "new ".to_string(),
|
||||
},
|
||||
];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "another line 1\nnew line 2\nline 3");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_multiline_replacement() {
|
||||
let content = "line 1\nline 2\nline 3";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: Some(Range::new(Position::new(0, 0), Position::new(2, 6))),
|
||||
range_length: None,
|
||||
text: "completely new content".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "completely new content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_full_document_replacement() {
|
||||
let content = "old content";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
let changes = vec![TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: "brand new content".to_string(),
|
||||
}];
|
||||
|
||||
let result = apply_text_changes(content, &changes, &line_index).unwrap();
|
||||
assert_eq!(result, "brand new content");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_utf16_line_index_basic() {
|
||||
let content = "hello world";
|
||||
let line_index = LineIndex::new(content);
|
||||
|
||||
// ASCII characters should have 1:1 UTF-8:UTF-16 mapping
|
||||
let pos = Position::new(0, 6);
|
||||
let offset = line_index.offset_utf16(pos, content).unwrap();
|
||||
assert_eq!(offset, 6);
|
||||
assert_eq!(&content[6..7], "w");
|
||||
}
|
||||
|
||||
#[test]
fn test_utf16_line_index_with_emoji() {
    // "hello " is 6 UTF-16 units and 👋 is a surrogate pair (2 units),
    // so UTF-16 column 8 sits just past the emoji. In UTF-8 the emoji
    // occupies 4 bytes, which puts the same position at byte offset 10.
    let text = "hello 👋 world";
    let index = LineIndex::new(text);

    let after_emoji = Position::new(0, 8);
    let offset = index.offset_utf16(after_emoji, text).unwrap();

    assert_eq!(offset, 10);
    // Byte 10 is the space separating the emoji from "world".
    assert_eq!(&text[10..11], " ");
}
|
||||
|
||||
#[test]
fn test_utf16_line_index_multiline() {
    // Offsets on later lines accumulate the full byte length of every
    // preceding line plus its newline.
    let text = "first line\nsecond line";
    let index = LineIndex::new(text);

    // Line 1, column 7 is the 'l' of "line": 11 bytes for
    // "first line\n", then 7 more into the second line.
    let offset = index.offset_utf16(Position::new(1, 7), text).unwrap();
    assert_eq!(offset, 18);
    assert_eq!(&text[18..19], "l");
}
|
||||
|
||||
#[test]
fn test_apply_changes_with_emoji() {
    // UTF-16 column 9 is just past the space that follows the emoji;
    // applying an insertion there must translate through the surrogate
    // pair correctly rather than splitting it.
    let original = "hello 👋 world";
    let index = LineIndex::new(original);

    let insertion_point = Position::new(0, 9);
    let changes = vec![TextDocumentContentChangeEvent {
        range: Some(Range::new(insertion_point, insertion_point)),
        range_length: None,
        text: "beautiful ".to_string(),
    }];

    let updated = apply_text_changes(original, &changes, &index).unwrap();
    assert_eq!(updated, "hello 👋 beautiful world");
}
|
||||
|
||||
#[test]
fn test_line_index_utf16_tracking() {
    // "a👋b" on a single line: the index must record distinct UTF-16
    // and UTF-8 totals for the same text.
    let text = "a👋b";
    let index = LineIndex::new(text);

    // One line, so exactly one UTF-16 line-start entry at 0.
    assert_eq!(index.line_starts_utf16, vec![0]);
    // 'a' (1) + 👋 surrogate pair (2) + 'b' (1) = 4 UTF-16 units...
    assert_eq!(index.length_utf16, 4);
    // ...but 'a' (1) + 👋 (4) + 'b' (1) = 6 UTF-8 bytes.
    assert_eq!(index.length, 6);
}
|
||||
|
||||
#[test]
fn test_edge_case_changes_at_boundaries() {
    let text = "abc";

    // Zero-width range at position (0, 0): a pure insertion at the
    // very start of the document.
    let index = LineIndex::new(text);
    let at_start = vec![TextDocumentContentChangeEvent {
        range: Some(Range::new(Position::new(0, 0), Position::new(0, 0))),
        range_length: None,
        text: "start".to_string(),
    }];
    let result = apply_text_changes(text, &at_start, &index).unwrap();
    assert_eq!(result, "startabc");

    // Zero-width range one column past the last character: an append.
    let index = LineIndex::new(text);
    let at_end = vec![TextDocumentContentChangeEvent {
        range: Some(Range::new(Position::new(0, 3), Position::new(0, 3))),
        range_length: None,
        text: "end".to_string(),
    }];
    let result = apply_text_changes(text, &at_end, &index).unwrap();
    assert_eq!(result, "abcend");
}
|
||||
}
|
|
@ -1,43 +0,0 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use percent_encoding::percent_decode_str;
|
||||
use tower_lsp_server::lsp_types::InitializeParams;
|
||||
use tower_lsp_server::lsp_types::Uri;
|
||||
|
||||
/// Determines the project root path from initialization parameters.
|
||||
///
|
||||
/// Tries the current directory first, then falls back to the first workspace folder.
|
||||
pub fn get_project_path(params: &InitializeParams) -> Option<PathBuf> {
|
||||
// Try current directory first
|
||||
std::env::current_dir().ok().or_else(|| {
|
||||
// Fall back to the first workspace folder URI
|
||||
params
|
||||
.workspace_folders
|
||||
.as_ref()
|
||||
.and_then(|folders| folders.first())
|
||||
.and_then(|folder| uri_to_pathbuf(&folder.uri))
|
||||
})
|
||||
}
|
||||
|
||||
/// Converts a `file:` URI into an absolute `PathBuf`.
///
/// Returns `None` when the URI has no scheme or a scheme other than
/// `file`. The URI's path component is percent-decoded (lossily, so
/// invalid UTF-8 sequences become replacement characters) before being
/// turned into a `PathBuf`.
fn uri_to_pathbuf(uri: &Uri) -> Option<PathBuf> {
    // Only `file:` URIs map to local filesystem paths; anything else
    // (http, untitled, missing scheme) is rejected.
    if uri.scheme().is_none_or(|s| s.as_str() != "file") {
        return None;
    }

    // Get the path part as a string (still percent-encoded).
    let encoded_path_str = uri.path().as_str();

    // Decode the percent-encoded path string; the Cow must outlive the
    // borrowed `path_str` below.
    let decoded_path_cow = percent_decode_str(encoded_path_str).decode_utf8_lossy();
    let path_str = decoded_path_cow.as_ref();

    #[cfg(windows)]
    let path_str = {
        // Remove leading '/' for paths like /C:/... — file URIs encode
        // Windows drive paths with a spurious leading slash.
        path_str.strip_prefix('/').unwrap_or(path_str)
    };

    Some(PathBuf::from(path_str))
}
|
Loading…
Add table
Add a link
Reference in a new issue