mirror of
https://github.com/joshuadavidthomas/django-language-server.git
synced 2025-09-04 17:30:37 +00:00
Improve documentation and consolidate path/URL utilities
- Added comprehensive module-level documentation to all djls-workspace modules - Consolidated scattered URL/path conversion utilities into paths module - Added documentation explaining the 'why' for key types and abstractions - Added #[must_use] annotations to constructors and getters - Focused on explaining architecture and design decisions rather than obvious behavior
This commit is contained in:
parent
2dd779bcda
commit
f3fb8e7045
12 changed files with 295 additions and 83 deletions
1
Cargo.lock
generated
1
Cargo.lock
generated
|
@ -513,6 +513,7 @@ dependencies = [
|
|||
"djls-project",
|
||||
"djls-templates",
|
||||
"notify",
|
||||
"percent-encoding",
|
||||
"salsa",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
|
|
|
@ -39,9 +39,8 @@ use djls_conf::Settings;
|
|||
use djls_project::DjangoProject;
|
||||
use djls_workspace::{
|
||||
db::{Database, SourceFile},
|
||||
Buffers, FileSystem, OsFileSystem, TextDocument, WorkspaceFileSystem,
|
||||
paths, Buffers, FileSystem, OsFileSystem, TextDocument, WorkspaceFileSystem,
|
||||
};
|
||||
use percent_encoding::percent_decode_str;
|
||||
use salsa::{Setter, StorageHandle};
|
||||
use tower_lsp_server::lsp_types;
|
||||
use url::Url;
|
||||
|
@ -171,25 +170,7 @@ impl Session {
|
|||
|
||||
/// Converts a `file:` URI into an absolute `PathBuf`.
|
||||
fn uri_to_pathbuf(uri: &lsp_types::Uri) -> Option<PathBuf> {
|
||||
// Check if the scheme is "file"
|
||||
if uri.scheme().is_none_or(|s| s.as_str() != "file") {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Get the path part as a string
|
||||
let encoded_path_str = uri.path().as_str();
|
||||
|
||||
// Decode the percent-encoded path string
|
||||
let decoded_path_cow = percent_decode_str(encoded_path_str).decode_utf8_lossy();
|
||||
let path_str = decoded_path_cow.as_ref();
|
||||
|
||||
#[cfg(windows)]
|
||||
let path_str = {
|
||||
// Remove leading '/' for paths like /C:/...
|
||||
path_str.strip_prefix('/').unwrap_or(path_str)
|
||||
};
|
||||
|
||||
Some(PathBuf::from(path_str))
|
||||
paths::lsp_uri_to_path(uri)
|
||||
}
|
||||
|
||||
pub fn project(&self) -> Option<&DjangoProject> {
|
||||
|
@ -353,29 +334,6 @@ impl Session {
|
|||
f(&db)
|
||||
}
|
||||
|
||||
/// Convert a URL to a PathBuf for file operations.
|
||||
///
|
||||
/// This is needed to convert between LSP URLs and file paths for
|
||||
/// SourceFile creation and tracking.
|
||||
pub fn url_to_path(&self, url: &Url) -> Option<PathBuf> {
|
||||
// Only handle file:// URLs
|
||||
if url.scheme() != "file" {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Decode and convert to PathBuf
|
||||
let path = percent_decode_str(url.path()).decode_utf8().ok()?;
|
||||
|
||||
#[cfg(windows)]
|
||||
let path = path.strip_prefix('/').unwrap_or(&path);
|
||||
|
||||
Some(PathBuf::from(path.as_ref()))
|
||||
}
|
||||
|
||||
// ===== Document Lifecycle Management =====
|
||||
// These methods encapsulate the two-layer architecture coordination:
|
||||
// Layer 1 (overlays) and Layer 2 (Salsa revision tracking)
|
||||
|
||||
/// Handle opening a document - sets buffer and creates file.
|
||||
///
|
||||
/// This method coordinates both layers:
|
||||
|
@ -390,12 +348,12 @@ impl Session {
|
|||
// Layer 2: Create file and bump revision if it already exists
|
||||
// This is crucial: if the file was already read from disk, we need to
|
||||
// invalidate Salsa's cache so it re-reads through the buffer system
|
||||
if let Some(path) = self.url_to_path(&url) {
|
||||
if let Some(path) = paths::url_to_path(&url) {
|
||||
self.with_db_mut(|db| {
|
||||
// Check if file already exists (was previously read from disk)
|
||||
let already_exists = db.has_file(&path);
|
||||
let file = db.get_or_create_file(path.clone());
|
||||
|
||||
|
||||
if already_exists {
|
||||
// File was already read - bump revision to invalidate cache
|
||||
let current_rev = file.revision(db);
|
||||
|
@ -407,7 +365,6 @@ impl Session {
|
|||
current_rev,
|
||||
new_rev
|
||||
);
|
||||
|
||||
} else {
|
||||
// New file - starts at revision 0
|
||||
tracing::debug!(
|
||||
|
@ -433,7 +390,7 @@ impl Session {
|
|||
self.buffers.update(url.clone(), document);
|
||||
|
||||
// Layer 2: Bump revision to trigger invalidation
|
||||
if let Some(path) = self.url_to_path(&url) {
|
||||
if let Some(path) = paths::url_to_path(&url) {
|
||||
self.notify_file_changed(path);
|
||||
}
|
||||
}
|
||||
|
@ -460,7 +417,7 @@ impl Session {
|
|||
|
||||
// Layer 2: Bump revision to trigger re-read from disk
|
||||
// We keep the file alive for potential re-opening
|
||||
if let Some(path) = self.url_to_path(url) {
|
||||
if let Some(path) = paths::url_to_path(url) {
|
||||
self.notify_file_changed(path);
|
||||
}
|
||||
|
||||
|
|
|
@ -15,7 +15,7 @@ use tempfile::TempDir;
|
|||
use tower_lsp_server::lsp_types::{
|
||||
DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
|
||||
InitializeParams, InitializedParams, TextDocumentContentChangeEvent, TextDocumentIdentifier,
|
||||
TextDocumentItem, Uri, VersionedTextDocumentIdentifier, WorkspaceFolder,
|
||||
TextDocumentItem, VersionedTextDocumentIdentifier, WorkspaceFolder,
|
||||
};
|
||||
use tower_lsp_server::LanguageServer;
|
||||
use url::Url;
|
||||
|
@ -35,7 +35,7 @@ impl TestServer {
|
|||
let workspace_root = temp_dir.path().to_path_buf();
|
||||
|
||||
// Set up logging
|
||||
let (non_blocking, guard) = tracing_appender::non_blocking(std::io::sink());
|
||||
let (_non_blocking, guard) = tracing_appender::non_blocking(std::io::sink());
|
||||
|
||||
// Create server (guard is moved into server, so we return it too)
|
||||
let server = DjangoLanguageServer::new(guard);
|
||||
|
@ -73,7 +73,7 @@ impl TestServer {
|
|||
|
||||
/// Helper to create a file URL in the test workspace
|
||||
fn workspace_url(&self, name: &str) -> Url {
|
||||
Url::from_file_path(self.workspace_file(name)).unwrap()
|
||||
djls_workspace::paths::path_to_url(&self.workspace_file(name)).unwrap()
|
||||
}
|
||||
|
||||
/// Open a document in the LSP server
|
||||
|
|
|
@ -11,6 +11,7 @@ anyhow = { workspace = true }
|
|||
camino = { workspace = true }
|
||||
dashmap = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
percent-encoding = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tower-lsp-server = { workspace = true }
|
||||
|
|
|
@ -24,6 +24,7 @@ pub struct Buffers {
|
|||
|
||||
impl Buffers {
|
||||
/// Create a new empty buffer storage
|
||||
#[must_use]
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
inner: Arc::new(DashMap::new()),
|
||||
|
@ -41,23 +42,28 @@ impl Buffers {
|
|||
}
|
||||
|
||||
/// Close a document and return it if it was open
|
||||
#[must_use]
|
||||
pub fn close(&self, url: &Url) -> Option<TextDocument> {
|
||||
self.inner.remove(url).map(|(_, doc)| doc)
|
||||
}
|
||||
|
||||
/// Get a document if it's open
|
||||
#[must_use]
|
||||
pub fn get(&self, url: &Url) -> Option<TextDocument> {
|
||||
self.inner.get(url).map(|entry| entry.clone())
|
||||
}
|
||||
|
||||
/// Check if a document is open
|
||||
#[must_use]
|
||||
pub fn contains(&self, url: &Url) -> bool {
|
||||
self.inner.contains_key(url)
|
||||
}
|
||||
|
||||
/// Iterate over all open buffers (for debugging)
|
||||
pub fn iter(&self) -> impl Iterator<Item = (Url, TextDocument)> + '_ {
|
||||
self.inner.iter().map(|entry| (entry.key().clone(), entry.value().clone()))
|
||||
self.inner
|
||||
.iter()
|
||||
.map(|entry| (entry.key().clone(), entry.value().clone()))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -65,4 +71,5 @@ impl Default for Buffers {
|
|||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -406,7 +406,6 @@ mod tests {
|
|||
use salsa::Setter;
|
||||
use std::collections::HashMap;
|
||||
use std::io;
|
||||
use url::Url;
|
||||
|
||||
// Simple in-memory filesystem for testing
|
||||
struct InMemoryFileSystem {
|
||||
|
@ -485,7 +484,7 @@ mod tests {
|
|||
assert!(ast1.errors.is_empty(), "Should have no errors");
|
||||
|
||||
// Add an overlay with updated content
|
||||
let url = Url::from_file_path(&template_path).unwrap();
|
||||
let url = crate::paths::path_to_url(&template_path).unwrap();
|
||||
let updated_document = TextDocument::new(
|
||||
"{% block content %}Updated from overlay{% endblock %}".to_string(),
|
||||
2,
|
||||
|
@ -544,7 +543,7 @@ mod tests {
|
|||
assert!(Arc::ptr_eq(&ast1, &ast2), "Should return cached result");
|
||||
|
||||
// Update overlay content
|
||||
let url = Url::from_file_path(&template_path).unwrap();
|
||||
let url = crate::paths::path_to_url(&template_path).unwrap();
|
||||
let updated_document = TextDocument::new(
|
||||
"{% if false %}Changed{% endif %}".to_string(),
|
||||
2,
|
||||
|
|
|
@ -1,9 +1,21 @@
|
|||
//! LSP text document representation with efficient line indexing
|
||||
//!
|
||||
//! [`TextDocument`] stores open file content with version tracking for the LSP protocol.
|
||||
//! Pre-computed line indices enable O(1) position lookups, which is critical for
|
||||
//! performance when handling frequent position-based operations like hover, completion,
|
||||
//! and diagnostics.
|
||||
|
||||
use crate::language::LanguageId;
|
||||
use crate::template::ClosingBrace;
|
||||
use crate::template::TemplateTagContext;
|
||||
use tower_lsp_server::lsp_types::Position;
|
||||
use tower_lsp_server::lsp_types::Range;
|
||||
|
||||
/// In-memory representation of an open document in the LSP.
|
||||
///
|
||||
/// Combines document content with metadata needed for LSP operations,
|
||||
/// including version tracking for synchronization and pre-computed line
|
||||
/// indices for efficient position lookups.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TextDocument {
|
||||
/// The document's content
|
||||
|
@ -18,6 +30,7 @@ pub struct TextDocument {
|
|||
|
||||
impl TextDocument {
|
||||
/// Create a new TextDocument with the given content
|
||||
#[must_use]
|
||||
pub fn new(content: String, version: i32, language_id: LanguageId) -> Self {
|
||||
let line_index = LineIndex::new(&content);
|
||||
Self {
|
||||
|
@ -29,20 +42,24 @@ impl TextDocument {
|
|||
}
|
||||
|
||||
/// Get the document's content
|
||||
#[must_use]
|
||||
pub fn content(&self) -> &str {
|
||||
&self.content
|
||||
}
|
||||
|
||||
/// Get the version number
|
||||
#[must_use]
|
||||
pub fn version(&self) -> i32 {
|
||||
self.version
|
||||
}
|
||||
|
||||
/// Get the language identifier
|
||||
#[must_use]
|
||||
pub fn language_id(&self) -> LanguageId {
|
||||
self.language_id.clone()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn line_index(&self) -> &LineIndex {
|
||||
&self.line_index
|
||||
}
|
||||
|
@ -126,6 +143,11 @@ impl TextDocument {
|
|||
}
|
||||
}
|
||||
|
||||
/// Pre-computed line start positions for efficient position/offset conversion.
|
||||
///
|
||||
/// Computing line positions on every lookup would be O(n) where n is the document size.
|
||||
/// By pre-computing during document creation/updates, we get O(1) lookups for line starts
|
||||
/// and O(log n) for position-to-offset conversions via binary search.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct LineIndex {
|
||||
pub line_starts: Vec<u32>,
|
||||
|
|
|
@ -6,9 +6,8 @@
|
|||
use std::io;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use url::Url;
|
||||
|
||||
use crate::buffers::Buffers;
|
||||
use crate::{buffers::Buffers, paths};
|
||||
|
||||
/// Trait for file system operations
|
||||
///
|
||||
|
@ -80,6 +79,7 @@ pub struct WorkspaceFileSystem {
|
|||
|
||||
impl WorkspaceFileSystem {
|
||||
/// Create a new [`WorkspaceFileSystem`] with the given buffer storage and fallback
|
||||
#[must_use]
|
||||
pub fn new(buffers: Buffers, disk: Arc<dyn FileSystem>) -> Self {
|
||||
Self { buffers, disk }
|
||||
}
|
||||
|
@ -87,7 +87,7 @@ impl WorkspaceFileSystem {
|
|||
|
||||
impl FileSystem for WorkspaceFileSystem {
|
||||
fn read_to_string(&self, path: &Path) -> io::Result<String> {
|
||||
if let Some(url) = path_to_url(path) {
|
||||
if let Some(url) = paths::path_to_url(path) {
|
||||
if let Some(document) = self.buffers.get(&url) {
|
||||
return Ok(document.content().to_string());
|
||||
}
|
||||
|
@ -96,12 +96,12 @@ impl FileSystem for WorkspaceFileSystem {
|
|||
}
|
||||
|
||||
fn exists(&self, path: &Path) -> bool {
|
||||
path_to_url(path).is_some_and(|url| self.buffers.contains(&url))
|
||||
paths::path_to_url(path).is_some_and(|url| self.buffers.contains(&url))
|
||||
|| self.disk.exists(path)
|
||||
}
|
||||
|
||||
fn is_file(&self, path: &Path) -> bool {
|
||||
path_to_url(path).is_some_and(|url| self.buffers.contains(&url))
|
||||
paths::path_to_url(path).is_some_and(|url| self.buffers.contains(&url))
|
||||
|| self.disk.is_file(path)
|
||||
}
|
||||
|
||||
|
@ -122,31 +122,13 @@ impl FileSystem for WorkspaceFileSystem {
|
|||
}
|
||||
}
|
||||
|
||||
/// Convert a file path to URL for overlay lookup
|
||||
///
|
||||
/// This is a simplified conversion - in a full implementation,
|
||||
/// you might want more robust path-to-URL conversion
|
||||
fn path_to_url(path: &Path) -> Option<Url> {
|
||||
// For absolute paths, use them directly without canonicalization
|
||||
// This ensures consistency with how URLs are created when storing overlays
|
||||
if path.is_absolute() {
|
||||
return Url::from_file_path(path).ok();
|
||||
}
|
||||
|
||||
// Only try to canonicalize for relative paths
|
||||
if let Ok(absolute_path) = std::fs::canonicalize(path) {
|
||||
return Url::from_file_path(absolute_path).ok();
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::buffers::Buffers;
|
||||
use crate::document::TextDocument;
|
||||
use crate::language::LanguageId;
|
||||
use url::Url;
|
||||
|
||||
/// In-memory file system for testing
|
||||
pub struct InMemoryFileSystem {
|
||||
|
|
|
@ -1,5 +1,14 @@
|
|||
//! Language identification for document routing
|
||||
//!
|
||||
//! Maps LSP language identifiers to internal [`FileKind`] for analyzer routing.
|
||||
//! Language IDs come from the LSP client and determine how files are processed.
|
||||
|
||||
use crate::FileKind;
|
||||
|
||||
/// Language identifier as reported by the LSP client.
|
||||
///
|
||||
/// These identifiers follow VS Code's language ID conventions and determine
|
||||
/// which analyzers and features are available for a document.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum LanguageId {
|
||||
Html,
|
||||
|
|
|
@ -1,8 +1,23 @@
|
|||
//! Workspace management for the Django Language Server
|
||||
//!
|
||||
//! This crate provides the core workspace functionality including document management,
|
||||
//! file system abstractions, and Salsa integration for incremental computation of
|
||||
//! Django projects.
|
||||
//!
|
||||
//! # Key Components
|
||||
//!
|
||||
//! - [`Buffers`] - Thread-safe storage for open documents
|
||||
//! - [`Database`] - Salsa database for incremental computation
|
||||
//! - [`TextDocument`] - LSP document representation with efficient indexing
|
||||
//! - [`FileSystem`] - Abstraction layer for file operations with overlay support
|
||||
//! - [`paths`] - Consistent URL/path conversion utilities
|
||||
|
||||
mod buffers;
|
||||
pub mod db;
|
||||
mod document;
|
||||
mod fs;
|
||||
mod language;
|
||||
pub mod paths;
|
||||
mod template;
|
||||
|
||||
pub use buffers::Buffers;
|
||||
|
|
200
crates/djls-workspace/src/paths.rs
Normal file
200
crates/djls-workspace/src/paths.rs
Normal file
|
@ -0,0 +1,200 @@
|
|||
//! Path and URL conversion utilities
|
||||
//!
|
||||
//! This module provides consistent conversion between file paths and URLs,
|
||||
//! handling platform-specific differences and encoding issues.
|
||||
|
||||
use std::path::{Path, PathBuf};
|
||||
use tower_lsp_server::lsp_types;
|
||||
use url::Url;
|
||||
|
||||
/// Convert a file:// URL to a `PathBuf`
|
||||
///
|
||||
/// Handles percent-encoding and platform-specific path formats (e.g., Windows drives).
|
||||
#[must_use]
|
||||
pub fn url_to_path(url: &Url) -> Option<PathBuf> {
|
||||
// Only handle file:// URLs
|
||||
if url.scheme() != "file" {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Get the path component and decode percent-encoding
|
||||
let path = percent_encoding::percent_decode_str(url.path())
|
||||
.decode_utf8()
|
||||
.ok()?;
|
||||
|
||||
#[cfg(windows)]
|
||||
let path = {
|
||||
// Remove leading '/' for paths like /C:/...
|
||||
path.strip_prefix('/').unwrap_or(&path)
|
||||
};
|
||||
|
||||
Some(PathBuf::from(path.as_ref()))
|
||||
}
|
||||
|
||||
/// Convert an LSP URI to a `PathBuf`
|
||||
///
|
||||
/// This is a convenience wrapper that parses the LSP URI string and converts it.
|
||||
pub fn lsp_uri_to_path(lsp_uri: &lsp_types::Uri) -> Option<PathBuf> {
|
||||
// Parse the URI string as a URL
|
||||
let url = Url::parse(lsp_uri.as_str()).ok()?;
|
||||
url_to_path(&url)
|
||||
}
|
||||
|
||||
/// Convert a Path to a file:// URL
|
||||
///
|
||||
/// Handles both absolute and relative paths. Relative paths are resolved
|
||||
/// to absolute paths before conversion.
|
||||
#[must_use]
|
||||
pub fn path_to_url(path: &Path) -> Option<Url> {
|
||||
// For absolute paths, convert directly
|
||||
if path.is_absolute() {
|
||||
return Url::from_file_path(path).ok();
|
||||
}
|
||||
|
||||
// For relative paths, try to make them absolute first
|
||||
if let Ok(absolute_path) = std::fs::canonicalize(path) {
|
||||
return Url::from_file_path(absolute_path).ok();
|
||||
}
|
||||
|
||||
// If canonicalization fails, try converting as-is (might fail)
|
||||
Url::from_file_path(path).ok()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_url_to_path_basic() {
|
||||
let url = Url::parse("file:///home/user/file.txt").unwrap();
|
||||
let path = url_to_path(&url).unwrap();
|
||||
assert_eq!(path, PathBuf::from("/home/user/file.txt"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_to_path_with_spaces() {
|
||||
let url = Url::parse("file:///home/user/my%20file.txt").unwrap();
|
||||
let path = url_to_path(&url).unwrap();
|
||||
assert_eq!(path, PathBuf::from("/home/user/my file.txt"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_to_path_non_file_scheme() {
|
||||
let url = Url::parse("https://example.com/file.txt").unwrap();
|
||||
assert!(url_to_path(&url).is_none());
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn test_url_to_path_windows() {
|
||||
let url = Url::parse("file:///C:/Users/user/file.txt").unwrap();
|
||||
let path = url_to_path(&url).unwrap();
|
||||
assert_eq!(path, PathBuf::from("C:/Users/user/file.txt"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_path_to_url_absolute() {
|
||||
let path = if cfg!(windows) {
|
||||
PathBuf::from("C:/Users/user/file.txt")
|
||||
} else {
|
||||
PathBuf::from("/home/user/file.txt")
|
||||
};
|
||||
|
||||
let url = path_to_url(&path).unwrap();
|
||||
assert_eq!(url.scheme(), "file");
|
||||
assert!(url.path().contains("file.txt"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_round_trip() {
|
||||
let original_path = if cfg!(windows) {
|
||||
PathBuf::from("C:/Users/user/test file.txt")
|
||||
} else {
|
||||
PathBuf::from("/home/user/test file.txt")
|
||||
};
|
||||
|
||||
let url = path_to_url(&original_path).unwrap();
|
||||
let converted_path = url_to_path(&url).unwrap();
|
||||
|
||||
assert_eq!(original_path, converted_path);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_with_localhost() {
|
||||
// Some systems use file://localhost/path format
|
||||
let url = Url::parse("file://localhost/home/user/file.txt").unwrap();
|
||||
let path = url_to_path(&url);
|
||||
|
||||
// Current implementation might not handle this correctly
|
||||
// since it only checks scheme, not host
|
||||
if let Some(p) = path {
|
||||
assert_eq!(p, PathBuf::from("/home/user/file.txt"));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_with_empty_host() {
|
||||
// Standard file:///path format (three slashes, empty host)
|
||||
let url = Url::parse("file:///home/user/file.txt").unwrap();
|
||||
let path = url_to_path(&url).unwrap();
|
||||
assert_eq!(path, PathBuf::from("/home/user/file.txt"));
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn test_unc_path_to_url() {
|
||||
// UNC paths like \\server\share\file.txt
|
||||
let unc_path = PathBuf::from(r"\\server\share\file.txt");
|
||||
let url = path_to_url(&unc_path);
|
||||
|
||||
// Check if UNC paths are handled
|
||||
if let Some(u) = url {
|
||||
// UNC paths should convert to file://server/share/file.txt
|
||||
assert!(u.to_string().contains("server"));
|
||||
assert!(u.to_string().contains("share"));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_relative_path_with_dotdot() {
|
||||
// Test relative paths with .. that might not exist
|
||||
let path = PathBuf::from("../some/nonexistent/path.txt");
|
||||
let url = path_to_url(&path);
|
||||
|
||||
// This might fail if the path doesn't exist and can't be canonicalized
|
||||
// Current implementation falls back to trying direct conversion
|
||||
assert!(url.is_none() || url.is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_path_with_special_chars() {
|
||||
// Test paths with special characters that need encoding
|
||||
let path = PathBuf::from("/home/user/file with spaces & special!.txt");
|
||||
let url = path_to_url(&path).unwrap();
|
||||
|
||||
// Should be properly percent-encoded
|
||||
assert!(url.as_str().contains("%20") || url.as_str().contains("with%20spaces"));
|
||||
|
||||
// Round-trip should work
|
||||
let back = url_to_path(&url).unwrap();
|
||||
assert_eq!(back, path);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_with_query_or_fragment() {
|
||||
// URLs with query parameters or fragments should probably be rejected
|
||||
let url_with_query = Url::parse("file:///path/file.txt?query=param").unwrap();
|
||||
let url_with_fragment = Url::parse("file:///path/file.txt#section").unwrap();
|
||||
|
||||
// These should still work, extracting just the path part
|
||||
let path1 = url_to_path(&url_with_query);
|
||||
let path2 = url_to_path(&url_with_fragment);
|
||||
|
||||
if let Some(p) = path1 {
|
||||
assert_eq!(p, PathBuf::from("/path/file.txt"));
|
||||
}
|
||||
if let Some(p) = path2 {
|
||||
assert_eq!(p, PathBuf::from("/path/file.txt"));
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,13 +1,32 @@
|
|||
//! Django template context detection for completions
|
||||
//!
|
||||
//! Detects cursor position context within Django template tags to provide
|
||||
//! appropriate completions and auto-closing behavior.
|
||||
|
||||
/// Tracks what closing characters are needed to complete a template tag.
|
||||
///
|
||||
/// Used to determine whether the completion system needs to insert
|
||||
/// closing braces when completing a Django template tag.
|
||||
#[derive(Debug)]
|
||||
pub enum ClosingBrace {
|
||||
/// No closing brace present - need to add full `%}` or `}}`
|
||||
None,
|
||||
PartialClose, // just }
|
||||
FullClose, // %}
|
||||
/// Partial close present (just `}`) - need to add `%` or second `}`
|
||||
PartialClose,
|
||||
/// Full close present (`%}` or `}}`) - no closing needed
|
||||
FullClose,
|
||||
}
|
||||
|
||||
/// Cursor context within a Django template tag for completion support.
|
||||
///
|
||||
/// Captures the state around the cursor position to provide intelligent
|
||||
/// completions and determine what text needs to be inserted.
|
||||
#[derive(Debug)]
|
||||
pub struct TemplateTagContext {
|
||||
/// The partial tag text before the cursor (e.g., "loa" for "{% loa|")
|
||||
pub partial_tag: String,
|
||||
/// What closing characters are already present after the cursor
|
||||
pub closing_brace: ClosingBrace,
|
||||
/// Whether a space is needed before the completion (true if cursor is right after `{%`)
|
||||
pub needs_leading_space: bool,
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue