This commit is contained in:
Josh Thomas 2025-09-03 15:26:20 +00:00 committed by GitHub
commit 6308082d23
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
25 changed files with 2926 additions and 644 deletions

486
Cargo.lock generated
View file

@ -140,9 +140,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.9.2"
version = "2.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a65b545ab31d687cff52899d4890855fec459eb6afe0da6417b8a18da87aa29"
checksum = "34efbcccd345379ca2868b2b2c9d3782e9cc58ba87bc7d79d5b53d9c9ae6f25d"
dependencies = [
"serde",
]
@ -169,16 +169,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
[[package]]
name = "cfg-if"
version = "1.0.1"
name = "camino"
version = "1.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
checksum = "dd0b03af37dad7a14518b7691d81acb0f8222604ad3d1b02f6b4bed5188c0cd5"
[[package]]
name = "cfg-if"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
[[package]]
name = "clap"
version = "4.5.45"
version = "4.5.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fc0e74a703892159f5ae7d3aac52c8e6c392f5ae5f359c70b5881d60aaac318"
checksum = "2c5e4fcf9c21d2e544ca1ee9d8552de13019a42aa7dbf32747fa7aaf1df76e57"
dependencies = [
"clap_builder",
"clap_derive",
@ -186,9 +192,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.44"
version = "4.5.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3e7f4214277f3c7aa526a59dd3fbe306a370daee1f8b7b8c987069cd8e888a8"
checksum = "fecb53a0e6fcfb055f686001bc2e2592fa527efaf38dbe81a6a9563562e57d41"
dependencies = [
"anstream",
"anstyle",
@ -403,6 +409,17 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "djls"
version = "5.2.0-alpha"
@ -425,7 +442,7 @@ dependencies = [
"directories",
"serde",
"tempfile",
"thiserror 2.0.15",
"thiserror 2.0.16",
"toml",
]
@ -453,20 +470,25 @@ name = "djls-server"
version = "0.0.0"
dependencies = [
"anyhow",
"camino",
"dashmap",
"djls-conf",
"djls-dev",
"djls-project",
"djls-templates",
"djls-workspace",
"percent-encoding",
"pyo3",
"salsa",
"serde",
"serde_json",
"tempfile",
"tokio",
"tower-lsp-server",
"tracing",
"tracing-appender",
"tracing-subscriber",
"url",
]
[[package]]
@ -477,10 +499,29 @@ dependencies = [
"insta",
"serde",
"tempfile",
"thiserror 2.0.15",
"thiserror 2.0.16",
"toml",
]
[[package]]
name = "djls-workspace"
version = "0.0.0"
dependencies = [
"anyhow",
"camino",
"dashmap",
"djls-project",
"djls-templates",
"notify",
"percent-encoding",
"salsa",
"tempfile",
"tokio",
"tower-lsp-server",
"tracing",
"url",
]
[[package]]
name = "dlv-list"
version = "0.5.2"
@ -564,6 +605,24 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "form_urlencoded"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
dependencies = [
"percent-encoding",
]
[[package]]
name = "fsevent-sys"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
dependencies = [
"libc",
]
[[package]]
name = "futures"
version = "0.3.31"
@ -719,10 +778,117 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "indexmap"
version = "2.10.0"
name = "icu_collections"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
dependencies = [
"displaydoc",
"potential_utf",
"yoke",
"zerofrom",
"zerovec",
]
[[package]]
name = "icu_locale_core"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
dependencies = [
"displaydoc",
"litemap",
"tinystr",
"writeable",
"zerovec",
]
[[package]]
name = "icu_normalizer"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
dependencies = [
"displaydoc",
"icu_collections",
"icu_normalizer_data",
"icu_properties",
"icu_provider",
"smallvec",
"zerovec",
]
[[package]]
name = "icu_normalizer_data"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
[[package]]
name = "icu_properties"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
dependencies = [
"displaydoc",
"icu_collections",
"icu_locale_core",
"icu_properties_data",
"icu_provider",
"potential_utf",
"zerotrie",
"zerovec",
]
[[package]]
name = "icu_properties_data"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
[[package]]
name = "icu_provider"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
dependencies = [
"displaydoc",
"icu_locale_core",
"stable_deref_trait",
"tinystr",
"writeable",
"yoke",
"zerofrom",
"zerotrie",
"zerovec",
]
[[package]]
name = "idna"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
dependencies = [
"idna_adapter",
"smallvec",
"utf8_iter",
]
[[package]]
name = "idna_adapter"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
dependencies = [
"icu_normalizer",
"icu_properties",
]
[[package]]
name = "indexmap"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9"
dependencies = [
"equivalent",
"hashbrown 0.15.5",
@ -734,6 +900,26 @@ version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
[[package]]
name = "inotify"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
dependencies = [
"bitflags 2.9.3",
"inotify-sys",
"libc",
]
[[package]]
name = "inotify-sys"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
dependencies = [
"libc",
]
[[package]]
name = "insta"
version = "1.43.1"
@ -757,11 +943,11 @@ dependencies = [
[[package]]
name = "io-uring"
version = "0.7.9"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b"
dependencies = [
"bitflags 2.9.2",
"bitflags 2.9.3",
"cfg-if",
"libc",
]
@ -789,6 +975,26 @@ dependencies = [
"serde",
]
[[package]]
name = "kqueue"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a"
dependencies = [
"kqueue-sys",
"libc",
]
[[package]]
name = "kqueue-sys"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
dependencies = [
"bitflags 1.3.2",
"libc",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
@ -807,7 +1013,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3"
dependencies = [
"bitflags 2.9.2",
"bitflags 2.9.3",
"libc",
]
@ -817,6 +1023,12 @@ version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
[[package]]
name = "litemap"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
[[package]]
name = "lock_api"
version = "0.4.13"
@ -886,10 +1098,35 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
dependencies = [
"libc",
"log",
"wasi 0.11.1+wasi-snapshot-preview1",
"windows-sys 0.59.0",
]
[[package]]
name = "notify"
version = "8.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3"
dependencies = [
"bitflags 2.9.3",
"fsevent-sys",
"inotify",
"kqueue",
"libc",
"log",
"mio",
"notify-types",
"walkdir",
"windows-sys 0.60.2",
]
[[package]]
name = "notify-types"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
@ -990,9 +1227,9 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]]
name = "percent-encoding"
version = "2.3.1"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "pest"
@ -1001,7 +1238,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323"
dependencies = [
"memchr",
"thiserror 2.0.15",
"thiserror 2.0.16",
"ucd-trie",
]
@ -1056,6 +1293,15 @@ version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
[[package]]
name = "potential_utf"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
dependencies = [
"zerovec",
]
[[package]]
name = "powerfmt"
version = "0.2.0"
@ -1174,7 +1420,7 @@ version = "0.5.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
dependencies = [
"bitflags 2.9.2",
"bitflags 2.9.3",
]
[[package]]
@ -1185,19 +1431,19 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac"
dependencies = [
"getrandom 0.2.16",
"libredox",
"thiserror 2.0.15",
"thiserror 2.0.16",
]
[[package]]
name = "regex"
version = "1.11.1"
version = "1.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
"regex-automata 0.4.10",
"regex-syntax 0.8.6",
]
[[package]]
@ -1211,13 +1457,13 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.4.9"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.8.5",
"regex-syntax 0.8.6",
]
[[package]]
@ -1228,9 +1474,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.8.5"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
[[package]]
name = "ron"
@ -1239,7 +1485,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94"
dependencies = [
"base64",
"bitflags 2.9.2",
"bitflags 2.9.3",
"serde",
"serde_derive",
]
@ -1273,7 +1519,7 @@ version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
dependencies = [
"bitflags 2.9.2",
"bitflags 2.9.3",
"errno",
"libc",
"linux-raw-sys",
@ -1329,6 +1575,15 @@ dependencies = [
"synstructure",
]
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
@ -1378,9 +1633,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.142"
version = "1.0.143"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7"
checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a"
dependencies = [
"itoa",
"memchr",
@ -1465,6 +1720,12 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "strsim"
version = "0.11.1"
@ -1507,15 +1768,15 @@ checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a"
[[package]]
name = "tempfile"
version = "3.20.0"
version = "3.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
checksum = "15b61f8f20e3a6f7e0649d825294eaf317edce30f82cf6026e7e4cb9222a7d1e"
dependencies = [
"fastrand",
"getrandom 0.3.3",
"once_cell",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.60.2",
]
[[package]]
@ -1535,11 +1796,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.15"
version = "2.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80d76d3f064b981389ecb4b6b7f45a0bf9fdac1d5b9204c7bd6714fecc302850"
checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0"
dependencies = [
"thiserror-impl 2.0.15",
"thiserror-impl 2.0.16",
]
[[package]]
@ -1555,9 +1816,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.15"
version = "2.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d29feb33e986b6ea906bd9c3559a856983f92371b3eaa5e83782a351623de0"
checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960"
dependencies = [
"proc-macro2",
"quote",
@ -1613,6 +1874,16 @@ dependencies = [
"crunchy",
]
[[package]]
name = "tinystr"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
dependencies = [
"displaydoc",
"zerovec",
]
[[package]]
name = "tokio"
version = "1.47.1"
@ -1859,6 +2130,24 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
[[package]]
name = "url"
version = "2.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
"serde",
]
[[package]]
name = "utf8_iter"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
[[package]]
name = "utf8parse"
version = "0.2.2"
@ -1877,6 +2166,16 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "walkdir"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
"same-file",
"winapi-util",
]
[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
@ -1919,6 +2218,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0978bf7171b3d90bac376700cb56d606feb40f251a475a5d6634613564460b22"
dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
@ -2089,9 +2397,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
version = "0.7.12"
version = "0.7.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95"
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
dependencies = [
"memchr",
]
@ -2108,9 +2416,15 @@ version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
dependencies = [
"bitflags 2.9.2",
"bitflags 2.9.3",
]
[[package]]
name = "writeable"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "yaml-rust2"
version = "0.10.3"
@ -2121,3 +2435,81 @@ dependencies = [
"encoding_rs",
"hashlink",
]
[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive",
"zerofrom",
]
[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerofrom"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
dependencies = [
"zerofrom-derive",
]
[[package]]
name = "zerofrom-derive"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerotrie"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
dependencies = [
"displaydoc",
"yoke",
"zerofrom",
]
[[package]]
name = "zerovec"
version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b"
dependencies = [
"yoke",
"zerofrom",
"zerovec-derive",
]
[[package]]
name = "zerovec-derive"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
dependencies = [
"proc-macro2",
"quote",
"syn",
]

View file

@ -9,6 +9,7 @@ djls-dev = { path = "crates/djls-dev" }
djls-project = { path = "crates/djls-project" }
djls-server = { path = "crates/djls-server" }
djls-templates = { path = "crates/djls-templates" }
djls-workspace = { path = "crates/djls-workspace" }
# core deps, pin exact versions
pyo3 = "0.25.0"
@ -17,9 +18,12 @@ salsa = "0.23.0"
tower-lsp-server = { version = "0.22.0", features = ["proposed"] }
anyhow = "1.0"
camino = "1.1"
clap = { version = "4.5", features = ["derive"] }
config = { version ="0.15", features = ["toml"] }
dashmap = "6.1"
directories = "6.0"
notify = "8.2"
percent-encoding = "2.3"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@ -29,6 +33,7 @@ toml = "0.9"
tracing = "0.1"
tracing-appender = "0.2"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "time"] }
url = "2.5"
which = "8.0"
# testing

View file

@ -11,8 +11,11 @@ default = []
djls-conf = { workspace = true }
djls-project = { workspace = true }
djls-templates = { workspace = true }
djls-workspace = { workspace = true }
anyhow = { workspace = true }
camino = { workspace = true }
dashmap = { workspace = true }
percent-encoding = { workspace = true }
pyo3 = { workspace = true }
salsa = { workspace = true }
@ -23,9 +26,13 @@ tower-lsp-server = { workspace = true }
tracing = { workspace = true }
tracing-appender = { workspace = true }
tracing-subscriber = { workspace = true }
url = { workspace = true }
[build-dependencies]
djls-dev = { workspace = true }
[dev-dependencies]
tempfile = { workspace = true }
[lints]
workspace = true

View file

@ -123,45 +123,38 @@ macro_rules! request {
#[allow(dead_code)]
pub mod messages {
use tower_lsp_server::lsp_types::MessageActionItem;
use tower_lsp_server::lsp_types::MessageType;
use tower_lsp_server::lsp_types::ShowDocumentParams;
use tower_lsp_server::lsp_types;
use super::get_client;
use super::Display;
use super::Error;
notify!(log_message, message_type: MessageType, message: impl Display + Send + 'static);
notify!(show_message, message_type: MessageType, message: impl Display + Send + 'static);
request!(show_message_request, message_type: MessageType, message: impl Display + Send + 'static, actions: Option<Vec<MessageActionItem>> ; Option<MessageActionItem>);
request!(show_document, params: ShowDocumentParams ; bool);
notify!(log_message, message_type: lsp_types::MessageType, message: impl Display + Send + 'static);
notify!(show_message, message_type: lsp_types::MessageType, message: impl Display + Send + 'static);
request!(show_message_request, message_type: lsp_types::MessageType, message: impl Display + Send + 'static, actions: Option<Vec<lsp_types::MessageActionItem>> ; Option<lsp_types::MessageActionItem>);
request!(show_document, params: lsp_types::ShowDocumentParams ; bool);
}
#[allow(dead_code)]
pub mod diagnostics {
use tower_lsp_server::lsp_types::Diagnostic;
use tower_lsp_server::lsp_types::Uri;
use tower_lsp_server::lsp_types;
use super::get_client;
notify!(publish_diagnostics, uri: Uri, diagnostics: Vec<Diagnostic>, version: Option<i32>);
notify!(publish_diagnostics, uri: lsp_types::Uri, diagnostics: Vec<lsp_types::Diagnostic>, version: Option<i32>);
notify_discard!(workspace_diagnostic_refresh,);
}
#[allow(dead_code)]
pub mod workspace {
use tower_lsp_server::lsp_types::ApplyWorkspaceEditResponse;
use tower_lsp_server::lsp_types::ConfigurationItem;
use tower_lsp_server::lsp_types::LSPAny;
use tower_lsp_server::lsp_types::WorkspaceEdit;
use tower_lsp_server::lsp_types::WorkspaceFolder;
use tower_lsp_server::lsp_types;
use super::get_client;
use super::Error;
request!(apply_edit, edit: WorkspaceEdit ; ApplyWorkspaceEditResponse);
request!(configuration, items: Vec<ConfigurationItem> ; Vec<LSPAny>);
request!(workspace_folders, ; Option<Vec<WorkspaceFolder>>);
request!(apply_edit, edit: lsp_types::WorkspaceEdit ; lsp_types::ApplyWorkspaceEditResponse);
request!(configuration, items: Vec<lsp_types::ConfigurationItem> ; Vec<lsp_types::LSPAny>);
request!(workspace_folders, ; Option<Vec<lsp_types::WorkspaceFolder>>);
}
#[allow(dead_code)]
@ -176,19 +169,18 @@ pub mod editor {
#[allow(dead_code)]
pub mod capabilities {
use tower_lsp_server::lsp_types::Registration;
use tower_lsp_server::lsp_types::Unregistration;
use tower_lsp_server::lsp_types;
use super::get_client;
notify_discard!(register_capability, registrations: Vec<Registration>);
notify_discard!(unregister_capability, unregisterations: Vec<Unregistration>);
notify_discard!(register_capability, registrations: Vec<lsp_types::Registration>);
notify_discard!(unregister_capability, unregisterations: Vec<lsp_types::Unregistration>);
}
#[allow(dead_code)]
pub mod monitoring {
use serde::Serialize;
use tower_lsp_server::lsp_types::ProgressToken;
use tower_lsp_server::lsp_types;
use tower_lsp_server::Progress;
use super::get_client;
@ -201,22 +193,24 @@ pub mod monitoring {
}
}
pub fn progress<T: Into<String> + Send>(token: ProgressToken, title: T) -> Option<Progress> {
pub fn progress<T: Into<String> + Send>(
token: lsp_types::ProgressToken,
title: T,
) -> Option<Progress> {
get_client().map(|client| client.progress(token, title))
}
}
#[allow(dead_code)]
pub mod protocol {
use tower_lsp_server::lsp_types::notification::Notification;
use tower_lsp_server::lsp_types::request::Request;
use tower_lsp_server::lsp_types;
use super::get_client;
use super::Error;
pub fn send_notification<N>(params: N::Params)
where
N: Notification,
N: lsp_types::notification::Notification,
N::Params: Send + 'static,
{
if let Some(client) = get_client() {
@ -228,7 +222,7 @@ pub mod protocol {
pub async fn send_request<R>(params: R::Params) -> Result<R::Result, Error>
where
R: Request,
R: lsp_types::request::Request,
R::Params: Send + 'static,
R::Result: Send + 'static,
{

View file

@ -1,22 +0,0 @@
// NOTE(review): this block is the full body of the `db.rs` file that the
// commit deletes (the `@ -1,22 +0,0` hunk above). Comments below document
// the removed code; the code itself is reproduced byte-for-byte.
use salsa::Database;
// Thin wrapper around `salsa::Storage` serving as the server's salsa database.
// `#[salsa::db]` wires the struct into salsa's database machinery; `Default`
// allows construction with empty storage, `Clone` gives cheap handles
// (presumably snapshot-style sharing — salsa storages are designed for this,
// but that is salsa-internal behavior not visible here).
#[salsa::db]
#[derive(Clone, Default)]
pub struct ServerDatabase {
storage: salsa::Storage<Self>,
}
impl ServerDatabase {
/// Create a new database from storage
pub fn new(storage: salsa::Storage<Self>) -> Self {
Self { storage }
}
}
// Manual `Debug` instead of `#[derive(Debug)]`: the salsa storage is opaque,
// so only the type name is printed, with `finish_non_exhaustive` signalling
// hidden fields (renders as `ServerDatabase { .. }`).
impl std::fmt::Debug for ServerDatabase {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("ServerDatabase").finish_non_exhaustive()
}
}
// Marker impl required for the type to be usable as a salsa `Database`;
// all methods use salsa's defaults.
impl Database for ServerDatabase {}

View file

@ -1,10 +1,8 @@
mod client;
mod db;
mod logging;
mod queue;
mod server;
mod session;
mod workspace;
pub mod server;
pub mod session;
use std::io::IsTerminal;
@ -12,7 +10,8 @@ use anyhow::Result;
use tower_lsp_server::LspService;
use tower_lsp_server::Server;
use crate::server::DjangoLanguageServer;
pub use crate::server::DjangoLanguageServer;
pub use crate::session::Session;
pub fn run() -> Result<()> {
if std::io::stdin().is_terminal() {

View file

@ -15,7 +15,7 @@
use std::sync::Arc;
use tower_lsp_server::lsp_types::MessageType;
use tower_lsp_server::lsp_types;
use tracing::field::Visit;
use tracing::Level;
use tracing_appender::non_blocking::WorkerGuard;
@ -32,13 +32,13 @@ use tracing_subscriber::Registry;
/// that are sent to the client. It filters events by level to avoid overwhelming
/// the client with verbose trace logs.
pub struct LspLayer {
send_message: Arc<dyn Fn(MessageType, String) + Send + Sync>,
send_message: Arc<dyn Fn(lsp_types::MessageType, String) + Send + Sync>,
}
impl LspLayer {
pub fn new<F>(send_message: F) -> Self
where
F: Fn(MessageType, String) + Send + Sync + 'static,
F: Fn(lsp_types::MessageType, String) + Send + Sync + 'static,
{
Self {
send_message: Arc::new(send_message),
@ -82,10 +82,10 @@ where
let metadata = event.metadata();
let message_type = match *metadata.level() {
Level::ERROR => MessageType::ERROR,
Level::WARN => MessageType::WARNING,
Level::INFO => MessageType::INFO,
Level::DEBUG => MessageType::LOG,
Level::ERROR => lsp_types::MessageType::ERROR,
Level::WARN => lsp_types::MessageType::WARNING,
Level::INFO => lsp_types::MessageType::INFO,
Level::DEBUG => lsp_types::MessageType::LOG,
Level::TRACE => {
// Skip TRACE level - too verbose for LSP client
// TODO: Add MessageType::Debug in LSP 3.18.0
@ -112,7 +112,7 @@ where
/// Returns a `WorkerGuard` that must be kept alive for the file logging to work.
pub fn init_tracing<F>(send_message: F) -> WorkerGuard
where
F: Fn(MessageType, String) + Send + Sync + 'static,
F: Fn(lsp_types::MessageType, String) + Send + Sync + 'static,
{
let file_appender = tracing_appender::rolling::daily("/tmp", "djls.log");
let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);

View file

@ -1,29 +1,13 @@
use std::future::Future;
use std::sync::Arc;
use djls_workspace::paths;
use tokio::sync::RwLock;
use tower_lsp_server::jsonrpc::Result as LspResult;
use tower_lsp_server::lsp_types::CompletionOptions;
use tower_lsp_server::lsp_types::CompletionParams;
use tower_lsp_server::lsp_types::CompletionResponse;
use tower_lsp_server::lsp_types::DidChangeConfigurationParams;
use tower_lsp_server::lsp_types::DidChangeTextDocumentParams;
use tower_lsp_server::lsp_types::DidCloseTextDocumentParams;
use tower_lsp_server::lsp_types::DidOpenTextDocumentParams;
use tower_lsp_server::lsp_types::InitializeParams;
use tower_lsp_server::lsp_types::InitializeResult;
use tower_lsp_server::lsp_types::InitializedParams;
use tower_lsp_server::lsp_types::OneOf;
use tower_lsp_server::lsp_types::SaveOptions;
use tower_lsp_server::lsp_types::ServerCapabilities;
use tower_lsp_server::lsp_types::ServerInfo;
use tower_lsp_server::lsp_types::TextDocumentSyncCapability;
use tower_lsp_server::lsp_types::TextDocumentSyncKind;
use tower_lsp_server::lsp_types::TextDocumentSyncOptions;
use tower_lsp_server::lsp_types::WorkspaceFoldersServerCapabilities;
use tower_lsp_server::lsp_types::WorkspaceServerCapabilities;
use tower_lsp_server::lsp_types;
use tower_lsp_server::LanguageServer;
use tracing_appender::non_blocking::WorkerGuard;
use url::Url;
use crate::queue::Queue;
use crate::session::Session;
@ -91,7 +75,10 @@ impl DjangoLanguageServer {
}
impl LanguageServer for DjangoLanguageServer {
async fn initialize(&self, params: InitializeParams) -> LspResult<InitializeResult> {
async fn initialize(
&self,
params: lsp_types::InitializeParams,
) -> LspResult<lsp_types::InitializeResult> {
tracing::info!("Initializing server...");
let session = Session::new(&params);
@ -101,9 +88,9 @@ impl LanguageServer for DjangoLanguageServer {
*session_lock = Some(session);
}
Ok(InitializeResult {
capabilities: ServerCapabilities {
completion_provider: Some(CompletionOptions {
Ok(lsp_types::InitializeResult {
capabilities: lsp_types::ServerCapabilities {
completion_provider: Some(lsp_types::CompletionOptions {
resolve_provider: Some(false),
trigger_characters: Some(vec![
"{".to_string(),
@ -112,25 +99,25 @@ impl LanguageServer for DjangoLanguageServer {
]),
..Default::default()
}),
workspace: Some(WorkspaceServerCapabilities {
workspace_folders: Some(WorkspaceFoldersServerCapabilities {
workspace: Some(lsp_types::WorkspaceServerCapabilities {
workspace_folders: Some(lsp_types::WorkspaceFoldersServerCapabilities {
supported: Some(true),
change_notifications: Some(OneOf::Left(true)),
change_notifications: Some(lsp_types::OneOf::Left(true)),
}),
file_operations: None,
}),
text_document_sync: Some(TextDocumentSyncCapability::Options(
TextDocumentSyncOptions {
text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Options(
lsp_types::TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),
change: Some(lsp_types::TextDocumentSyncKind::INCREMENTAL),
will_save: Some(false),
will_save_wait_until: Some(false),
save: Some(SaveOptions::default().into()),
save: Some(lsp_types::SaveOptions::default().into()),
},
)),
..Default::default()
},
server_info: Some(ServerInfo {
server_info: Some(lsp_types::ServerInfo {
name: SERVER_NAME.to_string(),
version: Some(SERVER_VERSION.to_string()),
}),
@ -139,7 +126,7 @@ impl LanguageServer for DjangoLanguageServer {
}
#[allow(clippy::too_many_lines)]
async fn initialized(&self, _params: InitializedParams) {
async fn initialized(&self, _params: lsp_types::InitializedParams) {
tracing::info!("Server received initialized notification.");
self.with_session_task(|session_arc| async move {
@ -214,55 +201,98 @@ impl LanguageServer for DjangoLanguageServer {
Ok(())
}
async fn did_open(&self, params: DidOpenTextDocumentParams) {
async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) {
tracing::info!("Opened document: {:?}", params.text_document.uri);
self.with_session_mut(|session| {
let db = session.db();
session.documents_mut().handle_did_open(&db, &params);
// Convert LSP types to our types
let url =
Url::parse(&params.text_document.uri.to_string()).expect("Valid URI from LSP");
let language_id =
djls_workspace::LanguageId::from(params.text_document.language_id.as_str());
let document = djls_workspace::TextDocument::new(
params.text_document.text,
params.text_document.version,
language_id,
);
session.open_document(&url, document);
})
.await;
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) {
tracing::info!("Changed document: {:?}", params.text_document.uri);
self.with_session_mut(|session| {
let db = session.db();
let _ = session.documents_mut().handle_did_change(&db, &params);
let url =
Url::parse(&params.text_document.uri.to_string()).expect("Valid URI from LSP");
let new_version = params.text_document.version;
let changes = params.content_changes;
match session.apply_document_changes(&url, changes.clone(), new_version) {
Ok(()) => {}
Err(err) => {
tracing::warn!("{}", err);
// Recovery: handle full content changes only
if let Some(change) = changes.into_iter().next() {
let document = djls_workspace::TextDocument::new(
change.text,
new_version,
djls_workspace::LanguageId::Other,
);
session.update_document(&url, document);
}
}
}
})
.await;
}
async fn did_close(&self, params: DidCloseTextDocumentParams) {
async fn did_close(&self, params: lsp_types::DidCloseTextDocumentParams) {
tracing::info!("Closed document: {:?}", params.text_document.uri);
self.with_session_mut(|session| {
session.documents_mut().handle_did_close(&params);
let url =
Url::parse(&params.text_document.uri.to_string()).expect("Valid URI from LSP");
if session.close_document(&url).is_none() {
tracing::warn!("Attempted to close document without overlay: {}", url);
}
})
.await;
}
async fn completion(&self, params: CompletionParams) -> LspResult<Option<CompletionResponse>> {
Ok(self
.with_session(|session| {
if let Some(project) = session.project() {
if let Some(tags) = project.template_tags() {
let db = session.db();
return session.documents().get_completions(
&db,
params.text_document_position.text_document.uri.as_str(),
params.text_document_position.position,
tags,
);
async fn completion(
&self,
params: lsp_types::CompletionParams,
) -> LspResult<Option<lsp_types::CompletionResponse>> {
let response = self
.with_session_mut(|session| {
let lsp_uri = &params.text_document_position.text_document.uri;
let url = Url::parse(&lsp_uri.to_string()).expect("Valid URI from LSP");
let position = params.text_document_position.position;
tracing::debug!("Completion requested for {} at {:?}", url, position);
if let Some(path) = paths::url_to_path(&url) {
let content = session.file_content(path);
if content.is_empty() {
tracing::debug!("File {} has no content", url);
} else {
tracing::debug!("Using content for completion in {}", url);
// TODO: Implement actual completion logic using content
}
}
None
})
.await)
.await;
Ok(response)
}
async fn did_change_configuration(&self, _params: DidChangeConfigurationParams) {
async fn did_change_configuration(&self, _params: lsp_types::DidChangeConfigurationParams) {
tracing::info!("Configuration change detected. Reloading settings...");
let project_path = self

View file

@ -1,55 +1,134 @@
//! # Salsa [`StorageHandle`] Pattern for LSP
//!
//! This module implements a thread-safe Salsa database wrapper for use with
//! tower-lsp's async runtime. The key challenge is that tower-lsp requires
//! `Send + Sync + 'static` bounds, but Salsa's `Storage` contains thread-local
//! state and is not `Send`.
//!
//! ## The Solution: [`StorageHandle`]
//!
//! Salsa provides [`StorageHandle`] which IS `Send + Sync` because it contains
//! no thread-local state. We store the handle and create `Storage`/`Database`
//! instances on-demand.
//!
//! ## The Mutation Challenge
//!
//! When mutating Salsa inputs (e.g., updating file revisions), Salsa must
//! ensure exclusive access to prevent race conditions. It does this via
//! `cancel_others()` which:
//!
//! 1. Sets a cancellation flag (causes other threads to panic with `Cancelled`)
//! 2. Waits for all `StorageHandle` clones to drop
//! 3. Proceeds with the mutation
//!
//! If we accidentally clone the handle instead of taking ownership, step 2
//! never completes → deadlock!
//!
//! ## The Pattern
//!
//! - **Reads**: Clone the handle freely ([`with_db`](Session::with_db))
//! - **Mutations**: Take exclusive ownership ([`with_db_mut`](Session::with_db_mut) via [`take_db_handle_for_mutation`](Session::take_db_handle_for_mutation))
//!
//! The explicit method names make the intent clear and prevent accidental misuse.
//!
//! [`StorageHandle`]: salsa::StorageHandle
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use dashmap::DashMap;
use djls_conf::Settings;
use djls_project::DjangoProject;
use djls_workspace::db::Database;
use djls_workspace::db::SourceFile;
use djls_workspace::paths;
use djls_workspace::Buffers;
use djls_workspace::FileSystem;
use djls_workspace::OsFileSystem;
use djls_workspace::TextDocument;
use djls_workspace::WorkspaceFileSystem;
use salsa::StorageHandle;
use tower_lsp_server::lsp_types::ClientCapabilities;
use tower_lsp_server::lsp_types::InitializeParams;
use tower_lsp_server::lsp_types;
use url::Url;
use crate::db::ServerDatabase;
use crate::workspace::Store;
#[derive(Default)]
/// LSP Session with thread-safe Salsa database access.
///
/// Uses Salsa's [`StorageHandle`] pattern to maintain `Send + Sync + 'static`
/// compatibility required by tower-lsp. The handle can be safely shared
/// across threads and async boundaries.
///
/// See [this Salsa Zulip discussion](https://salsa.zulipchat.com/#narrow/channel/145099-Using-Salsa/topic/.E2.9C.94.20Advice.20on.20using.20salsa.20from.20Sync.20.2B.20Send.20context/with/495497515)
/// for more information about [`StorageHandle`].
///
/// ## Architecture
///
/// Two-layer system inspired by Ruff/Ty:
/// - **Layer 1**: In-memory overlays (LSP document edits)
/// - **Layer 2**: Salsa database (incremental computation cache)
///
/// ## Salsa Mutation Protocol
///
/// When mutating Salsa inputs (like changing file revisions), we must ensure
/// exclusive access to prevent race conditions. Salsa enforces this through
/// its `cancel_others()` mechanism, which waits for all [`StorageHandle`] clones
/// to drop before allowing mutations.
///
/// We use explicit methods (`take_db_handle_for_mutation`/`restore_db_handle`)
/// to make this ownership transfer clear and prevent accidental deadlocks.
///
/// [`StorageHandle`]: salsa::StorageHandle
pub struct Session {
/// The Django project configuration
project: Option<DjangoProject>,
documents: Store,
/// LSP server settings
settings: Settings,
#[allow(dead_code)]
client_capabilities: ClientCapabilities,
/// Layer 1: Shared buffer storage for open documents
///
/// This implements Ruff's two-layer architecture where Layer 1 contains
/// open document buffers that take precedence over disk files. The buffers
/// are shared between Session (which manages them) and
/// [`WorkspaceFileSystem`](djls_workspace::WorkspaceFileSystem) (which reads from them).
///
/// Key properties:
/// - Thread-safe via the Buffers abstraction
/// - Contains full [`TextDocument`](djls_workspace::TextDocument) with content, version, and metadata
/// - Never becomes Salsa inputs - only intercepted at read time
buffers: Buffers,
/// A thread-safe Salsa database handle that can be shared between threads.
/// File system abstraction with buffer interception
///
/// This implements the insight from [this Salsa Zulip discussion](https://salsa.zulipchat.com/#narrow/channel/145099-Using-Salsa/topic/.E2.9C.94.20Advice.20on.20using.20salsa.20from.20Sync.20.2B.20Send.20context/with/495497515)
/// where we're using the `StorageHandle` to create a thread-safe handle that can be
/// shared between threads. When we need to use it, we clone the handle to get a new reference.
/// This [`WorkspaceFileSystem`](djls_workspace::WorkspaceFileSystem) bridges Layer 1 (buffers) and Layer 2 (Salsa).
/// It intercepts [`FileSystem::read_to_string()`](djls_workspace::FileSystem::read_to_string()) calls to return buffer
/// content when available, falling back to disk otherwise.
file_system: Arc<dyn FileSystem>,
/// Shared file tracking across all Database instances
///
/// This handle allows us to create database instances as needed.
/// Even though we're using a single-threaded runtime, we still need
/// this to be thread-safe because of LSP trait requirements.
/// This is the canonical Salsa pattern from the lazy-input example.
/// The [`DashMap`] provides O(1) lookups and is shared via Arc across
/// all Database instances created from [`StorageHandle`](salsa::StorageHandle).
files: Arc<DashMap<PathBuf, SourceFile>>,
#[allow(dead_code)]
client_capabilities: lsp_types::ClientCapabilities,
/// Layer 2: Thread-safe Salsa database handle for pure computation
///
/// Usage:
/// ```rust,ignore
/// // Use the StorageHandle in Session
/// let db_handle = StorageHandle::new(None);
/// where we're using the [`StorageHandle`](salsa::StorageHandle) to create a thread-safe handle that can be
/// shared between threads.
///
/// // Clone it to pass to different threads
/// let db_handle_clone = db_handle.clone();
///
/// // Use it in an async context
/// async_fn(move || {
/// // Get a database from the handle
/// let storage = db_handle_clone.into_storage();
/// let db = ServerDatabase::new(storage);
///
/// // Use the database
/// db.some_query(args)
/// });
/// ```
db_handle: StorageHandle<ServerDatabase>,
/// The database receives file content via the [`FileSystem`](djls_workspace::FileSystem) trait, which
/// is intercepted by our [`WorkspaceFileSystem`](djls_workspace::WorkspaceFileSystem) to provide overlay content.
/// This maintains proper separation between Layer 1 and Layer 2.
db_handle: StorageHandle<Database>,
}
impl Session {
pub fn new(params: &InitializeParams) -> Self {
let project_path = crate::workspace::get_project_path(params);
pub fn new(params: &lsp_types::InitializeParams) -> Self {
let project_path = Self::get_project_path(params);
let (project, settings) = if let Some(path) = &project_path {
let settings =
@ -62,15 +141,43 @@ impl Session {
(None, Settings::default())
};
let buffers = Buffers::new();
let files = Arc::new(DashMap::new());
let file_system = Arc::new(WorkspaceFileSystem::new(
buffers.clone(),
Arc::new(OsFileSystem),
));
let db_handle = Database::new(file_system.clone(), files.clone())
.storage()
.clone()
.into_zalsa_handle();
Self {
client_capabilities: params.capabilities.clone(),
project,
documents: Store::default(),
settings,
db_handle: StorageHandle::new(None),
buffers,
file_system,
files,
client_capabilities: params.capabilities.clone(),
db_handle,
}
}
/// Determines the project root path from initialization parameters.
///
/// Tries the current directory first, then falls back to the first workspace folder.
fn get_project_path(params: &lsp_types::InitializeParams) -> Option<PathBuf> {
// Try current directory first
std::env::current_dir().ok().or_else(|| {
// Fall back to the first workspace folder URI
params
.workspace_folders
.as_ref()
.and_then(|folders| folders.first())
.and_then(|folder| paths::lsp_uri_to_path(&folder.uri))
})
}
    /// Returns the detected Django project configuration, if any.
    #[must_use]
    pub fn project(&self) -> Option<&DjangoProject> {
        self.project.as_ref()
    }
@ -79,14 +186,7 @@ impl Session {
&mut self.project
}
pub fn documents(&self) -> &Store {
&self.documents
}
pub fn documents_mut(&mut self) -> &mut Store {
&mut self.documents
}
    /// Returns the current LSP server settings.
    #[must_use]
    pub fn settings(&self) -> &Settings {
        &self.settings
    }
@ -95,12 +195,330 @@ impl Session {
self.settings = settings;
}
/// Get a database instance directly from the session
// TODO: Explore an abstraction around [`salsa::StorageHandle`] and the following two methods
// to make it easy in the future to avoid deadlocks. For now, this is simpler and TBH may be
// all we ever need, but still.. might be a nice CYA for future me
/// Takes exclusive ownership of the database handle for mutation operations.
///
/// This creates a usable database from the handle, which can be used
/// to query and update data in the database.
pub fn db(&self) -> ServerDatabase {
/// This method extracts the [`StorageHandle`](salsa::StorageHandle) from the session, replacing it
/// with a temporary placeholder. This ensures there's exactly one handle
/// active during mutations, preventing deadlocks in Salsa's `cancel_others()`.
///
/// ## Why Not Clone?
///
/// Cloning would create multiple handles. When Salsa needs to mutate inputs,
/// it calls `cancel_others()` which waits for all handles to drop. With
/// multiple handles, this wait would never complete → deadlock.
///
/// ## Panics
///
/// This is an internal method that should only be called by
/// [`with_db_mut`](Session::with_db_mut). Multiple concurrent calls would panic when trying
/// to take an already-taken handle.
fn take_db_handle_for_mutation(&mut self) -> StorageHandle<Database> {
std::mem::replace(&mut self.db_handle, StorageHandle::new(None))
}
    /// Restores the database handle after a mutation operation completes.
    ///
    /// This should be called with the handle extracted from the database
    /// after mutations are complete. It updates the session's handle to
    /// reflect any changes made during the mutation.
    fn restore_db_handle(&mut self, handle: StorageHandle<Database>) {
        // Overwrites the placeholder installed by `take_db_handle_for_mutation`.
        self.db_handle = handle;
    }
/// Execute a closure with mutable access to the database.
///
/// This method implements Salsa's required protocol for mutations:
/// 1. Takes exclusive ownership of the [`StorageHandle`](salsa::StorageHandle)
/// (no clones exist)
/// 2. Creates a temporary Database for the operation
/// 3. Executes your closure with `&mut Database`
/// 4. Extracts and restores the updated handle
///
/// ## Example
///
/// ```rust,ignore
/// session.with_db_mut(|db| {
/// let file = db.get_or_create_file(path);
/// file.set_revision(db).to(new_revision); // Mutation requires exclusive access
/// });
/// ```
///
/// ## Why This Pattern?
///
/// This ensures that when Salsa needs to modify inputs (via setters like
/// `set_revision`), it has exclusive access. The internal `cancel_others()`
/// call will succeed because we guarantee only one handle exists.
pub fn with_db_mut<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut Database) -> R,
{
let handle = self.take_db_handle_for_mutation();
let storage = handle.into_storage();
let mut db = Database::from_storage(storage, self.file_system.clone(), self.files.clone());
let result = f(&mut db);
// The database may have changed during mutations, so we need
// to extract its current handle state
let new_handle = db.storage().clone().into_zalsa_handle();
self.restore_db_handle(new_handle);
result
}
/// Execute a closure with read-only access to the database.
///
/// For read-only operations, we can safely clone the [`StorageHandle`](salsa::StorageHandle)
/// since Salsa allows multiple concurrent readers. This is more
/// efficient than taking exclusive ownership.
///
/// ## Example
///
/// ```rust,ignore
/// let content = session.with_db(|db| {
/// let file = db.get_file(path)?;
/// source_text(db, file).to_string() // Read-only query
/// });
/// ```
pub fn with_db<F, R>(&self, f: F) -> R
where
F: FnOnce(&Database) -> R,
{
// For reads, cloning is safe and efficient
let storage = self.db_handle.clone().into_storage();
ServerDatabase::new(storage)
let db = Database::from_storage(storage, self.file_system.clone(), self.files.clone());
f(&db)
}
/// Handle opening a document - sets buffer and creates file.
///
/// This method coordinates both layers:
/// - Layer 1: Stores the document content in buffers
/// - Layer 2: Creates the [`SourceFile`](djls_workspace::SourceFile) in Salsa (if path is resolvable)
pub fn open_document(&mut self, url: &Url, document: TextDocument) {
tracing::debug!("Opening document: {}", url);
// Layer 1: Set buffer
self.buffers.open(url.clone(), document);
// Layer 2: Create file and touch if it already exists
// This is crucial: if the file was already read from disk, we need to
// invalidate Salsa's cache so it re-reads through the buffer system
if let Some(path) = paths::url_to_path(url) {
self.with_db_mut(|db| {
// Check if file already exists (was previously read from disk)
let already_exists = db.has_file(&path);
let file = db.get_or_create_file(path.clone());
if already_exists {
// File was already read - touch to invalidate cache
db.touch_file(&path);
} else {
// New file - starts at revision 0
tracing::debug!(
"Created new SourceFile for {}: revision {}",
path.display(),
file.revision(db)
);
}
});
}
}
/// Handle document changes - updates buffer and bumps revision.
///
/// This method coordinates both layers:
/// - Layer 1: Updates the document content in buffers
/// - Layer 2: Bumps the file revision to trigger Salsa invalidation
pub fn update_document(&mut self, url: &Url, document: TextDocument) {
let version = document.version();
tracing::debug!("Updating document: {} (version {})", url, version);
// Layer 1: Update buffer
self.buffers.update(url.clone(), document);
// Layer 2: Touch file to trigger invalidation
if let Some(path) = paths::url_to_path(url) {
self.with_db_mut(|db| db.touch_file(&path));
}
}
/// Apply incremental changes to an open document.
///
/// This encapsulates the full update cycle: retrieving the document,
/// applying changes, updating the buffer, and bumping Salsa revision.
///
/// Returns an error if the document is not currently open.
pub fn apply_document_changes(
&mut self,
url: &Url,
changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
new_version: i32,
) -> Result<(), String> {
if let Some(mut document) = self.buffers.get(url) {
document.update(changes, new_version);
self.update_document(url, document);
Ok(())
} else {
Err(format!("Document not open: {url}"))
}
}
/// Handle closing a document - removes buffer and bumps revision.
///
/// This method coordinates both layers:
/// - Layer 1: Removes the buffer (falls back to disk)
/// - Layer 2: Bumps revision to trigger re-read from disk
///
/// Returns the removed document if it existed.
pub fn close_document(&mut self, url: &Url) -> Option<TextDocument> {
tracing::debug!("Closing document: {}", url);
// Layer 1: Remove buffer
let removed = self.buffers.close(url);
if let Some(ref doc) = removed {
tracing::debug!(
"Removed buffer for closed document: {} (was version {})",
url,
doc.version()
);
}
// Layer 2: Touch file to trigger re-read from disk
// We keep the file alive for potential re-opening
if let Some(path) = paths::url_to_path(url) {
self.with_db_mut(|db| db.touch_file(&path));
}
removed
}
/// Get the current content of a file (from overlay or disk).
///
/// This is the safe way to read file content through the system.
/// The file is created if it doesn't exist, and content is read
/// through the `FileSystem` abstraction (overlay first, then disk).
pub fn file_content(&mut self, path: PathBuf) -> String {
use djls_workspace::db::source_text;
self.with_db_mut(|db| {
let file = db.get_or_create_file(path);
source_text(db, file).to_string()
})
}
/// Get the current revision of a file, if it's being tracked.
///
/// Returns None if the file hasn't been created yet.
pub fn file_revision(&mut self, path: &Path) -> Option<u64> {
self.with_db_mut(|db| {
db.has_file(path).then(|| {
let file = db.get_or_create_file(path.to_path_buf());
file.revision(db)
})
})
}
/// Check if a file is currently being tracked in Salsa.
pub fn has_file(&mut self, path: &Path) -> bool {
self.with_db(|db| db.has_file(path))
}
}
impl Default for Session {
    /// Builds a session with no project, default settings, and a fresh
    /// two-layer workspace (empty buffers, OS-backed file system, empty
    /// Salsa file table).
    fn default() -> Self {
        let open_buffers = Buffers::new();
        let tracked_files = Arc::new(DashMap::new());
        let fs = Arc::new(WorkspaceFileSystem::new(
            open_buffers.clone(),
            Arc::new(OsFileSystem),
        ));
        let handle = Database::new(fs.clone(), tracked_files.clone())
            .storage()
            .clone()
            .into_zalsa_handle();
        Self {
            project: None,
            settings: Settings::default(),
            buffers: open_buffers,
            file_system: fs,
            files: tracked_files,
            client_capabilities: lsp_types::ClientCapabilities::default(),
            db_handle: handle,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use djls_workspace::LanguageId;

    /// Verifies the overlay → revision → invalidation chain: opening, updating,
    /// and closing a document must each be observable through `file_content`.
    #[test]
    fn test_revision_invalidation_chain() {
        let mut session = Session::default();
        let path = PathBuf::from("/test/template.html");
        let url = Url::parse("file:///test/template.html").unwrap();
        // Open document with initial content
        let document = TextDocument::new(
            "<h1>Original Content</h1>".to_string(),
            1,
            LanguageId::Other,
        );
        session.open_document(&url, document);
        let content1 = session.file_content(path.clone());
        assert_eq!(content1, "<h1>Original Content</h1>");
        // Update document with new content
        let updated_document =
            TextDocument::new("<h1>Updated Content</h1>".to_string(), 2, LanguageId::Other);
        session.update_document(&url, updated_document);
        // Read content again (should get new overlay content due to invalidation)
        let content2 = session.file_content(path.clone());
        assert_eq!(content2, "<h1>Updated Content</h1>");
        assert_ne!(content1, content2);
        // Close document (removes overlay, bumps revision)
        session.close_document(&url);
        // Read content again (should now read from disk, which returns empty for missing files)
        let content3 = session.file_content(path.clone());
        assert_eq!(content3, ""); // No file on disk, returns empty
    }

    /// Ensures the shared `files` map survives the take/restore handle cycle
    /// performed by `with_db_mut`, so tracked files persist across operations.
    #[test]
    fn test_with_db_mut_preserves_files() {
        let mut session = Session::default();
        let path1 = PathBuf::from("/test/file1.py");
        let path2 = PathBuf::from("/test/file2.py");
        session.file_content(path1.clone());
        session.file_content(path2.clone());
        // Verify files are preserved across operations
        assert!(session.has_file(&path1));
        assert!(session.has_file(&path2));
        // Files should persist even after multiple operations
        let content1 = session.file_content(path1.clone());
        let content2 = session.file_content(path2.clone());
        // Both should return empty (no disk content)
        assert_eq!(content1, "");
        assert_eq!(content2, "");
        assert!(session.has_file(&path1));
        assert!(session.has_file(&path2));
    }
}

View file

@ -1,216 +0,0 @@
use salsa::Database;
use tower_lsp_server::lsp_types::DidOpenTextDocumentParams;
use tower_lsp_server::lsp_types::Position;
use tower_lsp_server::lsp_types::Range;
use tower_lsp_server::lsp_types::TextDocumentContentChangeEvent;
// Salsa input representing one open text document; fields are set at creation
// and read through the generated accessors (`uri(db)`, `contents(db)`, ...).
#[salsa::input(debug)]
pub struct TextDocument {
    // Document URI as a string (e.g. "file:///...").
    #[returns(ref)]
    pub uri: String,
    // Full document text.
    #[returns(ref)]
    pub contents: String,
    // Precomputed line-start offsets for `contents`; must be rebuilt whenever
    // `contents` changes (see `with_changes`).
    #[returns(ref)]
    pub index: LineIndex,
    // LSP document version.
    pub version: i32,
    // Language as reported by the client (maps to LanguageId).
    pub language_id: LanguageId,
}
impl TextDocument {
    /// Creates a new salsa `TextDocument` input from LSP `didOpen` params.
    pub fn from_did_open_params(db: &dyn Database, params: &DidOpenTextDocumentParams) -> Self {
        let uri = params.text_document.uri.to_string();
        let contents = params.text_document.text.clone();
        let version = params.text_document.version;
        let language_id = LanguageId::from(params.text_document.language_id.as_str());
        let index = LineIndex::new(&contents);
        TextDocument::new(db, uri, contents, index, version, language_id)
    }

    /// Returns a new input with `changes` applied in order.
    ///
    /// A change with a range splices its text over the derived byte span; a
    /// change without a range replaces the whole document.
    /// NOTE(review): offsets are computed as line start + `character`, which
    /// treats LSP character counts as byte offsets — confirm UTF-16 handling.
    pub fn with_changes(
        self,
        db: &dyn Database,
        changes: &[TextDocumentContentChangeEvent],
        new_version: i32,
    ) -> Self {
        let mut new_contents = self.contents(db).to_string();
        for change in changes {
            if let Some(range) = change.range {
                // Re-index per change: earlier edits shift later offsets.
                let index = LineIndex::new(&new_contents);
                if let (Some(start_offset), Some(end_offset)) = (
                    index.offset(range.start).map(|o| o as usize),
                    index.offset(range.end).map(|o| o as usize),
                ) {
                    let mut updated_content = String::with_capacity(
                        new_contents.len() - (end_offset - start_offset) + change.text.len(),
                    );
                    updated_content.push_str(&new_contents[..start_offset]);
                    updated_content.push_str(&change.text);
                    updated_content.push_str(&new_contents[end_offset..]);
                    new_contents = updated_content;
                }
            } else {
                // Full document update
                new_contents.clone_from(&change.text);
            }
        }
        let index = LineIndex::new(&new_contents);
        TextDocument::new(
            db,
            self.uri(db).to_string(),
            new_contents,
            index,
            new_version,
            self.language_id(db),
        )
    }

    /// Returns the full document text as an owned `String`.
    #[allow(dead_code)]
    pub fn get_text(self, db: &dyn Database) -> String {
        self.contents(db).to_string()
    }

    /// Returns the text inside `range`, or `None` if either endpoint is out of bounds.
    #[allow(dead_code)]
    pub fn get_text_range(self, db: &dyn Database, range: Range) -> Option<String> {
        let index = self.index(db);
        let start = index.offset(range.start)? as usize;
        let end = index.offset(range.end)? as usize;
        let contents = self.contents(db);
        Some(contents[start..end].to_string())
    }

    /// Returns line `line` (including its trailing newline, since the slice
    /// runs to the next line's start), or `None` if the line doesn't exist.
    pub fn get_line(self, db: &dyn Database, line: u32) -> Option<String> {
        let index = self.index(db);
        let start = index.line_starts.get(line as usize)?;
        let end = index
            .line_starts
            .get(line as usize + 1)
            .copied()
            .unwrap_or(index.length);
        let contents = self.contents(db);
        Some(contents[*start as usize..end as usize].to_string())
    }

    /// Number of lines in the document (count of recorded line starts).
    #[allow(dead_code)]
    pub fn line_count(self, db: &dyn Database) -> usize {
        self.index(db).line_starts.len()
    }

    /// Looks backwards from `position` on its line for an opening `{%` and,
    /// if found, returns the partial tag text plus the closing-brace state of
    /// the text after the cursor.
    /// NOTE(review): slices the line at the raw character index; a multi-byte
    /// character before the cursor would panic — confirm inputs are ASCII-safe.
    pub fn get_template_tag_context(
        self,
        db: &dyn Database,
        position: Position,
    ) -> Option<TemplateTagContext> {
        let line = self.get_line(db, position.line)?;
        let char_pos: usize = position.character.try_into().ok()?;
        let prefix = &line[..char_pos];
        let rest_of_line = &line[char_pos..];
        let rest_trimmed = rest_of_line.trim_start();
        prefix.rfind("{%").map(|tag_start| {
            // Check if we're immediately after {% with no space
            let needs_leading_space = prefix.ends_with("{%");
            let closing_brace = if rest_trimmed.starts_with("%}") {
                ClosingBrace::FullClose
            } else if rest_trimmed.starts_with('}') {
                ClosingBrace::PartialClose
            } else {
                ClosingBrace::None
            };
            TemplateTagContext {
                partial_tag: prefix[tag_start + 2..].trim().to_string(),
                closing_brace,
                needs_leading_space,
            }
        })
    }
}
/// Byte-offset index of line starts for a text buffer, enabling
/// position ↔ offset conversion.
#[derive(Clone, Debug)]
pub struct LineIndex {
    line_starts: Vec<u32>,
    length: u32,
}

impl LineIndex {
    /// Scans `text` once, recording the byte offset following every `'\n'`.
    /// Line 0 always starts at offset 0; `length` is the total byte length.
    pub fn new(text: &str) -> Self {
        let mut starts = vec![0];
        let mut offset = 0;
        for ch in text.chars() {
            offset += u32::try_from(ch.len_utf8()).unwrap_or(0);
            if ch == '\n' {
                starts.push(offset);
            }
        }
        Self {
            line_starts: starts,
            length: offset,
        }
    }

    /// Converts an LSP position to a byte offset; `None` when the line
    /// doesn't exist. The character component is added unchecked.
    pub fn offset(&self, position: Position) -> Option<u32> {
        self.line_starts
            .get(position.line as usize)
            .map(|line_start| line_start + position.character)
    }

    /// Converts a byte offset back to a position: the containing line is the
    /// last recorded start ≤ `offset`, the character is the remainder.
    #[allow(dead_code)]
    pub fn position(&self, offset: u32) -> Position {
        let line = self
            .line_starts
            .binary_search(&offset)
            .unwrap_or_else(|insertion| insertion - 1);
        let character = offset - self.line_starts[line];
        Position::new(u32::try_from(line).unwrap_or(0), character)
    }
}
/// Language identifiers the server distinguishes, derived from the LSP
/// `languageId` string of a document.
#[derive(Clone, Debug, PartialEq)]
pub enum LanguageId {
    HtmlDjango,
    Other,
    Python,
}

impl From<&str> for LanguageId {
    /// Maps an LSP language-id string; unknown values become `Other`.
    fn from(language_id: &str) -> Self {
        if language_id == "python" {
            return Self::Python;
        }
        if language_id == "django-html" || language_id == "htmldjango" {
            return Self::HtmlDjango;
        }
        Self::Other
    }
}

impl From<String> for LanguageId {
    fn from(language_id: String) -> Self {
        language_id.as_str().into()
    }
}
/// What, if anything, already closes the `{%` tag to the right of the cursor;
/// used to decide how much closing syntax a completion must insert.
#[derive(Debug)]
pub enum ClosingBrace {
    None,
    PartialClose, // just }
    FullClose,    // %}
}
/// Cursor context inside a `{% ... %}` template tag, produced by
/// `TextDocument::get_template_tag_context`.
#[derive(Debug)]
pub struct TemplateTagContext {
    // Text typed after `{%` up to the cursor, trimmed.
    pub partial_tag: String,
    // Closing syntax already present after the cursor.
    pub closing_brace: ClosingBrace,
    // True when the cursor sits immediately after `{%` with no space.
    pub needs_leading_space: bool,
}

View file

@ -1,6 +0,0 @@
mod document;
mod store;
mod utils;
pub use store::Store;
pub use utils::get_project_path;

View file

@ -1,158 +0,0 @@
use std::collections::HashMap;
use anyhow::anyhow;
use anyhow::Result;
use djls_project::TemplateTags;
use salsa::Database;
use tower_lsp_server::lsp_types::CompletionItem;
use tower_lsp_server::lsp_types::CompletionItemKind;
use tower_lsp_server::lsp_types::CompletionResponse;
use tower_lsp_server::lsp_types::DidChangeTextDocumentParams;
use tower_lsp_server::lsp_types::DidCloseTextDocumentParams;
use tower_lsp_server::lsp_types::DidOpenTextDocumentParams;
use tower_lsp_server::lsp_types::Documentation;
use tower_lsp_server::lsp_types::InsertTextFormat;
use tower_lsp_server::lsp_types::MarkupContent;
use tower_lsp_server::lsp_types::MarkupKind;
use tower_lsp_server::lsp_types::Position;
use super::document::ClosingBrace;
use super::document::LanguageId;
use super::document::TextDocument;
/// In-memory registry of open documents and their LSP versions, keyed by URI.
#[derive(Debug, Default)]
pub struct Store {
    // Salsa `TextDocument` inputs by URI string.
    documents: HashMap<String, TextDocument>,
    // Last seen LSP version per URI.
    versions: HashMap<String, i32>,
}

impl Store {
    /// Registers a newly opened document from `didOpen` params.
    pub fn handle_did_open(&mut self, db: &dyn Database, params: &DidOpenTextDocumentParams) {
        let uri = params.text_document.uri.to_string();
        let version = params.text_document.version;
        let document = TextDocument::from_did_open_params(db, params);
        self.add_document(document, uri.clone());
        self.versions.insert(uri, version);
    }

    /// Applies `didChange` edits to the tracked document, replacing it with a
    /// new salsa input at the new version.
    ///
    /// # Errors
    ///
    /// Fails if no document is tracked for the URI.
    pub fn handle_did_change(
        &mut self,
        db: &dyn Database,
        params: &DidChangeTextDocumentParams,
    ) -> Result<()> {
        let uri = params.text_document.uri.as_str().to_string();
        let version = params.text_document.version;
        let document = self
            .get_document(&uri)
            .ok_or_else(|| anyhow!("Document not found: {}", uri))?;
        let new_document = document.with_changes(db, &params.content_changes, version);
        self.documents.insert(uri.clone(), new_document);
        self.versions.insert(uri, version);
        Ok(())
    }

    /// Drops a document (and its version) on `didClose`.
    pub fn handle_did_close(&mut self, params: &DidCloseTextDocumentParams) {
        self.remove_document(params.text_document.uri.as_str());
    }

    fn add_document(&mut self, document: TextDocument, uri: String) {
        self.documents.insert(uri, document);
    }

    fn remove_document(&mut self, uri: &str) {
        self.documents.remove(uri);
        self.versions.remove(uri);
    }

    fn get_document(&self, uri: &str) -> Option<&TextDocument> {
        self.documents.get(uri)
    }

    #[allow(dead_code)]
    fn get_document_mut(&mut self, uri: &str) -> Option<&mut TextDocument> {
        self.documents.get_mut(uri)
    }

    /// Iterates over all tracked documents (order unspecified).
    #[allow(dead_code)]
    pub fn get_all_documents(&self) -> impl Iterator<Item = &TextDocument> {
        self.documents.values()
    }

    /// Iterates over documents whose language matches `language_id`.
    #[allow(dead_code)]
    pub fn get_documents_by_language<'db>(
        &'db self,
        db: &'db dyn Database,
        language_id: LanguageId,
    ) -> impl Iterator<Item = &'db TextDocument> + 'db {
        self.documents
            .values()
            .filter(move |doc| doc.language_id(db) == language_id)
    }

    /// Last seen LSP version for `uri`, if tracked.
    #[allow(dead_code)]
    pub fn get_version(&self, uri: &str) -> Option<i32> {
        self.versions.get(uri).copied()
    }

    /// True when `version` matches the tracked version for `uri`.
    #[allow(dead_code)]
    pub fn is_version_valid(&self, uri: &str, version: i32) -> bool {
        self.get_version(uri) == Some(version)
    }

    /// Computes template-tag completions for `uri` at `position`.
    ///
    /// Returns `None` unless the document is HtmlDjango, the cursor is inside
    /// a `{%` tag context, and at least one tag matches the typed prefix.
    pub fn get_completions(
        &self,
        db: &dyn Database,
        uri: &str,
        position: Position,
        tags: &TemplateTags,
    ) -> Option<CompletionResponse> {
        let document = self.get_document(uri)?;
        if document.language_id(db) != LanguageId::HtmlDjango {
            return None;
        }
        let context = document.get_template_tag_context(db, position)?;
        let mut completions: Vec<CompletionItem> = tags
            .iter()
            .filter(|tag| {
                context.partial_tag.is_empty() || tag.name().starts_with(&context.partial_tag)
            })
            .map(|tag| {
                let leading_space = if context.needs_leading_space { " " } else { "" };
                CompletionItem {
                    label: tag.name().to_string(),
                    kind: Some(CompletionItemKind::KEYWORD),
                    detail: Some(format!("Template tag from {}", tag.library())),
                    documentation: tag.doc().as_ref().map(|doc| {
                        Documentation::MarkupContent(MarkupContent {
                            kind: MarkupKind::Markdown,
                            value: (*doc).to_string(),
                        })
                    }),
                    // Insert only the closing syntax the buffer is missing.
                    insert_text: Some(match context.closing_brace {
                        ClosingBrace::None => format!("{}{} %}}", leading_space, tag.name()),
                        ClosingBrace::PartialClose => format!("{}{} %", leading_space, tag.name()),
                        ClosingBrace::FullClose => format!("{}{} ", leading_space, tag.name()),
                    }),
                    insert_text_format: Some(InsertTextFormat::PLAIN_TEXT),
                    ..Default::default()
                }
            })
            .collect();
        if completions.is_empty() {
            None
        } else {
            completions.sort_by(|a, b| a.label.cmp(&b.label));
            Some(CompletionResponse::Array(completions))
        }
    }
}

View file

@ -1,43 +0,0 @@
use std::path::PathBuf;
use percent_encoding::percent_decode_str;
use tower_lsp_server::lsp_types::InitializeParams;
use tower_lsp_server::lsp_types::Uri;
/// Determines the project root path from initialization parameters.
///
/// Tries the current directory first, then falls back to the first workspace folder.
pub fn get_project_path(params: &InitializeParams) -> Option<PathBuf> {
    // Prefer the server process's working directory when it is available.
    if let Ok(cwd) = std::env::current_dir() {
        return Some(cwd);
    }
    // Otherwise derive a path from the first advertised workspace folder URI.
    let folders = params.workspace_folders.as_ref()?;
    let first = folders.first()?;
    uri_to_pathbuf(&first.uri)
}
/// Converts a `file:` URI into an absolute `PathBuf`.
fn uri_to_pathbuf(uri: &Uri) -> Option<PathBuf> {
    // Only `file:` URIs can be mapped onto the local file system.
    if !uri.scheme().is_some_and(|s| s.as_str() == "file") {
        return None;
    }
    // Percent-decode the path component; lossy decoding means invalid
    // UTF-8 sequences are replaced rather than causing a failure.
    let decoded = percent_decode_str(uri.path().as_str()).decode_utf8_lossy();
    let path_str = decoded.as_ref();
    #[cfg(windows)]
    let path_str = {
        // Windows file URIs look like /C:/..., so drop the leading slash.
        path_str.strip_prefix('/').unwrap_or(path_str)
    };
    Some(PathBuf::from(path_str))
}

View file

@ -0,0 +1,448 @@
//! Integration tests for the LSP server's overlay → revision → invalidation flow
//!
//! These tests verify the complete two-layer architecture:
//! - Layer 1: LSP overlays (in-memory document state)
//! - Layer 2: Salsa database with revision tracking
//!
//! The tests ensure that document changes properly invalidate cached queries
//! and that overlays take precedence over disk content.
use std::path::PathBuf;
use std::sync::Arc;
use djls_server::DjangoLanguageServer;
use tempfile::TempDir;
use tower_lsp_server::lsp_types::DidChangeTextDocumentParams;
use tower_lsp_server::lsp_types::DidCloseTextDocumentParams;
use tower_lsp_server::lsp_types::DidOpenTextDocumentParams;
use tower_lsp_server::lsp_types::InitializeParams;
use tower_lsp_server::lsp_types::InitializedParams;
use tower_lsp_server::lsp_types::TextDocumentContentChangeEvent;
use tower_lsp_server::lsp_types::TextDocumentIdentifier;
use tower_lsp_server::lsp_types::TextDocumentItem;
use tower_lsp_server::lsp_types::VersionedTextDocumentIdentifier;
use tower_lsp_server::lsp_types::WorkspaceFolder;
use tower_lsp_server::LanguageServer;
use url::Url;
/// Test helper that manages an LSP server instance for testing
struct TestServer {
    /// The server under test, already initialized against the temp workspace.
    server: DjangoLanguageServer,
    /// Temp dir backing the workspace; kept alive so it isn't deleted mid-test.
    _temp_dir: TempDir,
    /// Absolute root path of the temporary workspace on disk.
    workspace_root: PathBuf,
}
impl TestServer {
    /// Create a new test server with a temporary workspace
    async fn new() -> Self {
        // Create temporary directory for test workspace
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let workspace_root = temp_dir.path().to_path_buf();
        // Set up logging (sink discards output; we only need the guard)
        let (_non_blocking, guard) = tracing_appender::non_blocking(std::io::sink());
        // Create server (guard is moved into server, so we return it too)
        let server = DjangoLanguageServer::new(guard);
        // Initialize the server
        // NOTE(review): assumes the temp path needs no percent-encoding — TODO confirm
        let workspace_folder = WorkspaceFolder {
            uri: format!("file://{}", workspace_root.display())
                .parse()
                .unwrap(),
            name: "test_workspace".to_string(),
        };
        let init_params = InitializeParams {
            workspace_folders: Some(vec![workspace_folder]),
            ..Default::default()
        };
        server
            .initialize(init_params)
            .await
            .expect("Failed to initialize");
        server.initialized(InitializedParams {}).await;
        Self {
            server,
            _temp_dir: temp_dir,
            workspace_root,
        }
    }
    /// Helper to create a file path in the test workspace
    fn workspace_file(&self, name: &str) -> PathBuf {
        self.workspace_root.join(name)
    }
    /// Helper to create a file URL in the test workspace
    fn workspace_url(&self, name: &str) -> Url {
        djls_workspace::paths::path_to_url(&self.workspace_file(name)).unwrap()
    }
    /// Open a document in the LSP server
    async fn open_document(&self, file_name: &str, content: &str, version: i32) {
        let params = DidOpenTextDocumentParams {
            text_document: TextDocumentItem {
                uri: self.workspace_url(file_name).to_string().parse().unwrap(),
                // Infer the LSP language id from the file extension
                language_id: if file_name.ends_with(".html") {
                    "html".to_string()
                } else if file_name.ends_with(".py") {
                    "python".to_string()
                } else {
                    "plaintext".to_string()
                },
                version,
                text: content.to_string(),
            },
        };
        self.server.did_open(params).await;
    }
    /// Change a document in the LSP server
    async fn change_document(&self, file_name: &str, new_content: &str, version: i32) {
        let params = DidChangeTextDocumentParams {
            text_document: VersionedTextDocumentIdentifier {
                uri: self.workspace_url(file_name).to_string().parse().unwrap(),
                version,
            },
            // `range: None` means a full-document replacement
            content_changes: vec![TextDocumentContentChangeEvent {
                range: None,
                range_length: None,
                text: new_content.to_string(),
            }],
        };
        self.server.did_change(params).await;
    }
    /// Close a document in the LSP server
    async fn close_document(&self, file_name: &str) {
        let params = DidCloseTextDocumentParams {
            text_document: TextDocumentIdentifier {
                uri: self.workspace_url(file_name).to_string().parse().unwrap(),
            },
        };
        self.server.did_close(params).await;
    }
    /// Get the content of a file through the session's query system
    async fn get_file_content(&self, file_name: &str) -> String {
        let path = self.workspace_file(file_name);
        self.server
            .with_session_mut(|session| session.file_content(path))
            .await
    }
    /// Write a file to disk in the test workspace
    fn write_file(&self, file_name: &str, content: &str) {
        let path = self.workspace_file(file_name);
        std::fs::write(path, content).expect("Failed to write test file");
    }
    /// Get the revision of a file (None if the file is not tracked)
    async fn get_file_revision(&self, file_name: &str) -> Option<u64> {
        let path = self.workspace_file(file_name);
        self.server
            .with_session_mut(|session| session.file_revision(&path))
            .await
    }
}
/// End-to-end lifecycle: open → change → close, verifying both the returned
/// content and the revision bump at every step.
#[tokio::test]
async fn test_full_lsp_lifecycle() {
    let server = TestServer::new().await;
    let file_name = "test.html";
    // Write initial content to disk
    server.write_file(file_name, "<h1>Disk Content</h1>");
    // 1. Test did_open creates overlay and file
    server
        .open_document(file_name, "<h1>Overlay Content</h1>", 1)
        .await;
    // Verify overlay content is returned (not disk content)
    let content = server.get_file_content(file_name).await;
    assert_eq!(content, "<h1>Overlay Content</h1>");
    // Verify file was created with revision 0
    let revision = server.get_file_revision(file_name).await;
    assert_eq!(revision, Some(0));
    // 2. Test did_change updates overlay and bumps revision
    server
        .change_document(file_name, "<h1>Updated Content</h1>", 2)
        .await;
    // Verify content changed
    let content = server.get_file_content(file_name).await;
    assert_eq!(content, "<h1>Updated Content</h1>");
    // Verify revision was bumped
    let revision = server.get_file_revision(file_name).await;
    assert_eq!(revision, Some(1));
    // 3. Test did_close removes overlay and bumps revision
    server.close_document(file_name).await;
    // Verify content now comes from the file written to disk above
    let content = server.get_file_content(file_name).await;
    assert_eq!(content, "<h1>Disk Content</h1>");
    // Verify revision was bumped again
    let revision = server.get_file_revision(file_name).await;
    assert_eq!(revision, Some(2));
}
/// Overlay content must shadow disk content while a document is open,
/// and disk content must reappear after the document is closed.
#[tokio::test]
async fn test_overlay_precedence() {
    let server = TestServer::new().await;
    let file_name = "template.html";
    // Write content to disk
    server.write_file(file_name, "{% block content %}Disk{% endblock %}");
    // Read content before overlay - should get disk content
    let content = server.get_file_content(file_name).await;
    assert_eq!(content, "{% block content %}Disk{% endblock %}");
    // Open document with different content
    server
        .open_document(file_name, "{% block content %}Overlay{% endblock %}", 1)
        .await;
    // Verify overlay content takes precedence
    let content = server.get_file_content(file_name).await;
    assert_eq!(content, "{% block content %}Overlay{% endblock %}");
    // Close document
    server.close_document(file_name).await;
    // Verify we're back to disk content
    let content = server.get_file_content(file_name).await;
    assert_eq!(content, "{% block content %}Disk{% endblock %}");
}
/// The template parser must see overlay content (the open buffer), not the
/// stale disk content, when a document is open.
#[tokio::test]
async fn test_template_parsing_with_overlays() {
    let server = TestServer::new().await;
    let file_name = "template.html";
    // Write initial template to disk
    server.write_file(file_name, "{% if true %}Original{% endif %}");
    // Open with different template content
    server
        .open_document(
            file_name,
            "{% for item in items %}{{ item }}{% endfor %}",
            1,
        )
        .await;
    use djls_workspace::db::parse_template;
    // Parse template through the session
    let workspace_path = server.workspace_file(file_name);
    let ast = server
        .server
        .with_session_mut(|session| {
            session.with_db_mut(|db| {
                let file = db.get_or_create_file(workspace_path);
                parse_template(db, file)
            })
        })
        .await;
    // Verify we parsed the overlay content (for loop), not disk content (if statement)
    // The Debug output is inspected because the node types aren't matched directly here.
    assert!(ast.is_some());
    let ast = ast.unwrap();
    let ast_str = format!("{:?}", ast.ast);
    assert!(ast_str.contains("for") || ast_str.contains("For"));
    assert!(!ast_str.contains("if") && !ast_str.contains("If"));
}
/// Changing one document must not touch the content or revision of others.
#[tokio::test]
async fn test_multiple_documents_independent() {
    let server = TestServer::new().await;
    // Open multiple documents
    server.open_document("file1.html", "Content 1", 1).await;
    server.open_document("file2.html", "Content 2", 1).await;
    server.open_document("file3.html", "Content 3", 1).await;
    // Change one document
    server.change_document("file2.html", "Updated 2", 2).await;
    // Verify only file2 was updated
    assert_eq!(server.get_file_content("file1.html").await, "Content 1");
    assert_eq!(server.get_file_content("file2.html").await, "Updated 2");
    assert_eq!(server.get_file_content("file3.html").await, "Content 3");
    // Verify revision changes: only the changed file moved past its initial 0
    assert_eq!(server.get_file_revision("file1.html").await, Some(0));
    assert_eq!(server.get_file_revision("file2.html").await, Some(1));
    assert_eq!(server.get_file_revision("file3.html").await, Some(0));
}
/// Concurrent change streams against different documents must not interfere:
/// each file ends at its own final content and an exact revision count.
#[tokio::test]
async fn test_concurrent_overlay_updates() {
    let server = Arc::new(TestServer::new().await);
    // Open initial documents
    for i in 0..5 {
        server
            .open_document(&format!("file{}.html", i), &format!("Initial {}", i), 1)
            .await;
    }
    // Spawn concurrent tasks to update different documents
    let mut handles = vec![];
    for i in 0..5 {
        let server_clone = Arc::clone(&server);
        let handle = tokio::spawn(async move {
            // Each task updates its document multiple times
            for version in 2..10 {
                server_clone
                    .change_document(
                        &format!("file{}.html", i),
                        &format!("Updated {} v{}", i, version),
                        version,
                    )
                    .await;
                // Small delay to encourage interleaving
                tokio::time::sleep(tokio::time::Duration::from_millis(1)).await;
            }
        });
        handles.push(handle);
    }
    // Wait for all tasks to complete
    for handle in handles {
        handle.await.expect("Task failed");
    }
    // Verify final state of all documents
    for i in 0..5 {
        let content = server.get_file_content(&format!("file{}.html", i)).await;
        assert_eq!(content, format!("Updated {} v9", i));
        // Each document should have had 8 changes (versions 2-9)
        let revision = server.get_file_revision(&format!("file{}.html", i)).await;
        assert_eq!(revision, Some(8));
    }
}
/// A change to one template must bump only that template's revision;
/// untouched templates keep their revisions (and thus their cached queries).
#[tokio::test]
async fn test_caching_behavior() {
    let server = TestServer::new().await;
    // Open three template files
    server
        .open_document("template1.html", "{% block a %}1{% endblock %}", 1)
        .await;
    server
        .open_document("template2.html", "{% block b %}2{% endblock %}", 1)
        .await;
    server
        .open_document("template3.html", "{% block c %}3{% endblock %}", 1)
        .await;
    // Parse all templates once to populate cache
    for i in 1..=3 {
        let _ = server
            .get_file_content(&format!("template{}.html", i))
            .await;
    }
    // Store initial revisions
    let rev1_before = server.get_file_revision("template1.html").await.unwrap();
    let rev2_before = server.get_file_revision("template2.html").await.unwrap();
    let rev3_before = server.get_file_revision("template3.html").await.unwrap();
    // Change only template2
    server
        .change_document("template2.html", "{% block b %}CHANGED{% endblock %}", 2)
        .await;
    // Verify only template2's revision changed
    let rev1_after = server.get_file_revision("template1.html").await.unwrap();
    let rev2_after = server.get_file_revision("template2.html").await.unwrap();
    let rev3_after = server.get_file_revision("template3.html").await.unwrap();
    assert_eq!(
        rev1_before, rev1_after,
        "template1 revision should not change"
    );
    assert_eq!(
        rev2_before + 1,
        rev2_after,
        "template2 revision should increment"
    );
    assert_eq!(
        rev3_before, rev3_after,
        "template3 revision should not change"
    );
    // Verify content
    assert_eq!(
        server.get_file_content("template1.html").await,
        "{% block a %}1{% endblock %}"
    );
    assert_eq!(
        server.get_file_content("template2.html").await,
        "{% block b %}CHANGED{% endblock %}"
    );
    assert_eq!(
        server.get_file_content("template3.html").await,
        "{% block c %}3{% endblock %}"
    );
}
/// Revision numbers must advance monotonically across open/change/close and
/// bump again on re-open so cached queries are invalidated.
#[tokio::test]
async fn test_revision_tracking_across_lifecycle() {
    let server = TestServer::new().await;
    let file_name = "tracked.html";
    // Create file on disk
    server.write_file(file_name, "Initial");
    // Open document - should create file with revision 0
    server.open_document(file_name, "Opened", 1).await;
    assert_eq!(server.get_file_revision(file_name).await, Some(0));
    // Change document multiple times
    for i in 2..=5 {
        server
            .change_document(file_name, &format!("Change {}", i), i)
            .await;
        assert_eq!(
            server.get_file_revision(file_name).await,
            Some((i - 1) as u64),
            "Revision should be {} after change {}",
            i - 1,
            i
        );
    }
    // Close document - should bump revision one more time
    server.close_document(file_name).await;
    assert_eq!(server.get_file_revision(file_name).await, Some(5));
    // Re-open document - file already exists, should bump revision to invalidate cache
    server.open_document(file_name, "Reopened", 10).await;
    assert_eq!(
        server.get_file_revision(file_name).await,
        Some(6),
        "Revision should bump on re-open to invalidate cache"
    );
    // Change again
    server.change_document(file_name, "Final", 11).await;
    assert_eq!(server.get_file_revision(file_name).await, Some(7));
}

View file

@ -5,7 +5,7 @@ use crate::tokens::Token;
use crate::tokens::TokenStream;
use crate::tokens::TokenType;
#[derive(Clone, Debug, Default, Serialize)]
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize)]
pub struct Ast {
nodelist: Vec<Node>,
line_offsets: LineOffsets,
@ -36,7 +36,7 @@ impl Ast {
}
}
#[derive(Clone, Debug, Serialize)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub struct LineOffsets(pub Vec<u32>);
impl LineOffsets {
@ -75,7 +75,7 @@ impl Default for LineOffsets {
}
}
#[derive(Clone, Debug, Serialize)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize)]
pub enum Node {
Tag {
name: String,

View file

@ -1,11 +1,11 @@
mod ast;
pub mod ast;
mod error;
mod lexer;
mod parser;
mod tagspecs;
mod tokens;
use ast::Ast;
pub use ast::Ast;
pub use error::QuickFix;
pub use error::TemplateError;
use lexer::Lexer;

View file

@ -0,0 +1,25 @@
[package]
name = "djls-workspace"
version = "0.0.0"
edition = "2021"
[dependencies]
djls-templates = { workspace = true }
djls-project = { workspace = true }
anyhow = { workspace = true }
camino = { workspace = true }
dashmap = { workspace = true }
notify = { workspace = true }
percent-encoding = { workspace = true }
salsa = { workspace = true }
tokio = { workspace = true }
tower-lsp-server = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }
[dev-dependencies]
tempfile = { workspace = true }
[lints]
workspace = true

View file

@ -0,0 +1,75 @@
//! Shared buffer storage for open documents
//!
//! This module provides the [`Buffers`] type which represents the in-memory
//! content of open files. These buffers are shared between the `Session`
//! (which manages document lifecycle) and the [`WorkspaceFileSystem`](crate::fs::WorkspaceFileSystem) (which
//! reads from them).
use std::sync::Arc;
use dashmap::DashMap;
use url::Url;
use crate::document::TextDocument;
/// Shared buffer storage between `Session` and [`FileSystem`].
///
/// Buffers represent the in-memory content of open files that takes
/// precedence over disk content when reading through the [`FileSystem`].
/// This is the key abstraction that makes the sharing between Session
/// and [`WorkspaceFileSystem`] explicit and type-safe.
///
/// The [`WorkspaceFileSystem`] holds a clone of this structure and checks
/// it before falling back to disk reads.
///
/// [`FileSystem`]: crate::fs::FileSystem
/// [`WorkspaceFileSystem`]: crate::fs::WorkspaceFileSystem
#[derive(Clone, Debug)]
pub struct Buffers {
    /// Concurrent URL → document map; the `Arc` makes clones share one store.
    inner: Arc<DashMap<Url, TextDocument>>,
}
impl Buffers {
    /// Create an empty buffer store.
    #[must_use]
    pub fn new() -> Self {
        let inner = Arc::new(DashMap::new());
        Self { inner }
    }

    /// Record a newly opened document, replacing any stale entry for `url`.
    pub fn open(&self, url: Url, document: TextDocument) {
        self.inner.insert(url, document);
    }

    /// Replace the stored document for an already-open `url`.
    pub fn update(&self, url: Url, document: TextDocument) {
        self.inner.insert(url, document);
    }

    /// Remove a document from the store, returning it if it was open.
    #[must_use]
    pub fn close(&self, url: &Url) -> Option<TextDocument> {
        self.inner.remove(url).map(|(_url, document)| document)
    }

    /// Fetch a clone of the document stored for `url`, if any.
    #[must_use]
    pub fn get(&self, url: &Url) -> Option<TextDocument> {
        self.inner.get(url).map(|entry| entry.value().clone())
    }

    /// Check if a document is open
    #[must_use]
    pub fn contains(&self, url: &Url) -> bool {
        self.inner.contains_key(url)
    }

    /// Iterate over `(url, document)` snapshots of every open buffer.
    pub fn iter(&self) -> impl Iterator<Item = (Url, TextDocument)> + '_ {
        self.inner.iter().map(|entry| {
            let url = entry.key().clone();
            let document = entry.value().clone();
            (url, document)
        })
    }
}
impl Default for Buffers {
    /// Equivalent to [`Buffers::new`]: an empty store.
    fn default() -> Self {
        Self::new()
    }
}

View file

@ -0,0 +1,480 @@
//! Salsa database for incremental computation.
//!
//! This module provides the [`Database`] which integrates with Salsa for
//! incremental computation of Django template parsing and analysis.
//!
//! ## Architecture
//!
//! The system uses a two-layer approach:
//! 1. **Buffer layer** ([`Buffers`]) - Stores open document content in memory
//! 2. **Salsa layer** ([`Database`]) - Tracks files and computes derived queries
//!
//! When Salsa needs file content, it calls [`source_text`] which:
//! 1. Creates a dependency on the file's revision (critical!)
//! 2. Reads through [`WorkspaceFileSystem`] which checks buffers first
//! 3. Falls back to disk if no buffer exists
//!
//! ## The Revision Dependency
//!
//! The [`source_text`] function **must** call `file.revision(db)` to create
//! a Salsa dependency. Without this, revision changes won't invalidate queries:
//!
//! ```ignore
//! let _ = file.revision(db); // Creates the dependency chain!
//! ```
//!
//! [`Buffers`]: crate::buffers::Buffers
//! [`WorkspaceFileSystem`]: crate::fs::WorkspaceFileSystem
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
#[cfg(test)]
use std::sync::Mutex;
use dashmap::DashMap;
use salsa::Setter;
use crate::FileKind;
use crate::FileSystem;
/// Database trait that provides file system access for Salsa queries
#[salsa::db]
pub trait Db: salsa::Database {
    /// Get the file system for reading files.
    ///
    /// `None` means no file system was configured (see `Database::read_file_content`,
    /// which then falls back to direct disk reads).
    fn fs(&self) -> Option<Arc<dyn FileSystem>>;
    /// Read file content through the file system.
    ///
    /// Checks buffers first via [`WorkspaceFileSystem`](crate::fs::WorkspaceFileSystem),
    /// then falls back to disk.
    fn read_file_content(&self, path: &Path) -> std::io::Result<String>;
}
/// Salsa database for incremental computation.
///
/// Tracks files and computes derived queries incrementally. Integrates with
/// [`WorkspaceFileSystem`](crate::fs::WorkspaceFileSystem) to read file content,
/// which checks buffers before falling back to disk.
#[salsa::db]
#[derive(Clone)]
pub struct Database {
    storage: salsa::Storage<Self>,
    // TODO: does this need to be an Option?
    /// File system for reading file content (checks buffers first, then disk).
    /// `None` only in the test `Default` impl; then reads go straight to disk.
    fs: Option<Arc<dyn FileSystem>>,
    /// Maps paths to [`SourceFile`] entities for O(1) lookup.
    /// Shared (`Arc`) so clones of the database see the same file table.
    files: Arc<DashMap<PathBuf, SourceFile>>,
    // The logs are only used for testing and demonstrating reuse:
    #[cfg(test)]
    logs: Arc<Mutex<Option<Vec<String>>>>,
}
#[cfg(test)]
impl Default for Database {
    /// Test-only constructor: no file system (reads fall back to disk) and a
    /// Salsa event callback that records `WillExecute` events into `logs`.
    fn default() -> Self {
        let logs = <Arc<Mutex<Option<Vec<String>>>>>::default();
        Self {
            storage: salsa::Storage::new(Some(Box::new({
                let logs = logs.clone();
                move |event| {
                    eprintln!("Event: {event:?}");
                    // Log interesting events, if logging is enabled
                    // (logging is enabled by setting `logs` to `Some(vec![])`)
                    if let Some(logs) = &mut *logs.lock().unwrap() {
                        // only log interesting events
                        if let salsa::EventKind::WillExecute { .. } = event.kind {
                            logs.push(format!("Event: {event:?}"));
                        }
                    }
                }
            }))),
            fs: None,
            files: Arc::new(DashMap::new()),
            logs,
        }
    }
}
impl Database {
    /// Build a database with a file system and a (possibly shared) file table.
    pub fn new(file_system: Arc<dyn FileSystem>, files: Arc<DashMap<PathBuf, SourceFile>>) -> Self {
        Self {
            storage: salsa::Storage::new(None),
            fs: Some(file_system),
            files,
            #[cfg(test)]
            logs: Arc::new(Mutex::new(None)),
        }
    }
    /// Rebuild a database around existing Salsa storage (preserving its caches),
    /// e.g. after `Session` extracted the storage via a handle.
    pub fn from_storage(
        storage: salsa::Storage<Self>,
        file_system: Arc<dyn FileSystem>,
        files: Arc<DashMap<PathBuf, SourceFile>>,
    ) -> Self {
        Self {
            storage,
            fs: Some(file_system),
            files,
            #[cfg(test)]
            logs: Arc::new(Mutex::new(None)),
        }
    }
    /// Read file content through the file system.
    /// Falls back to a direct disk read when no file system is configured.
    pub fn read_file_content(&self, path: &Path) -> std::io::Result<String> {
        if let Some(fs) = &self.fs {
            fs.read_to_string(path)
        } else {
            std::fs::read_to_string(path)
        }
    }
    /// Get or create a [`SourceFile`] for the given path.
    ///
    /// Files are created with an initial revision of 0 and tracked in the [`Database`]'s
    /// `DashMap`. The `Arc` ensures cheap cloning while maintaining thread safety.
    pub fn get_or_create_file(&mut self, path: PathBuf) -> SourceFile {
        if let Some(file_ref) = self.files.get(&path) {
            // Copy the value (SourceFile is Copy) and drop the guard immediately
            let file = *file_ref;
            drop(file_ref); // Explicitly drop the guard to release the lock
            return file;
        }
        // File doesn't exist, so we need to create it
        let kind = FileKind::from_path(&path);
        let file = SourceFile::new(self, kind, Arc::from(path.to_string_lossy().as_ref()), 0);
        self.files.insert(path.clone(), file);
        file
    }
    /// Check if a file is being tracked without creating it.
    ///
    /// This is primarily used for testing to verify that files have been
    /// created without affecting the database state.
    pub fn has_file(&self, path: &Path) -> bool {
        self.files.contains_key(path)
    }
    /// Touch a file to mark it as modified, triggering re-evaluation of dependent queries.
    ///
    /// Similar to Unix `touch`, this updates the file's revision number to signal
    /// that cached query results depending on this file should be invalidated.
    /// Untracked paths are ignored (logged at debug level).
    ///
    /// This is typically called when:
    /// - A file is opened in the editor (if it was previously cached from disk)
    /// - A file's content is modified
    /// - A file's buffer is closed (reverting to disk content)
    pub fn touch_file(&mut self, path: &Path) {
        // Get the file if it exists
        let Some(file_ref) = self.files.get(path) else {
            tracing::debug!("File {} not tracked, skipping touch", path.display());
            return;
        };
        let file = *file_ref;
        // Release the DashMap lock before mutating Salsa state below.
        drop(file_ref); // Explicitly drop to release the lock
        let current_rev = file.revision(self);
        let new_rev = current_rev + 1;
        file.set_revision(self).to(new_rev);
        tracing::debug!(
            "Touched {}: revision {} -> {}",
            path.display(),
            current_rev,
            new_rev
        );
    }
    /// Get a reference to the storage for handle extraction.
    ///
    /// This is used by `Session` to extract the [`StorageHandle`](salsa::StorageHandle) after mutations.
    pub fn storage(&self) -> &salsa::Storage<Self> {
        &self.storage
    }
    /// Consume the database and return its storage.
    ///
    /// This is used when you need to take ownership of the storage.
    pub fn into_storage(self) -> salsa::Storage<Self> {
        self.storage
    }
}
// Marker impl: no custom hooks beyond the defaults salsa provides.
#[salsa::db]
impl salsa::Database for Database {}
#[salsa::db]
impl Db for Database {
    /// Hand out a shared handle to the configured file system, if any.
    fn fs(&self) -> Option<Arc<dyn FileSystem>> {
        self.fs.as_ref().map(Arc::clone)
    }

    /// Read `path` through the workspace file system when one is configured,
    /// otherwise fall back to a direct disk read.
    fn read_file_content(&self, path: &Path) -> std::io::Result<String> {
        if let Some(fs) = &self.fs {
            fs.read_to_string(path)
        } else {
            std::fs::read_to_string(path)
        }
    }
}
/// Represents a single file without storing its content.
///
/// [`SourceFile`] is a Salsa input entity that tracks a file's path, revision, and
/// classification for analysis routing. Following Ruff's pattern, content is NOT
/// stored here but read on-demand through the `source_text` tracked function.
#[salsa::input]
pub struct SourceFile {
    /// The file's classification for analysis routing
    pub kind: FileKind,
    /// The file path
    #[returns(ref)]
    pub path: Arc<str>,
    /// The revision number for invalidation tracking;
    /// bumping it (see `Database::touch_file`) invalidates dependent queries
    pub revision: u64,
}
/// Read file content, creating a Salsa dependency on the file's revision.
///
/// Missing or unreadable files read as an empty string rather than erroring.
#[salsa::tracked]
pub fn source_text(db: &dyn Db, file: SourceFile) -> Arc<str> {
    // Touch the revision so Salsa records it as a dependency of this query;
    // bumping the revision is what triggers invalidation.
    let _ = file.revision(db);
    let path = Path::new(file.path(db).as_ref());
    db.read_file_content(path)
        .map_or_else(|_| Arc::from(""), |content| Arc::from(content))
}
/// Represents a file path for Salsa tracking.
///
/// [`FilePath`] is a Salsa input entity that tracks a file path for use in
/// path-based queries. This allows Salsa to properly track dependencies
/// on files identified by path rather than by SourceFile input.
#[salsa::input]
pub struct FilePath {
    /// The file path as a string
    #[returns(ref)]
    pub path: Arc<str>,
}
/// Container for a parsed Django template AST.
///
/// [`TemplateAst`] wraps the parsed AST from djls-templates along with any parsing errors.
/// This struct is designed to be cached by Salsa and shared across multiple consumers
/// without re-parsing. `PartialEq`/`Eq` enable Salsa's result-comparison backdating.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TemplateAst {
    /// The parsed AST from djls-templates
    pub ast: djls_templates::Ast,
    /// Any errors encountered during parsing (stored as strings for simplicity)
    pub errors: Vec<String>,
}
/// Parse a Django template file into an AST.
///
/// Results are cached by Salsa and only recomputed when the file's content
/// changes (tracked via `source_text`'s revision dependency).
///
/// Returns `None` for non-template files.
#[salsa::tracked]
pub fn parse_template(db: &dyn Db, file: SourceFile) -> Option<Arc<TemplateAst>> {
    if file.kind(db) != FileKind::Template {
        // Parsing only applies to template files.
        return None;
    }
    let source = source_text(db, file);
    let result = match djls_templates::parse_template(source.as_ref()) {
        Ok((ast, errors)) => TemplateAst {
            ast,
            // Flatten parse errors to strings for cheap storage and comparison.
            errors: errors.into_iter().map(|e| e.to_string()).collect(),
        },
        // A fatal parse error still yields an (empty) AST plus the error message.
        Err(err) => TemplateAst {
            ast: djls_templates::Ast::default(),
            errors: vec![err.to_string()],
        },
    };
    Some(Arc::new(result))
}
/// Parse a Django template file by path using the file system.
///
/// Content is read through the `FileSystem`, which checks overlays before
/// falling back to disk (Ruff's two-layer architecture).
///
/// Returns `None` if the file cannot be read.
#[salsa::tracked]
pub fn parse_template_by_path(db: &dyn Db, file_path: FilePath) -> Option<Arc<TemplateAst>> {
    // Read file content through the FileSystem (checks overlays first)
    let path = Path::new(file_path.path(db).as_ref());
    let text = db.read_file_content(path).ok()?;
    // TODO: Move this whole function into djls-templates
    let parsed = match djls_templates::parse_template(&text) {
        Ok((ast, errors)) => TemplateAst {
            ast,
            // Flatten parse errors to strings for cheap storage and comparison.
            errors: errors.into_iter().map(|e| e.to_string()).collect(),
        },
        // A fatal parse error still yields an (empty) AST plus the error message.
        Err(err) => TemplateAst {
            ast: djls_templates::Ast::default(),
            errors: vec![err.to_string()],
        },
    };
    Some(Arc::new(parsed))
}
#[cfg(test)]
mod tests {
    use dashmap::DashMap;
    use salsa::Setter;
    use super::*;
    use crate::buffers::Buffers;
    use crate::document::TextDocument;
    use crate::fs::InMemoryFileSystem;
    use crate::fs::WorkspaceFileSystem;
    use crate::language::LanguageId;
    /// An overlay added after the first parse must be picked up once the
    /// file's revision is bumped.
    #[test]
    fn test_parse_template_with_overlay() {
        // Create a memory filesystem with initial template content
        let mut memory_fs = InMemoryFileSystem::new();
        let template_path = PathBuf::from("/test/template.html");
        memory_fs.add_file(
            template_path.clone(),
            "{% block content %}Original{% endblock %}".to_string(),
        );
        // Create overlay storage
        let buffers = Buffers::new();
        // Create WorkspaceFileSystem that checks overlays first
        let file_system = Arc::new(WorkspaceFileSystem::new(
            buffers.clone(),
            Arc::new(memory_fs),
        ));
        // Create database with the file system
        let files = Arc::new(DashMap::new());
        let mut db = Database::new(file_system, files);
        // Create a SourceFile for the template
        let file = db.get_or_create_file(template_path.clone());
        // Parse template - should get original content from disk
        let ast1 = parse_template(&db, file).expect("Should parse template");
        assert!(ast1.errors.is_empty(), "Should have no errors");
        // Add an overlay with updated content
        let url = crate::paths::path_to_url(&template_path).unwrap();
        let updated_document = TextDocument::new(
            "{% block content %}Updated from overlay{% endblock %}".to_string(),
            2,
            LanguageId::Other,
        );
        buffers.open(url, updated_document);
        // Bump the file revision to trigger re-parse
        file.set_revision(&mut db).to(1);
        // Parse again - should now get overlay content
        let ast2 = parse_template(&db, file).expect("Should parse template");
        assert!(ast2.errors.is_empty(), "Should have no errors");
        // Verify the content changed (we can't directly check the text,
        // but the AST should be different)
        // The AST will have different content in the block
        assert_ne!(
            format!("{:?}", ast1.ast),
            format!("{:?}", ast2.ast),
            "AST should change when overlay is added"
        );
    }
    /// Without a revision bump the cached Arc must be returned; after a bump
    /// the query must re-execute and produce a different result.
    #[test]
    fn test_parse_template_invalidation_on_revision_change() {
        // Create a memory filesystem
        let mut memory_fs = InMemoryFileSystem::new();
        let template_path = PathBuf::from("/test/template.html");
        memory_fs.add_file(
            template_path.clone(),
            "{% if true %}Initial{% endif %}".to_string(),
        );
        // Create overlay storage
        let buffers = Buffers::new();
        // Create WorkspaceFileSystem
        let file_system = Arc::new(WorkspaceFileSystem::new(
            buffers.clone(),
            Arc::new(memory_fs),
        ));
        // Create database
        let files = Arc::new(DashMap::new());
        let mut db = Database::new(file_system, files);
        // Create a SourceFile for the template
        let file = db.get_or_create_file(template_path.clone());
        // Parse template first time
        let ast1 = parse_template(&db, file).expect("Should parse");
        // Parse again without changing revision - should return same Arc (cached)
        let ast2 = parse_template(&db, file).expect("Should parse");
        assert!(Arc::ptr_eq(&ast1, &ast2), "Should return cached result");
        // Update overlay content
        let url = crate::paths::path_to_url(&template_path).unwrap();
        let updated_document = TextDocument::new(
            "{% if false %}Changed{% endif %}".to_string(),
            2,
            LanguageId::Other,
        );
        buffers.open(url, updated_document);
        // Bump revision to trigger invalidation
        file.set_revision(&mut db).to(1);
        // Parse again - should get different result due to invalidation
        let ast3 = parse_template(&db, file).expect("Should parse");
        assert!(
            !Arc::ptr_eq(&ast1, &ast3),
            "Should re-execute after revision change"
        );
        // Content should be different
        assert_ne!(
            format!("{:?}", ast1.ast),
            format!("{:?}", ast3.ast),
            "AST should be different after content change"
        );
    }
}

View file

@ -0,0 +1,240 @@
//! LSP text document representation with efficient line indexing
//!
//! [`TextDocument`] stores open file content with version tracking for the LSP protocol.
//! Pre-computed line indices enable O(1) position lookups, which is critical for
//! performance when handling frequent position-based operations like hover, completion,
//! and diagnostics.
use tower_lsp_server::lsp_types::Position;
use tower_lsp_server::lsp_types::Range;
use crate::language::LanguageId;
use crate::template::ClosingBrace;
use crate::template::TemplateTagContext;
/// In-memory representation of an open document in the LSP.
///
/// Combines document content with metadata needed for LSP operations,
/// including version tracking for synchronization and pre-computed line
/// indices for efficient position lookups.
#[derive(Clone, Debug)]
pub struct TextDocument {
    /// The document's content
    content: String,
    /// The version number of this document (from LSP)
    version: i32,
    /// The language identifier (python, htmldjango, etc.)
    language_id: LanguageId,
    /// Line index for efficient position lookups; rebuilt whenever `content` changes
    line_index: LineIndex,
}
impl TextDocument {
    /// Create a new document, pre-computing the line index for `content`.
    #[must_use]
    pub fn new(content: String, version: i32, language_id: LanguageId) -> Self {
        let line_index = LineIndex::new(&content);
        Self {
            content,
            version,
            language_id,
            line_index,
        }
    }

    /// The full document text.
    #[must_use]
    pub fn content(&self) -> &str {
        &self.content
    }

    /// The LSP version number of this document.
    #[must_use]
    pub fn version(&self) -> i32 {
        self.version
    }

    /// The language identifier reported by the LSP client.
    #[must_use]
    pub fn language_id(&self) -> LanguageId {
        self.language_id.clone()
    }

    /// The pre-computed line index for the current content.
    #[must_use]
    pub fn line_index(&self) -> &LineIndex {
        &self.line_index
    }

    /// Return the text of line `line` (0-based), including its trailing
    /// newline if present. Returns `None` when `line` is past the last line.
    #[must_use]
    pub fn get_line(&self, line: u32) -> Option<String> {
        let line_start = *self.line_index.line_starts.get(line as usize)?;
        // A line ends where the next one starts; the final line ends at
        // end-of-document.
        let line_end = self
            .line_index
            .line_starts
            .get(line as usize + 1)
            .copied()
            .unwrap_or(self.line_index.length);
        Some(self.content[line_start as usize..line_end as usize].to_string())
    }

    /// Extract the text covered by `range`.
    ///
    /// Returns `None` when either endpoint is out of bounds or the resolved
    /// byte offsets do not form a valid slice of the content.
    #[must_use]
    pub fn get_text_range(&self, range: Range) -> Option<String> {
        let start_offset = self.line_index.offset(range.start)? as usize;
        let end_offset = self.line_index.offset(range.end)? as usize;
        // Use checked slicing: offsets are computed as line start + character
        // (a byte count), so a column inside a multi-byte character would
        // otherwise panic on a non-boundary slice.
        self.content
            .get(start_offset..end_offset)
            .map(ToString::to_string)
    }

    /// Apply LSP content changes and bump the stored version.
    ///
    /// Only full-document replacement is supported: each change's `text`
    /// replaces the entire content and the line index is rebuilt.
    /// TODO: Handle incremental updates.
    pub fn update(
        &mut self,
        changes: Vec<tower_lsp_server::lsp_types::TextDocumentContentChangeEvent>,
        version: i32,
    ) {
        for change in changes {
            // `TextDocumentContentChangeEvent.text` is the complete new content
            // (a String, not Option<String>).
            self.content = change.text;
            self.line_index = LineIndex::new(&self.content);
        }
        self.version = version;
    }

    /// Inspect the text around `position` for an open `{%` Django template
    /// tag and report what is needed to complete it.
    ///
    /// Returns `None` when the position is out of bounds, falls inside a
    /// multi-byte character, or no `{%` precedes the cursor on this line.
    #[must_use]
    pub fn get_template_tag_context(&self, position: Position) -> Option<TemplateTagContext> {
        let start = self.line_index.line_starts.get(position.line as usize)?;
        let end = self
            .line_index
            .line_starts
            .get(position.line as usize + 1)
            .copied()
            .unwrap_or(self.line_index.length);
        let line = self.content.get(*start as usize..end as usize)?;
        let char_pos: usize = position.character.try_into().ok()?;
        // NOTE(review): LSP `character` is UTF-16 code units but is used here
        // as a byte offset; checked slicing makes an out-of-range column or a
        // non-character-boundary index return None instead of panicking.
        let prefix = line.get(..char_pos)?;
        let rest_of_line = line.get(char_pos..)?;
        let rest_trimmed = rest_of_line.trim_start();
        prefix.rfind("{%").map(|tag_start| {
            // A cursor immediately after `{%` means the completion must
            // insert a separating space.
            let needs_leading_space = prefix.ends_with("{%");
            let closing_brace = if rest_trimmed.starts_with("%}") {
                ClosingBrace::FullClose
            } else if rest_trimmed.starts_with('}') {
                ClosingBrace::PartialClose
            } else {
                ClosingBrace::None
            };
            TemplateTagContext {
                // Skip the two `{%` bytes, then trim surrounding whitespace.
                partial_tag: prefix[tag_start + 2..].trim().to_string(),
                needs_leading_space,
                closing_brace,
            }
        })
    }

    /// Convert an LSP position to a byte offset in the document.
    #[must_use]
    pub fn position_to_offset(&self, position: Position) -> Option<u32> {
        self.line_index.offset(position)
    }

    /// Convert a byte offset to an LSP position.
    #[must_use]
    pub fn offset_to_position(&self, offset: u32) -> Position {
        self.line_index.position(offset)
    }
}
/// Pre-computed line start positions for efficient position/offset conversion.
///
/// Computing line positions on every lookup would be O(n) where n is the document size.
/// By pre-computing during document creation/updates, we get O(1) lookups for line starts
/// and O(log n) for position-to-offset conversions via binary search.
#[derive(Clone, Debug)]
pub struct LineIndex {
    /// UTF-8 byte offset of the start of each line; the first entry is 0.
    pub line_starts: Vec<u32>,
    /// UTF-16 code-unit offset of the start of each line; parallels `line_starts`.
    pub line_starts_utf16: Vec<u32>,
    /// Total document length in UTF-8 bytes.
    pub length: u32,
    /// Total document length in UTF-16 code units.
    pub length_utf16: u32,
}
impl LineIndex {
    /// Build the index by scanning `text` once, recording the start offset of
    /// every line in both UTF-8 bytes and UTF-16 code units.
    #[must_use]
    pub fn new(text: &str) -> Self {
        // Line 0 always starts at offset 0.
        let mut line_starts = vec![0];
        let mut line_starts_utf16 = vec![0];
        let mut pos_utf8: u32 = 0;
        let mut pos_utf16: u32 = 0;
        for c in text.chars() {
            // len_utf8() <= 4 and len_utf16() <= 2, so these casts are lossless.
            pos_utf8 += c.len_utf8() as u32;
            pos_utf16 += c.len_utf16() as u32;
            if c == '\n' {
                line_starts.push(pos_utf8);
                line_starts_utf16.push(pos_utf16);
            }
        }
        Self {
            line_starts,
            line_starts_utf16,
            length: pos_utf8,
            length_utf16: pos_utf16,
        }
    }

    /// Convert an LSP position to a UTF-8 byte offset.
    ///
    /// Per the LSP spec, a `character` past the end of the line is clamped to
    /// the line end (the start of the following line, or end-of-document for
    /// the last line). Returns `None` if `position.line` is past the last line.
    ///
    /// NOTE(review): `character` is treated as a byte count here; LSP clients
    /// send UTF-16 code units, so multi-byte lines need `offset_utf16`.
    #[must_use]
    pub fn offset(&self, position: Position) -> Option<u32> {
        let line_start = *self.line_starts.get(position.line as usize)?;
        let line_end = self
            .line_starts
            .get(position.line as usize + 1)
            .copied()
            .unwrap_or(self.length);
        // saturating_add guards against u32 overflow on hostile input;
        // min clamps the offset so it never runs into following lines.
        Some(line_start.saturating_add(position.character).min(line_end))
    }

    /// Convert UTF-16 LSP position to UTF-8 byte offset
    ///
    /// `text` must be the same content this index was built from. Returns
    /// `None` if the line is out of range or the line's byte range does not
    /// fall on a character boundary of `text`.
    pub fn offset_utf16(&self, position: Position, text: &str) -> Option<u32> {
        let line_start_utf8 = self.line_starts.get(position.line as usize)?;
        // Validate the line exists in the UTF-16 table too.
        let _line_start_utf16 = self.line_starts_utf16.get(position.line as usize)?;
        // Column 0 needs no scanning: it is the UTF-8 line start.
        if position.character == 0 {
            return Some(*line_start_utf8);
        }
        let next_line_start = self
            .line_starts
            .get(position.line as usize + 1)
            .copied()
            .unwrap_or(self.length);
        let line_text = text.get(*line_start_utf8 as usize..next_line_start as usize)?;
        // Walk the line, advancing both counters until the requested UTF-16
        // column is reached (a column past the line end clamps to line end).
        let mut utf16_pos: u32 = 0;
        let mut utf8_pos: u32 = 0;
        for c in line_text.chars() {
            if utf16_pos >= position.character {
                break;
            }
            // Lossless casts: len_utf16() <= 2, len_utf8() <= 4.
            utf16_pos += c.len_utf16() as u32;
            utf8_pos += c.len_utf8() as u32;
        }
        Some(line_start_utf8 + utf8_pos)
    }

    /// Convert a UTF-8 byte offset back to an LSP position.
    ///
    /// The returned `character` is a byte-based column within the line.
    #[allow(dead_code)]
    #[must_use]
    pub fn position(&self, offset: u32) -> Position {
        // binary_search: Ok(i) means `offset` is exactly the start of line i;
        // Err(i) is the insertion point, so the offset belongs to line i - 1.
        // Since line_starts[0] == 0 <= offset, Err(i) always has i >= 1.
        let line = match self.line_starts.binary_search(&offset) {
            Ok(line) => line,
            Err(line) => line - 1,
        };
        let line_start = self.line_starts[line];
        let character = offset - line_start;
        Position::new(u32::try_from(line).unwrap_or(0), character)
    }
}

View file

@ -0,0 +1,250 @@
//! Virtual file system abstraction
//!
//! This module provides the [`FileSystem`] trait that abstracts file I/O operations.
//! This allows the LSP to work with both real files and in-memory overlays.
#[cfg(test)]
use std::collections::HashMap;
use std::io;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use crate::buffers::Buffers;
use crate::paths;
/// Trait for file system operations
///
/// Abstracts file I/O so the LSP can read from real disk, in-memory test
/// fixtures, or buffer overlays through one interface. Implementors must be
/// `Send + Sync` so a file system can be shared across threads.
pub trait FileSystem: Send + Sync {
    /// Read the entire contents of a file
    ///
    /// # Errors
    /// Returns an [`io::Error`] if the file does not exist or cannot be read.
    fn read_to_string(&self, path: &Path) -> io::Result<String>;
    /// Check if a path exists
    fn exists(&self, path: &Path) -> bool;
    /// Check if a path is a file
    fn is_file(&self, path: &Path) -> bool;
    /// Check if a path is a directory
    fn is_directory(&self, path: &Path) -> bool;
    /// List directory contents
    ///
    /// # Errors
    /// Returns an [`io::Error`] if the directory cannot be read.
    fn read_directory(&self, path: &Path) -> io::Result<Vec<PathBuf>>;
    /// Get file metadata (size, modified time, etc.)
    ///
    /// # Errors
    /// Returns an [`io::Error`] if metadata is unavailable for this path.
    fn metadata(&self, path: &Path) -> io::Result<std::fs::Metadata>;
}
/// In-memory file system for testing
#[cfg(test)]
pub struct InMemoryFileSystem {
    /// Map from path to file content.
    files: HashMap<PathBuf, String>,
}
#[cfg(test)]
impl InMemoryFileSystem {
    /// Create an empty in-memory file system.
    pub fn new() -> Self {
        let files = HashMap::new();
        Self { files }
    }

    /// Store `content` under `path`, replacing any existing entry.
    pub fn add_file(&mut self, path: PathBuf, content: String) {
        self.files.insert(path, content);
    }
}
#[cfg(test)]
impl FileSystem for InMemoryFileSystem {
    fn read_to_string(&self, path: &Path) -> io::Result<String> {
        // Look the path up in the map; a miss is reported as NotFound, the
        // same way a real file system would.
        match self.files.get(path) {
            Some(content) => Ok(content.clone()),
            None => Err(io::Error::new(io::ErrorKind::NotFound, "File not found")),
        }
    }

    fn exists(&self, path: &Path) -> bool {
        self.files.contains_key(path)
    }

    fn is_file(&self, path: &Path) -> bool {
        // Every stored entry is a file; there is no other kind of node.
        self.files.contains_key(path)
    }

    fn is_directory(&self, _path: &Path) -> bool {
        // Simplified for testing - no directories in memory filesystem
        false
    }

    fn read_directory(&self, _path: &Path) -> io::Result<Vec<PathBuf>> {
        // Simplified for testing
        Ok(Vec::new())
    }

    fn metadata(&self, _path: &Path) -> io::Result<std::fs::Metadata> {
        Err(io::Error::new(
            io::ErrorKind::Unsupported,
            "Metadata not supported in memory filesystem",
        ))
    }
}
/// Standard file system implementation that uses [`std::fs`].
///
/// Zero-sized; every operation delegates directly to the operating system.
pub struct OsFileSystem;
impl FileSystem for OsFileSystem {
    fn read_to_string(&self, path: &Path) -> io::Result<String> {
        std::fs::read_to_string(path)
    }

    fn exists(&self, path: &Path) -> bool {
        path.exists()
    }

    fn is_file(&self, path: &Path) -> bool {
        path.is_file()
    }

    fn is_directory(&self, path: &Path) -> bool {
        path.is_dir()
    }

    fn read_directory(&self, path: &Path) -> io::Result<Vec<PathBuf>> {
        // Collect entry paths, propagating the first I/O error encountered.
        let mut entries = Vec::new();
        for entry in std::fs::read_dir(path)? {
            entries.push(entry?.path());
        }
        Ok(entries)
    }

    fn metadata(&self, path: &Path) -> io::Result<std::fs::Metadata> {
        std::fs::metadata(path)
    }
}
/// LSP file system that intercepts reads for buffered files.
///
/// This implements a two-layer architecture where Layer 1 (open [`Buffers`])
/// takes precedence over Layer 2 (Salsa database). When a file is read,
/// this system first checks for a buffer (in-memory content from
/// [`TextDocument`](crate::document::TextDocument)) and returns that content.
/// If no buffer exists, it falls back to reading from disk.
///
/// This type is used by the [`Database`](crate::db::Database) to ensure all file reads go
/// through the buffer system first.
pub struct WorkspaceFileSystem {
    /// In-memory buffers that take precedence over disk files.
    /// Shared handle: clones of the same `Buffers` observe the same set of
    /// open documents (see the tests below).
    buffers: Buffers,
    /// Fallback file system for disk operations
    disk: Arc<dyn FileSystem>,
}
impl WorkspaceFileSystem {
    /// Create a file system that checks `buffers` before reading from `disk`.
    #[must_use]
    pub fn new(buffers: Buffers, disk: Arc<dyn FileSystem>) -> Self {
        Self { buffers, disk }
    }
}
impl FileSystem for WorkspaceFileSystem {
    fn read_to_string(&self, path: &Path) -> io::Result<String> {
        // Layer 1: an open buffer shadows whatever is on disk.
        let buffered = paths::path_to_url(path).and_then(|url| self.buffers.get(&url));
        match buffered {
            Some(document) => Ok(document.content().to_string()),
            None => self.disk.read_to_string(path),
        }
    }

    fn exists(&self, path: &Path) -> bool {
        let in_buffers =
            paths::path_to_url(path).is_some_and(|url| self.buffers.contains(&url));
        in_buffers || self.disk.exists(path)
    }

    fn is_file(&self, path: &Path) -> bool {
        let in_buffers =
            paths::path_to_url(path).is_some_and(|url| self.buffers.contains(&url));
        in_buffers || self.disk.is_file(path)
    }

    fn is_directory(&self, path: &Path) -> bool {
        // Overlays are never directories, so just delegate
        self.disk.is_directory(path)
    }

    fn read_directory(&self, path: &Path) -> io::Result<Vec<PathBuf>> {
        // Overlays are never directories, so just delegate
        self.disk.read_directory(path)
    }

    fn metadata(&self, path: &Path) -> io::Result<std::fs::Metadata> {
        // For overlays, we could synthesize metadata, but for simplicity,
        // fall back to disk. This might need refinement for edge cases.
        self.disk.metadata(path)
    }
}
#[cfg(test)]
mod tests {
    use url::Url;

    use super::*;
    use crate::buffers::Buffers;
    use crate::document::TextDocument;
    use crate::language::LanguageId;

    /// Build an in-memory disk layer holding a single `/test/file.py`.
    fn disk_with_file(content: &str) -> InMemoryFileSystem {
        let mut fs = InMemoryFileSystem::new();
        fs.add_file(
            std::path::PathBuf::from("/test/file.py"),
            content.to_string(),
        );
        fs
    }

    #[test]
    fn test_lsp_filesystem_overlay_precedence() {
        let buffers = Buffers::new();
        let lsp_fs =
            WorkspaceFileSystem::new(buffers.clone(), Arc::new(disk_with_file("original content")));
        let path = std::path::Path::new("/test/file.py");

        // With no open buffer, reads come from the disk layer.
        assert_eq!(lsp_fs.read_to_string(path).unwrap(), "original content");

        // Opening a buffer simulates an editor holding unsaved changes.
        let url = Url::from_file_path("/test/file.py").unwrap();
        let document = TextDocument::new("overlay content".to_string(), 1, LanguageId::Python);
        buffers.open(url, document);

        // The buffer now shadows the disk content.
        assert_eq!(lsp_fs.read_to_string(path).unwrap(), "overlay content");
    }

    #[test]
    fn test_lsp_filesystem_fallback_when_no_overlay() {
        let lsp_fs =
            WorkspaceFileSystem::new(Buffers::new(), Arc::new(disk_with_file("disk content")));

        // No buffer exists, so the read falls through to disk.
        let path = std::path::Path::new("/test/file.py");
        assert_eq!(lsp_fs.read_to_string(path).unwrap(), "disk content");
    }

    #[test]
    fn test_lsp_filesystem_other_operations_delegate() {
        let lsp_fs = WorkspaceFileSystem::new(Buffers::new(), Arc::new(disk_with_file("content")));
        let path = std::path::Path::new("/test/file.py");

        // Existence and file-type checks delegate to the disk layer.
        assert!(lsp_fs.exists(path));
        assert!(lsp_fs.is_file(path));
        assert!(!lsp_fs.is_directory(path));
    }
}

View file

@ -0,0 +1,48 @@
//! Language identification for document routing
//!
//! Maps LSP language identifiers to internal [`FileKind`] for analyzer routing.
//! Language IDs come from the LSP client and determine how files are processed.
use crate::FileKind;
/// Language identifier as reported by the LSP client.
///
/// These identifiers follow VS Code's language ID conventions and determine
/// which analyzers and features are available for a document. Converts to
/// [`FileKind`] to route files to appropriate analyzers (Python vs Template).
// Fieldless enum: Copy is free, and Eq/Hash allow use as a map/set key.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum LanguageId {
    Html,
    HtmlDjango,
    Other,
    PlainText,
    Python,
}
impl From<&str> for LanguageId {
    /// Map a client-reported language ID string to a [`LanguageId`].
    ///
    /// Unrecognized identifiers fall through to [`LanguageId::Other`].
    fn from(language_id: &str) -> Self {
        match language_id {
            "python" => Self::Python,
            "html" => Self::Html,
            "htmldjango" | "django-html" => Self::HtmlDjango,
            "plaintext" => Self::PlainText,
            _ => Self::Other,
        }
    }
}
impl From<String> for LanguageId {
fn from(language_id: String) -> Self {
Self::from(language_id.as_str())
}
}
impl From<LanguageId> for FileKind {
    /// Route a document's language to the analyzer family that handles it.
    fn from(language_id: LanguageId) -> Self {
        match language_id {
            LanguageId::HtmlDjango => Self::Template,
            LanguageId::Python => Self::Python,
            // Plain HTML gets no Django template analysis.
            LanguageId::Html | LanguageId::PlainText | LanguageId::Other => Self::Other,
        }
    }
}

View file

@ -0,0 +1,79 @@
//! Workspace management for the Django Language Server
//!
//! This crate provides the core workspace functionality including document management,
//! file system abstractions, and Salsa integration for incremental computation of
//! Django projects.
//!
//! # Key Components
//!
//! - [`Buffers`] - Thread-safe storage for open documents
//! - [`Database`] - Salsa database for incremental computation
//! - [`TextDocument`] - LSP document representation with efficient indexing
//! - [`FileSystem`] - Abstraction layer for file operations with overlay support
//! - [`paths`] - Consistent URL/path conversion utilities
mod buffers;
pub mod db;
mod document;
mod fs;
mod language;
pub mod paths;
mod template;
use std::path::Path;
pub use buffers::Buffers;
pub use db::Database;
pub use document::TextDocument;
pub use fs::FileSystem;
pub use fs::OsFileSystem;
pub use fs::WorkspaceFileSystem;
pub use language::LanguageId;
/// Stable, compact identifier for files across the subsystem.
///
/// [`FileId`] decouples file identity from paths/URIs, providing efficient keys for maps and
/// Salsa inputs. Once assigned to a file (via its URI), a [`FileId`] remains stable for the
/// lifetime of the system.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct FileId(u32); // newtype over the raw index; see `from_raw`/`index`
impl FileId {
    /// Create a [`FileId`] from a raw u32 value.
    // const: allows compile-time construction (e.g. in const tables/tests).
    #[must_use]
    pub const fn from_raw(raw: u32) -> Self {
        FileId(raw)
    }

    /// Get the underlying u32 index value.
    #[must_use]
    #[allow(dead_code)]
    pub const fn index(self) -> u32 {
        self.0
    }
}
/// File classification for routing to analyzers.
///
/// [`FileKind`] determines how a file should be processed by downstream analyzers.
/// Derived either from a path extension ([`FileKind::from_path`]) or from an
/// LSP language identifier (`From<LanguageId>`).
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum FileKind {
    /// Python source file
    Python,
    /// Django template file
    Template,
    /// Other file type
    Other,
}
impl FileKind {
/// Determine [`FileKind`] from a file path extension.
#[must_use]
pub fn from_path(path: &Path) -> Self {
match path.extension().and_then(|s| s.to_str()) {
Some("py") => FileKind::Python,
Some("html" | "htm") => FileKind::Template,
_ => FileKind::Other,
}
}
}

View file

@ -0,0 +1,203 @@
//! Path and URL conversion utilities
//!
//! This module provides consistent conversion between file paths and URLs,
//! handling platform-specific differences and encoding issues.
use std::path::Path;
use std::path::PathBuf;
use tower_lsp_server::lsp_types;
use url::Url;
/// Convert a `file://` URL to a [`PathBuf`].
///
/// Handles percent-encoding and platform-specific path formats (e.g., Windows drives).
#[must_use]
pub fn url_to_path(url: &Url) -> Option<PathBuf> {
// Only handle file:// URLs
if url.scheme() != "file" {
return None;
}
// Get the path component and decode percent-encoding
let path = percent_encoding::percent_decode_str(url.path())
.decode_utf8()
.ok()?;
#[cfg(windows)]
let path = {
// Remove leading '/' for paths like /C:/...
path.strip_prefix('/').unwrap_or(&path)
};
Some(PathBuf::from(path.as_ref()))
}
/// Convert an LSP URI to a [`PathBuf`].
///
/// Parses the LSP URI string as a URL, then delegates to [`url_to_path`].
#[must_use]
pub fn lsp_uri_to_path(lsp_uri: &lsp_types::Uri) -> Option<PathBuf> {
    let parsed = Url::parse(lsp_uri.as_str()).ok()?;
    url_to_path(&parsed)
}
/// Convert a [`Path`] to a `file://` URL
///
/// Handles both absolute and relative paths. Relative paths are resolved
/// to absolute paths before conversion.
#[must_use]
pub fn path_to_url(path: &Path) -> Option<Url> {
    if path.is_absolute() {
        // Absolute paths convert directly.
        Url::from_file_path(path).ok()
    } else if let Ok(absolute) = std::fs::canonicalize(path) {
        // Relative paths must be made absolute before URL conversion.
        Url::from_file_path(absolute).ok()
    } else {
        // Canonicalization failed (the path may not exist); attempt a direct
        // conversion, which will fail for paths that are still relative.
        Url::from_file_path(path).ok()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_url_to_path_basic() {
        let url = Url::parse("file:///home/user/file.txt").unwrap();
        let path = url_to_path(&url).unwrap();
        assert_eq!(path, PathBuf::from("/home/user/file.txt"));
    }

    #[test]
    fn test_url_to_path_with_spaces() {
        // Percent-encoded characters must be decoded during conversion.
        let url = Url::parse("file:///home/user/my%20file.txt").unwrap();
        let path = url_to_path(&url).unwrap();
        assert_eq!(path, PathBuf::from("/home/user/my file.txt"));
    }

    #[test]
    fn test_url_to_path_non_file_scheme() {
        // Only file:// URLs can name local paths.
        let url = Url::parse("https://example.com/file.txt").unwrap();
        assert!(url_to_path(&url).is_none());
    }

    #[cfg(windows)]
    #[test]
    fn test_url_to_path_windows() {
        // Drive-letter URLs carry a leading '/' before the drive.
        let url = Url::parse("file:///C:/Users/user/file.txt").unwrap();
        let path = url_to_path(&url).unwrap();
        assert_eq!(path, PathBuf::from("C:/Users/user/file.txt"));
    }

    #[test]
    fn test_path_to_url_absolute() {
        let path = if cfg!(windows) {
            PathBuf::from("C:/Users/user/file.txt")
        } else {
            PathBuf::from("/home/user/file.txt")
        };
        let url = path_to_url(&path).unwrap();
        assert_eq!(url.scheme(), "file");
        assert!(url.path().contains("file.txt"));
    }

    #[test]
    fn test_round_trip() {
        // path -> URL -> path must be lossless, including encoded spaces.
        let original_path = if cfg!(windows) {
            PathBuf::from("C:/Users/user/test file.txt")
        } else {
            PathBuf::from("/home/user/test file.txt")
        };
        let url = path_to_url(&original_path).unwrap();
        let converted_path = url_to_path(&url).unwrap();
        assert_eq!(original_path, converted_path);
    }

    #[test]
    fn test_url_with_localhost() {
        // Some systems use file://localhost/path format
        let url = Url::parse("file://localhost/home/user/file.txt").unwrap();
        let path = url_to_path(&url);
        // Host handling is platform/implementation dependent; only assert the
        // value when the conversion succeeds.
        if let Some(p) = path {
            assert_eq!(p, PathBuf::from("/home/user/file.txt"));
        }
    }

    #[test]
    fn test_url_with_empty_host() {
        // Standard file:///path format (three slashes, empty host)
        let url = Url::parse("file:///home/user/file.txt").unwrap();
        let path = url_to_path(&url).unwrap();
        assert_eq!(path, PathBuf::from("/home/user/file.txt"));
    }

    #[cfg(windows)]
    #[test]
    fn test_unc_path_to_url() {
        // UNC paths like \\server\share\file.txt
        let unc_path = PathBuf::from(r"\\server\share\file.txt");
        let url = path_to_url(&unc_path);
        // UNC paths should convert to file://server/share/file.txt when
        // supported; only inspect the URL if conversion succeeded.
        if let Some(u) = url {
            assert!(u.to_string().contains("server"));
            assert!(u.to_string().contains("share"));
        }
    }

    #[test]
    fn test_relative_path_with_dotdot() {
        // A relative path that does not exist cannot be canonicalized, and
        // `Url::from_file_path` rejects relative paths, so conversion fails.
        // (The previous assertion `url.is_none() || url.is_some()` was a
        // tautology and could never fail.)
        let path = PathBuf::from("../some/nonexistent/path.txt");
        assert!(
            path_to_url(&path).is_none(),
            "nonexistent relative path should not convert to a URL"
        );
    }

    #[test]
    fn test_path_with_special_chars() {
        // Test paths with special characters that need encoding
        let path = PathBuf::from("/home/user/file with spaces & special!.txt");
        let url = path_to_url(&path).unwrap();
        // Should be properly percent-encoded
        assert!(url.as_str().contains("%20") || url.as_str().contains("with%20spaces"));
        // Round-trip should work
        let back = url_to_path(&url).unwrap();
        assert_eq!(back, path);
    }

    #[test]
    fn test_url_with_query_or_fragment() {
        // Query parameters and fragments are not part of the file path; the
        // conversion should extract just the path component.
        let url_with_query = Url::parse("file:///path/file.txt?query=param").unwrap();
        let url_with_fragment = Url::parse("file:///path/file.txt#section").unwrap();
        let path1 = url_to_path(&url_with_query);
        let path2 = url_to_path(&url_with_fragment);
        if let Some(p) = path1 {
            assert_eq!(p, PathBuf::from("/path/file.txt"));
        }
        if let Some(p) = path2 {
            assert_eq!(p, PathBuf::from("/path/file.txt"));
        }
    }
}

View file

@ -0,0 +1,34 @@
//! Django template context detection for completions
//!
//! Detects cursor position context within Django template tags to provide
//! appropriate completions and auto-closing behavior.
// TODO: is this module in the right spot or even needed?
/// Tracks what closing characters are needed to complete a template tag.
///
/// Used to determine whether the completion system needs to insert
/// closing braces when completing a Django template tag.
// Fieldless enum: Copy is free, and PartialEq/Eq let callers compare states
// directly instead of matching.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ClosingBrace {
    /// No closing brace present - need to add full `%}` or `}}`
    None,
    /// Partial close present (just `}`) - need to add `%` or second `}`
    PartialClose,
    /// Full close present (`%}` or `}}`) - no closing needed
    FullClose,
}
/// Cursor context within a Django template tag for completion support.
///
/// Captures the state around the cursor position to provide intelligent
/// completions and determine what text needs to be inserted.
/// Produced by `TextDocument::get_template_tag_context`.
#[derive(Debug)]
pub struct TemplateTagContext {
    /// The partial tag text before the cursor (e.g., "loa" for "{% loa|")
    pub partial_tag: String,
    /// What closing characters are already present after the cursor
    pub closing_brace: ClosingBrace,
    /// Whether a space is needed before the completion (true if cursor is right after `{%`)
    pub needs_leading_space: bool,
}