Add semantic tokens for undef and unused labels

This commit is contained in:
Patrick Förster 2023-03-18 09:45:15 +01:00
parent f8baa93267
commit 9b513cfda5
6 changed files with 221 additions and 18 deletions

23
Cargo.lock generated
View file

@ -93,6 +93,12 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5dd14596c0e5b954530d0e6f1fd99b89c03e313aa2086e8da4303701a09e1cf"
[[package]]
name = "block-buffer"
version = "0.10.4"
@ -179,7 +185,7 @@ version = "3.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"clap_lex 0.2.4",
"indexmap",
"textwrap",
@ -191,7 +197,7 @@ version = "4.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3d7ae14b20b94cb02149ed21a86c423859cbe18dc7ed69845cace50e52b40a5"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"clap_derive",
"clap_lex 0.3.2",
"is-terminal",
@ -724,7 +730,7 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"inotify-sys",
"libc",
]
@ -843,7 +849,7 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"libc",
]
@ -931,7 +937,7 @@ version = "0.94.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"serde",
"serde_json",
"serde_repr",
@ -998,7 +1004,7 @@ version = "5.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"crossbeam-channel",
"filetime",
"fsevent-sys",
@ -1301,7 +1307,7 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
@ -1357,7 +1363,7 @@ version = "0.36.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd5c6ff11fecd55b40746d1995a02f2eb375bf8c00d192d521ee09f42bef37bc"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"errno",
"io-lifetimes",
"libc",
@ -1597,6 +1603,7 @@ version = "5.4.0"
dependencies = [
"anyhow",
"assert_unordered",
"bitflags 2.0.1",
"chrono",
"clap 4.1.8",
"criterion",

View file

@ -36,7 +36,7 @@ doctest = false
[dependencies]
anyhow = "1.0.69"
chrono = { version = "0.4.23", default-features = false, features = ["std"] }
clap = { version = "4.1.6", features = ["derive"] }
clap = { version = "4.1.8", features = ["derive"] }
crossbeam-channel = "0.5.6"
dashmap = "5.4.0"
dirs = "4.0.0"
@ -69,6 +69,7 @@ thiserror = "1.0.38"
threadpool = "1.8.1"
titlecase = "2.2.1"
unicode-normalization = "0.1.22"
bitflags = "2.0.1"
[dependencies.salsa]
git = "https://github.com/salsa-rs/salsa"

View file

@ -10,5 +10,6 @@ pub mod inlay_hint;
pub mod link;
pub mod reference;
pub mod rename;
pub mod semantic_tokens;
pub mod symbol;
pub mod workspace_command;

View file

@ -0,0 +1,109 @@
mod label;
use bitflags::bitflags;
use lsp_types::{
Position, Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
SemanticTokensLegend, Url,
};
use rowan::TextRange;
use crate::{
db::Workspace,
util::{line_index::LineIndex, line_index_ext::LineIndexExt},
Db,
};
/// The semantic token types produced by this server.
///
/// The discriminant doubles as the index into the `token_types` array of the
/// advertised legend, so the explicit values must stay in sync with the order
/// used in `legend()`.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
#[repr(u32)]
pub enum TokenKind {
    // Index 0 == SemanticTokenType::new("label") in the legend.
    Label = 0,
}
bitflags! {
    /// Bit set of semantic token modifiers attached to a token.
    ///
    /// The bit positions must match the order of the `token_modifiers`
    /// array in `legend()` (bit 0 -> "undefined", bit 1 -> "unused").
    #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
    pub struct TokenModifiers: u32 {
        /// No modifiers.
        const NONE = 0;
        /// A label reference with no matching definition.
        const UNDEFINED = 1;
        /// A label definition that is never referenced.
        const UNUSED = 2;
    }
}
/// Builds the [`SemanticTokensLegend`] advertised in the server capabilities.
///
/// The order of the entries defines the numeric encoding used by clients:
/// it must agree with the discriminants of [`TokenKind`] and the bit
/// positions of [`TokenModifiers`].
pub fn legend() -> SemanticTokensLegend {
    let token_types = vec![SemanticTokenType::new("label")];
    let token_modifiers = vec![
        SemanticTokenModifier::new("undefined"),
        SemanticTokenModifier::new("unused"),
    ];
    SemanticTokensLegend {
        token_types,
        token_modifiers,
    }
}
/// A single semantic token before LSP delta encoding.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct Token {
    /// Byte range of the token in the document text.
    pub range: TextRange,
    /// Token type (index into the legend's `token_types`).
    pub kind: TokenKind,
    /// Modifier bit set (bits into the legend's `token_modifiers`).
    pub modifiers: TokenModifiers,
}
/// Accumulates [`Token`]s and converts them into the relative LSP wire
/// encoding via [`TokenBuilder::finish`].
#[derive(Debug, Default)]
pub struct TokenBuilder {
    // Tokens in arbitrary insertion order; sorted by start offset in finish().
    tokens: Vec<Token>,
}
impl TokenBuilder {
    /// Queues a token for later encoding.
    pub fn push(&mut self, token: Token) {
        self.tokens.push(token);
    }

    /// Sorts the collected tokens and encodes them into the LSP delta format.
    ///
    /// Per the LSP specification, `delta_line` and `delta_start` are relative
    /// to the *start* position of the previous token (or `(0, 0)` for the
    /// first token); `delta_start` is relative to column 0 when the token is
    /// on a new line.
    pub fn finish(mut self, line_index: &LineIndex) -> SemanticTokens {
        let mut data = Vec::with_capacity(self.tokens.len());
        self.tokens.sort_by_key(|token| token.range.start());

        let mut last_pos = Position::new(0, 0);
        for token in self.tokens {
            let range = line_index.line_col_lsp_range(token.range);
            // NOTE(review): assumes tokens never span multiple lines (labels
            // are single-line); a multi-line token would need splitting here.
            let length = range.end.character - range.start.character;

            let delta_line = range.start.line - last_pos.line;
            let delta_start = if delta_line == 0 {
                // Same line: offset from the previous token's start character.
                // (The previous code subtracted in the wrong order and was
                // measured from the previous token's *end*, which underflows
                // for sorted tokens.)
                range.start.character - last_pos.character
            } else {
                // New line: offset from column 0.
                range.start.character
            };

            data.push(SemanticToken {
                delta_line,
                delta_start,
                length,
                token_type: token.kind as u32,
                token_modifiers_bitset: token.modifiers.bits(),
            });

            // Track the previous token's START, not its end (LSP spec).
            last_pos = range.start;
        }

        SemanticTokens {
            result_id: None,
            data,
        }
    }
}
/// Computes the semantic tokens for the part of `uri` covered by `viewport`.
///
/// Returns `None` when the URI is not part of the current workspace.
pub fn find_all(db: &dyn Db, uri: &Url, viewport: Range) -> Option<SemanticTokens> {
    let document = Workspace::get(db).lookup_uri(db, uri)?;
    // Convert the LSP range into a byte range before scanning for labels.
    let viewport = document.line_index(db).offset_lsp_range(viewport);

    let mut builder = TokenBuilder::default();
    label::find(db, document, viewport, &mut builder);
    Some(builder.finish(document.line_index(db)))
}

View file

@ -0,0 +1,73 @@
use rowan::TextRange;
use crate::{
db::{analysis::label, Document, Workspace},
Db,
};
use super::{Token, TokenBuilder, TokenKind, TokenModifiers};
/// Pushes a semantic token for every label of `document` that intersects
/// `viewport`, marking definitions without references as UNUSED and
/// references without a definition as UNDEFINED.
///
/// Returns `None` when the document is not a TeX file.
pub fn find(
    db: &dyn Db,
    document: Document,
    viewport: TextRange,
    builder: &mut TokenBuilder,
) -> Option<()> {
    let data = document.parse(db).as_tex()?;
    for label in data.analyze(db).labels(db).iter() {
        let range = label.range(db);
        // Skip labels outside the requested viewport.
        if viewport.intersect(range).is_none() {
            continue;
        }

        let name = label.name(db).text(db);
        let modifiers = match label.origin(db) {
            label::Origin::Definition(_) if is_label_referenced(db, document, name) => {
                TokenModifiers::NONE
            }
            // A definition that nothing references.
            label::Origin::Definition(_) => TokenModifiers::UNUSED,
            label::Origin::Reference(_) | label::Origin::ReferenceRange(_)
                if is_label_defined(db, document, name) =>
            {
                TokenModifiers::NONE
            }
            // A reference without a matching definition.
            label::Origin::Reference(_) | label::Origin::ReferenceRange(_) => {
                TokenModifiers::UNDEFINED
            }
        };

        builder.push(Token {
            range,
            kind: TokenKind::Label,
            modifiers,
        });
    }

    Some(())
}
/// Returns `true` if any document related to `child` contains a definition
/// of the label called `name`.
fn is_label_defined(db: &dyn Db, child: Document, name: &str) -> bool {
    let workspace = Workspace::get(db);
    workspace
        .related(db, child)
        .iter()
        .filter_map(|document| document.parse(db).as_tex())
        .flat_map(|data| data.analyze(db).labels(db))
        .any(|label| {
            matches!(label.origin(db), label::Origin::Definition(_))
                && label.name(db).text(db) == name
        })
}
/// Returns `true` if any document related to `child` contains a reference
/// (or reference range) to the label called `name`.
fn is_label_referenced(db: &dyn Db, child: Document, name: &str) -> bool {
    let workspace = Workspace::get(db);
    workspace
        .related(db, child)
        .iter()
        .filter_map(|document| document.parse(db).as_tex())
        .flat_map(|data| data.analyze(db).labels(db))
        .any(|label| {
            let is_reference = matches!(
                label.origin(db),
                label::Origin::Reference(_) | label::Origin::ReferenceRange(_)
            );
            is_reference && label.name(db).text(db) == name
        })
}

View file

@ -28,7 +28,7 @@ use crate::{
build::{self, BuildParams, BuildResult, BuildStatus},
completion::{self, builder::CompletionItemData},
definition, folding, formatting, forward_search, highlight, hover, inlay_hint, link,
reference, rename, symbol,
reference, rename, semantic_tokens, symbol,
workspace_command::{change_environment, clean, dep_graph},
},
normalize_uri,
@ -179,6 +179,14 @@ impl Server {
..Default::default()
}),
inlay_hint_provider: Some(OneOf::Left(true)),
semantic_tokens_provider: Some(
SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions {
work_done_progress_options: Default::default(),
legend: semantic_tokens::legend(),
range: Some(true),
full: Some(SemanticTokensFullOptions::Bool(false)),
}),
),
..ServerCapabilities::default()
}
}
@ -707,14 +715,6 @@ impl Server {
Ok(())
}
fn semantic_tokens_range(
&self,
_id: RequestId,
_params: SemanticTokensRangeParams,
) -> Result<()> {
Ok(())
}
fn build(&mut self, id: RequestId, params: BuildParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
@ -814,6 +814,18 @@ impl Server {
Ok(())
}
/// Handles the `textDocument/semanticTokens/range` request.
///
/// NOTE(review): `run_with_db` presumably schedules the closure on the
/// database thread pool and replies to `id` with its `Option` result —
/// confirm against the other request handlers in this impl.
fn semantic_tokens_range(
    &mut self,
    id: RequestId,
    params: SemanticTokensRangeParams,
) -> Result<()> {
    self.run_with_db(id, move |db| {
        semantic_tokens::find_all(db, &params.text_document.uri, params.range)
    });
    Ok(())
}
fn handle_file_event(&mut self, event: notify::Event) {
let mut changed = false;