fix: semantic tokens.

ya7010 2025-06-18 01:53:44 +09:00
parent 65d0b03831
commit 2c6bb8e207
12 changed files with 49 additions and 49 deletions
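
In short: tombi_text::Span drops its start()/end() accessor methods in favor of public start/end fields, and every call site migrates accordingly. A minimal before/after sketch of the call-site pattern, lifted from the hunks below:

// Before: accessor methods
let value_str = &json_text[span.start().into()..span.end().into()];

// After: public fields
let value_str = &json_text[span.start.into()..span.end.into()];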

View file

@@ -114,7 +114,7 @@ fn parse_string<'a>(
str_map: &mut HashMap<&'a str, StrId>,
) -> ValueId {
let span = token.span();
- let value_str = &json_text[(usize::from(span.start()) + 1)..(usize::from(span.end()) - 1)];
+ let value_str = &json_text[(usize::from(span.start) + 1)..(usize::from(span.end) - 1)];
let str_id = str_map
.entry(value_str)
.or_insert_with(|| value_arena.str_arena_mut().alloc(value_str));
@@ -123,7 +123,7 @@ fn parse_string<'a>(
fn parse_number(token: &Token, json_text: &str, value_arena: &mut ValueArena) -> Option<ValueId> {
let span = token.span();
- let value_str = &json_text[span.start().into()..span.end().into()];
+ let value_str = &json_text[span.start.into()..span.end.into()];
value_str
.parse::<f64>()
.ok()
@@ -132,7 +132,7 @@ fn parse_number(token: &Token, json_text: &str, value_arena: &mut ValueArena) ->
fn parse_bool(token: &Token, json_text: &str, value_arena: &mut ValueArena) -> Option<ValueId> {
let span = token.span();
- let value_str = &json_text[span.start().into()..span.end().into()];
+ let value_str = &json_text[span.start.into()..span.end.into()];
match value_str {
"true" => Some(value_arena.alloc(Value::Bool(true))),
"false" => Some(value_arena.alloc(Value::Bool(false))),
@@ -307,7 +307,7 @@ where
}
};
let key_span = key_token.span();
- let key_str = &json_text[key_span.start().into()..key_span.end().into()];
+ let key_str = &json_text[key_span.start.into()..key_span.end.into()];
let key_str = &key_str[1..key_str.len() - 1];
let key_id = value_arena.str_arena_mut().alloc(key_str);
while let Some(token) = tokens.peek() {
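
parse_string slices the raw token out of the source and strips the surrounding quotes. A runnable sketch of that slicing, with plain usize offsets standing in for tombi_text::Offset and a hypothetical token span:

fn main() {
    let json_text = r#"{"name": "tombi"}"#;
    // Hypothetical span of the "tombi" string token, quotes included.
    let (start, end) = (9usize, 16usize);
    // Drop the opening and closing quote, as parse_string does above.
    let value_str = &json_text[(start + 1)..(end - 1)];
    assert_eq!(value_str, "tombi");
}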

View file

@@ -33,7 +33,7 @@ pub fn lex(source: &str) -> Lexed {
for result in tokenize(source) {
let (last_span, last_range) = lexed.push_result_token(result);
- last_offset = last_span.end();
+ last_offset = last_span.end;
last_position = last_range.end;
}

View file

@@ -65,7 +65,7 @@ impl<'a> Parser<'a> {
let span = token.span();
let range = token.range();
// Get the string and advance the position
- let raw_str = &self.source[span.start().into()..span.end().into()];
+ let raw_str = &self.source[span.start.into()..span.end.into()];
self.advance();
// Remove the quotation marks
@@ -171,7 +171,7 @@ impl<'a> Parser<'a> {
let token = self.peek().unwrap();
let span = token.span();
let range = token.range();
- let num_str = &self.source[span.start().into()..span.end().into()];
+ let num_str = &self.source[span.start.into()..span.end.into()];
self.advance();
// Parse as f64
@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
let token = self.peek().unwrap();
let span = token.span();
let range = token.range();
- let bool_str = &self.source[span.start().into()..span.end().into()];
+ let bool_str = &self.source[span.start.into()..span.end.into()];
let value = bool_str == "true";
self.advance();
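
parse_number and parse_bool follow the same shape: slice the span out of the source, then interpret the text. A minimal sketch of both steps, assuming plain usize offsets:

fn main() {
    let source = "count = 1.25 flag = true";
    let num_str = &source[8..12];             // span of the number token
    assert_eq!(num_str.parse::<f64>().ok(), Some(1.25)); // None on bad input
    let bool_str = &source[20..24];           // span of the bool token
    assert!(bool_str == "true");              // same comparison as above
}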

View file

@@ -52,7 +52,7 @@ pub fn lex(source: &str) -> Lexed {
}
}
let (last_span, last_range) = lexed.push_result_token(result);
- last_offset = last_span.end();
+ last_offset = last_span.end;
last_position = last_range.end;
}
@@ -85,12 +85,12 @@ pub fn lex_document_header_comments(source: &str) -> Lexed {
}
was_joint = true;
let (last_span, last_range) = lexed.push_result_token(Ok(token));
- last_offset = last_span.end();
+ last_offset = last_span.end;
last_position = last_range.end;
}
SyntaxKind::LINE_BREAK | SyntaxKind::WHITESPACE => {
let (last_span, last_range) = lexed.push_result_token(Ok(token));
- last_offset = last_span.end();
+ last_offset = last_span.end;
last_position = last_range.end;
}
_ => break,
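
Both lexers keep a running cursor that jumps to each pushed token's span end. A sketch of that bookkeeping with usize offsets, assuming the lexer emits contiguous tokens (whitespace and line breaks included):

fn main() {
    let mut last_offset = 0usize;
    // Hypothetical (start, end) spans of three consecutive tokens.
    for (start, end) in [(0usize, 7usize), (7, 8), (8, 20)] {
        assert_eq!(start, last_offset); // no gaps between tokens
        last_offset = end;              // mirrors `last_offset = last_span.end`
    }
    assert_eq!(last_offset, 20);
}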

View file

@@ -1,11 +1,12 @@
use tombi_ast::AstToken;
- use tower_lsp::lsp_types::{Position, Range, SemanticToken};
+ use tower_lsp::lsp_types::SemanticToken;
use super::token_type::TokenType;
pub struct SemanticTokensBuilder {
tokens: Vec<SemanticToken>,
last_range: tombi_text::Range,
+ last_span: tombi_text::Span,
pub file_schema_range: Option<tombi_text::Range>,
}
@@ -14,12 +15,14 @@ impl SemanticTokensBuilder {
Self {
tokens: Vec::new(),
last_range: tombi_text::Range::default(),
+ last_span: tombi_text::Span::default(),
file_schema_range,
}
}
pub fn add_token(&mut self, token_type: TokenType, elem: tombi_syntax::SyntaxElement) {
let range = elem.range();
+ let span = elem.span();
let relative = relative_range(range, self.last_range);
@@ -27,12 +30,13 @@ impl SemanticTokensBuilder {
self.tokens.push(SemanticToken {
delta_line: relative.start.line as u32,
delta_start: relative.start.character as u32,
- length: (relative.end.character - relative.start.character) as u32,
+ length: (span.end - self.last_span.start).into(),
token_type: token_type as u32,
token_modifiers_bitset: 0,
});
self.last_range = range;
+ self.last_span = span;
}
pub fn add_schema_url_comment(
@@ -76,25 +80,25 @@ impl SemanticTokensBuilder {
}
}
- fn relative_range(from: tombi_text::Range, to: tombi_text::Range) -> Range {
+ fn relative_range(from: tombi_text::Range, to: tombi_text::Range) -> tower_lsp::lsp_types::Range {
let line_diff = from.end.line - from.start.line;
let start = from.start - to.start;
- let start = Position {
+ let start = tower_lsp::lsp_types::Position {
line: start.line,
character: start.column,
};
let end = if line_diff == 0 {
- Position {
+ tower_lsp::lsp_types::Position {
line: start.line,
character: start.character + from.end.column - from.start.column,
}
} else {
- Position {
+ tower_lsp::lsp_types::Position {
line: start.line + line_diff,
character: from.end.column,
}
};
- Range { start, end }
+ tower_lsp::lsp_types::Range { start, end }
}
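
LSP semantic tokens are delta-encoded: each token's position is reported relative to the previously emitted token, which is what relative_range computes. A self-contained sketch of the rule with plain (line, character) pairs:

#[derive(Clone, Copy, Debug, PartialEq)]
struct Pos { line: u32, character: u32 }

// Position of `from` relative to the previous token `last`: the character
// is only made relative when both tokens start on the same line.
fn relative(from: Pos, last: Pos) -> Pos {
    if from.line == last.line {
        Pos { line: 0, character: from.character - last.character }
    } else {
        Pos { line: from.line - last.line, character: from.character }
    }
}

fn main() {
    let last = Pos { line: 2, character: 4 };
    assert_eq!(relative(Pos { line: 2, character: 10 }, last),
               Pos { line: 0, character: 6 });
    assert_eq!(relative(Pos { line: 5, character: 3 }, last),
               Pos { line: 3, character: 3 });
}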

View file

@@ -16,6 +16,7 @@ macro_rules! token_types {
}
#[allow(clippy::upper_case_acronyms)]
+ #[derive(Debug)]
pub enum TokenType {
$($standard,)*
$($custom),*
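
With Debug derived on the generated enum, token types can be printed while debugging the semantic tokens pass. A trimmed, hypothetical expansion (the real variant names come from the macro's input):

#[allow(clippy::upper_case_acronyms)]
#[derive(Debug)]
pub enum TokenType {
    KEYWORD,
    STRING,
}

fn main() {
    println!("{:?}", TokenType::STRING); // prints: STRING
}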

View file

@@ -87,8 +87,8 @@ impl<'a, 'b, 'c> Builder<'a, 'b, 'c> {
fn do_token(&mut self, kind: SyntaxKind, n_tokens: usize) {
let span = tombi_text::Span::new(
- self.tokens[self.token_index].span().start(),
- self.tokens[self.token_index + n_tokens].span().start(),
+ self.tokens[self.token_index].span().start,
+ self.tokens[self.token_index + n_tokens].span().start,
);
let text = &self.source[span];
self.token_index += n_tokens;
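
do_token glues n_tokens lexed tokens into one syntax token; note that the span's end is the start of the first token after the run, which equals the run's end when tokens are contiguous. The same arithmetic with hypothetical usize offsets:

fn main() {
    // Start offsets of four consecutive tokens.
    let starts = [10usize, 14, 17, 25];
    let (token_index, n_tokens) = (0, 2); // glue the first two tokens
    let span = (starts[token_index], starts[token_index + n_tokens]);
    assert_eq!(span, (10, 17));
}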

View file

@@ -215,8 +215,8 @@ impl<'t> Parser<'t> {
BARE_KEY,
(
tombi_text::Span::new(
- token.span().start(),
- token.span().start() + tombi_text::Offset::of(parts[0]),
+ token.span().start,
+ token.span().start + tombi_text::Offset::of(parts[0]),
),
tombi_text::Range::new(
token.range().start,
@@ -241,8 +241,8 @@ impl<'t> Parser<'t> {
T![.],
(
tombi_text::Span::new(
- key1.span().end(),
- key1.span().end() + tombi_text::Offset::of("."),
+ key1.span().end,
+ key1.span().end + tombi_text::Offset::of("."),
),
tombi_text::Range::new(
key1.range().end,
@@ -269,8 +269,8 @@ impl<'t> Parser<'t> {
BARE_KEY,
(
tombi_text::Span::new(
- dot.span().end(),
- dot.span().end() + tombi_text::Offset::of(parts[1]),
+ dot.span().end,
+ dot.span().end + tombi_text::Offset::of(parts[1]),
),
tombi_text::Range::new(
dot.range().end,
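
These three hunks carve one joined token into key1, the dot, and key2, advancing by the byte length of each part. The same arithmetic with usize offsets and a hypothetical token starting at offset 5:

fn main() {
    let parts: Vec<&str> = "foo.bar".splitn(2, '.').collect();
    let start = 5usize;                      // token.span().start
    let key1_end = start + parts[0].len();   // 8, end of "foo"
    let dot_end = key1_end + ".".len();      // 9, end of "."
    let key2_end = dot_end + parts[1].len(); // 12, end of "bar"
    assert_eq!((key1_end, dot_end, key2_end), (8, 9, 12));
}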

View file

@@ -904,7 +904,7 @@ impl SyntaxNode {
// recursion with a loop.
let span = self.span();
assert!(
- span.start() <= offset && offset <= span.end(),
+ span.start <= offset && offset <= span.end,
"Bad offset: span {:?} offset {:?}",
span,
offset
@@ -915,7 +915,7 @@ impl SyntaxNode {
let mut children = self.children_with_tokens().filter(|child| {
let child_span = child.span();
- !child_span.is_empty() && (child_span.start() <= offset && offset <= child_span.end())
+ !child_span.is_empty() && (child_span.start <= offset && offset <= child_span.end)
});
let left = children.next().unwrap();
@@ -1253,7 +1253,7 @@ impl SyntaxElement {
}
fn token_at_offset(&self, offset: tombi_text::Offset) -> TokenAtOffset<SyntaxToken> {
- assert!(self.span().start() <= offset && offset <= self.span().end());
+ assert!(self.span().start <= offset && offset <= self.span().end);
match self {
NodeOrToken::Token(token) => TokenAtOffset::Single(token.clone()),
NodeOrToken::Node(node) => node.token_at_offset(offset),
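
Both asserts are inclusive on each end, so an offset sitting exactly on a token boundary is inside both neighbors; that is what lets token_at_offset return either Single or Between. The check in isolation, with usize offsets:

fn contains(start: usize, end: usize, offset: usize) -> bool {
    start <= offset && offset <= end // inclusive on both ends
}

fn main() {
    // Two adjacent tokens sharing the boundary offset 3.
    assert!(contains(0, 3, 3));
    assert!(contains(3, 7, 3));
}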

View file

@@ -59,7 +59,7 @@ impl SyntaxText {
let end = span.end().unwrap_or(tombi_text::Offset::new(self.len()));
assert!(start <= end);
let len = end - start;
- let start = self.span.start() + start;
+ let start = self.span.start + start;
let end = start + len;
let span = tombi_text::Span::new(start, end);
@@ -103,7 +103,7 @@ impl SyntaxText {
.filter_map(move |token| {
let token_span = token.span();
let span = span.intersect(token_span)?;
- Some((token, span - token_span.start()))
+ Some((token, span - token_span.start))
})
}
}
@@ -197,8 +197,8 @@ fn zip_texts<I: Iterator<Item = (SyntaxToken, tombi_text::Span)>>(
return Some(());
}
let advance = tombi_text::Offset::new(std::cmp::min(x.1.len(), y.1.len()));
- x.1 = tombi_text::Span::new(x.1.start() + advance, x.1.end());
- y.1 = tombi_text::Span::new(y.1.start() + advance, y.1.end());
+ x.1 = tombi_text::Span::new(x.1.start + advance, x.1.end);
+ y.1 = tombi_text::Span::new(y.1.start + advance, y.1.end);
}
}
@@ -214,10 +214,10 @@ mod private {
impl SyntaxTextSpan for tombi_text::Span {
fn start(&self) -> Option<tombi_text::Offset> {
- Some(tombi_text::Span::start(*self))
+ Some(self.start)
}
fn end(&self) -> Option<tombi_text::Offset> {
- Some(tombi_text::Span::end(*self))
+ Some(self.end)
}
}
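
The `span - token_span.start` step re-bases a document-relative span so it can index into the token's own text. A sketch of that shift, assuming subtracting an offset moves both endpoints:

fn main() {
    // Document-relative span [12, 16) inside a token occupying [10, 20).
    let (span_start, span_end) = (12usize, 16usize);
    let token_start = 10usize;
    // Token-local coordinates: [2, 6).
    assert_eq!((span_start - token_start, span_end - token_start), (2, 6));
}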

View file

@@ -11,8 +11,8 @@ use crate::{Offset, RawOffset, RawTextSize};
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Span {
// Invariant: start <= end
- start: Offset,
- end: Offset,
+ pub start: Offset,
+ pub end: Offset,
}
impl std::fmt::Debug for Span {
@@ -81,18 +81,6 @@ impl Span {
/// Identity methods.
impl Span {
- /// The start point of this span.
- #[inline]
- pub const fn start(self) -> Offset {
- self.start
- }
- /// The end point of this span.
- #[inline]
- pub const fn end(self) -> Offset {
- self.end
- }
/// The size of this span.
#[inline]
pub const fn len(self) -> RawOffset {

View file

@@ -11,6 +11,13 @@
key1 = 1 # key value1 tailing comment
# key value2 leading comment1
key2 = "2"
key3 = """
multi line basic string
"""
key4 = '''
multi line literal string
'''
# key values end dangling comment1
# key values end dangling comment2
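
The added inputs matter because a multi-line string is a single token spanning several lines, so a width computed from character columns alone is meaningless; a byte-span length is line-agnostic. A sketch with the new key3 value:

fn main() {
    let source = "key3 = \"\"\"\nmulti line basic string\n\"\"\"\n";
    let start = 7usize;          // byte offset of the opening """
    let end = source.len() - 1;  // byte offset just past the closing """
    assert_eq!(end - start, 31); // well-defined across line breaks
}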