Mirror of https://github.com/roc-lang/roc.git, synced 2025-10-02 08:11:12 +00:00

Commit ce6c340ebc: First pass at semantic tokens
Parent: 0437645293
10 changed files with 981 additions and 34 deletions

@@ -3,18 +3,16 @@ use std::path::{Path, PathBuf};
use bumpalo::Bump;
use roc_can::{abilities::AbilitiesStore, expr::Declarations};
use roc_collections::MutMap;
use roc_fmt::{Ast, Buf};
use roc_load::{CheckedModule, LoadedModule};
use roc_module::symbol::{Interns, ModuleId, Symbol};
use roc_packaging::cache::{self, RocCacheDir};
use roc_parse::parser::SyntaxError;
use roc_region::all::LineInfo;
use roc_reporting::report::RocDocAllocator;
use roc_solve_problem::TypeError;
use roc_types::subs::Subs;
use tower_lsp::lsp_types::{
    Diagnostic, GotoDefinitionResponse, Hover, HoverContents, Location, MarkedString, Position,
    Range, TextEdit, Url,
    Range, SemanticTokenType, SemanticTokens, SemanticTokensResult, TextEdit, Url,
};

use crate::convert::{

@@ -22,6 +20,13 @@ use crate::convert::{
    ToRange, ToRocPosition,
};

mod parse_ast;
mod semantic_tokens;
mod tokens;

use self::{parse_ast::Ast, semantic_tokens::arrange_semantic_tokens, tokens::Token};
pub const HIGHLIGHT_TOKENS_LEGEND: &[SemanticTokenType] = Token::LEGEND;

pub(crate) struct GlobalAnalysis {
    pub documents: Vec<AnalyzedDocument>,
}
@@ -345,40 +350,31 @@ impl AnalyzedDocument {
    pub fn format(&self) -> Option<Vec<TextEdit>> {
        let source = &self.source;
        let arena = &Bump::new();
        let ast = parse_all(arena, &self.source).ok()?;
        let mut buf = Buf::new_in(arena);
        fmt_all(&mut buf, &ast);
        let new_source = buf.as_str();

        if source == new_source {
        let ast = Ast::parse(arena, source).ok()?;
        let fmt = ast.fmt();

        if source == fmt.as_str() {
            None
        } else {
            let range = self.whole_document_range();
            let text_edit = TextEdit::new(range, new_source.to_string());
            let text_edit = TextEdit::new(range, fmt.to_string().to_string());
            Some(vec![text_edit])
        }
    }
}

fn parse_all<'a>(arena: &'a Bump, src: &'a str) -> Result<Ast<'a>, SyntaxError<'a>> {
    use roc_parse::{
        module::{module_defs, parse_header},
        parser::Parser,
        state::State,
    };

    let (module, state) = parse_header(arena, State::new(src.as_bytes()))
        .map_err(|e| SyntaxError::Header(e.problem))?;

    let (_, defs, _) = module_defs().parse(arena, state, 0).map_err(|(_, e)| e)?;

    Ok(Ast { module, defs })
}

fn fmt_all<'a>(buf: &mut Buf<'a>, ast: &'a Ast) {
    roc_fmt::module::fmt_module(buf, &ast.module);

    roc_fmt::def::fmt_defs(buf, &ast.defs, 0);

    buf.fmt_end_of_file();

    pub fn semantic_tokens(&self) -> Option<SemanticTokensResult> {
        let source = &self.source;
        let arena = &Bump::new();

        let ast = Ast::parse(arena, source).ok()?;
        let tokens = ast.semantic_tokens();

        let data = arrange_semantic_tokens(tokens, &self.line_info);

        Some(SemanticTokensResult::Tokens(SemanticTokens {
            result_id: None,
            data,
        }))
    }
}
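For context, the value a successful semantic_tokens call hands back to the client is just this flat, delta-encoded list wrapped in SemanticTokensResult::Tokens. A minimal hand-built instance (field values here are illustrative only, not output of the analyzer):

use tower_lsp::lsp_types::{SemanticToken, SemanticTokens, SemanticTokensResult};

fn example_result() -> SemanticTokensResult {
    SemanticTokensResult::Tokens(SemanticTokens {
        // No delta support yet, so there is no result id for clients to resume from.
        result_id: None,
        data: vec![SemanticToken {
            delta_line: 0,             // first token: offsets are absolute
            delta_start: 0,
            length: 3,
            token_type: 0,             // index into HIGHLIGHT_TOKENS_LEGEND
            token_modifiers_bitset: 0, // no modifiers are emitted
        }],
    })
}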

crates/lang_srv/src/analysis/parse_ast.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
use bumpalo::Bump;
use roc_fmt::Buf;
use roc_parse::{
    ast::{Defs, Module},
    parser::SyntaxError,
};
use roc_region::all::Loc;

use self::format::FormattedAst;

use super::tokens::{IterTokens, Token};

mod format;

pub struct Ast<'a> {
    arena: &'a Bump,
    module: Module<'a>,
    defs: Defs<'a>,
}

impl<'a> Ast<'a> {
    pub fn parse(arena: &'a Bump, src: &'a str) -> Result<Ast<'a>, SyntaxError<'a>> {
        use roc_parse::{
            module::{module_defs, parse_header},
            parser::Parser,
            state::State,
        };

        let (module, state) = parse_header(arena, State::new(src.as_bytes()))
            .map_err(|e| SyntaxError::Header(e.problem))?;

        let (_, defs, _) = module_defs().parse(arena, state, 0).map_err(|(_, e)| e)?;

        Ok(Ast {
            module,
            defs,
            arena,
        })
    }

    pub fn fmt(&self) -> FormattedAst<'a> {
        let mut buf = Buf::new_in(self.arena);

        roc_fmt::module::fmt_module(&mut buf, &self.module);

        roc_fmt::def::fmt_defs(&mut buf, &self.defs, 0);

        buf.fmt_end_of_file();

        FormattedAst::new(buf)
    }

    pub fn semantic_tokens(&self) -> impl IntoIterator<Item = Loc<Token>> + '_ {
        let header_tokens = self.module.iter_tokens(self.arena);
        let body_tokens = self.defs.iter_tokens(self.arena);

        header_tokens.into_iter().chain(body_tokens)
    }
}
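A minimal sketch of how this type is meant to be driven from analysis.rs (written as if inside the analysis module, since parse_ast is private; src stands for an arbitrary document body):

use bumpalo::Bump;

use self::parse_ast::Ast;

fn demo(src: &str) -> Option<()> {
    let arena = Bump::new();
    // One parse now serves both formatting and highlighting.
    let ast = Ast::parse(&arena, src).ok()?;

    // Formatting: compare the canonical text against the current source.
    let _needs_edit = src != ast.fmt().as_str();

    // Highlighting: walk the same tree for (region, token kind) pairs.
    let _token_count = ast.semantic_tokens().into_iter().count();

    Some(())
}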

crates/lang_srv/src/analysis/parse_ast/format.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
use roc_fmt::Buf;

pub struct FormattedAst<'a> {
    buf: Buf<'a>,
}

impl<'a> FormattedAst<'a> {
    pub(crate) fn new(buf: Buf<'a>) -> Self {
        Self { buf }
    }

    pub fn as_str(&self) -> &str {
        self.buf.as_str()
    }
}

impl ToString for FormattedAst<'_> {
    fn to_string(&self) -> String {
        self.buf.as_str().to_owned()
    }
}
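FormattedAst deliberately exposes only the finished text: as_str borrows it for the source == fmt.as_str() comparison in format, and to_string produces the owned copy handed to the TextEdit. A tiny hedged illustration (apply_if_changed is a hypothetical helper, not part of this commit, written as if it lived alongside FormattedAst in this module):

// Hypothetical helper: decide whether a formatting edit is needed and, if so,
// hand back an owned String suitable for a TextEdit payload.
fn apply_if_changed(source: &str, fmt: FormattedAst<'_>) -> Option<String> {
    if source == fmt.as_str() {
        None
    } else {
        Some(fmt.to_string())
    }
}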

crates/lang_srv/src/analysis/semantic_tokens.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use roc_region::all::{LineColumn, LineInfo, Loc};
use tower_lsp::lsp_types::SemanticToken;

use super::tokens::Token;

pub fn arrange_semantic_tokens(
    tokens: impl IntoIterator<Item = Loc<Token>>,
    line_info: &LineInfo,
) -> Vec<SemanticToken> {
    let tokens = tokens.into_iter();
    let (min, max) = tokens.size_hint();
    let size_hint = max.unwrap_or(min);
    let mut result = Vec::with_capacity(size_hint);

    let mut last_line = 0;
    let mut last_start = 0;

    for Loc {
        region,
        value: token,
    } in tokens
    {
        let length = region.len();

        let LineColumn { line, column } = line_info.convert_pos(region.start());

        let delta_line = line - last_line;
        let delta_start = if delta_line == 0 {
            column - last_start
        } else {
            column
        };

        result.push(SemanticToken {
            delta_line,
            delta_start,
            length,
            token_type: token as u32,
            token_modifiers_bitset: 0,
        });

        last_line = line;
        last_start = column;
    }

    result
}
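The loop above implements the LSP wire encoding: delta_line is the line offset from the previous token, and delta_start is a column offset when the previous token sits on the same line but an absolute column after a line break. A standalone sketch of the same arithmetic, using plain (line, column, length) triples with made-up positions instead of roc's Loc/LineInfo types:

fn encode(positions: &[(u32, u32, u32)]) -> Vec<[u32; 3]> {
    let (mut last_line, mut last_start) = (0, 0);
    positions
        .iter()
        .map(|&(line, column, length)| {
            let delta_line = line - last_line;
            let delta_start = if delta_line == 0 { column - last_start } else { column };
            last_line = line;
            last_start = column;
            [delta_line, delta_start, length]
        })
        .collect()
}

fn main() {
    // Tokens at (line 0, col 4), (line 0, col 10) and (line 2, col 2):
    // the second token is 6 columns after the first on the same line,
    // the third starts a new line, so its delta_start is its absolute column.
    assert_eq!(
        encode(&[(0, 4, 3), (0, 10, 5), (2, 2, 1)]),
        vec![[0, 4, 3], [0, 6, 5], [2, 2, 1]]
    );
}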

crates/lang_srv/src/analysis/tokens.rs (new file, 784 lines)
@@ -0,0 +1,784 @@
use bumpalo::{
    collections::{CollectIn, Vec as BumpVec},
    vec as bumpvec, Bump,
};
use roc_module::called_via::{BinOp, UnaryOp};
use roc_parse::{
    ast::{
        AbilityImpls, AbilityMember, AssignedField, Collection, Defs, Expr, Header, Implements,
        ImplementsAbilities, ImplementsAbility, ImplementsClause, Module, Pattern, PatternAs,
        RecordBuilderField, Spaced, StrLiteral, Tag, TypeAnnotation, TypeDef, TypeHeader, ValueDef,
        WhenBranch,
    },
    header::{
        AppHeader, ExposedName, HostedHeader, ImportsEntry, InterfaceHeader, ModuleName,
        PackageEntry, PackageHeader, PackageName, PlatformHeader, PlatformRequires, ProvidesTo, To,
        TypedIdent,
    },
    ident::{Accessor, UppercaseIdent},
};
use roc_region::all::{Loc, Region};
use tower_lsp::lsp_types::SemanticTokenType;

macro_rules! tokens {
    ($($token:ident => $lsp_token:literal),* $(,)?) => {
        pub enum Token {
            $($token),*
        }

        fn _non_redundant_lsp_tokens() {
            match "" {
                $($lsp_token => (),)*
                _ => (),
            }
        }

        impl Token {
            pub const LEGEND: &[SemanticTokenType] = &[
                $(SemanticTokenType::new($lsp_token)),*
            ];
        }
    }
}

// Try to use predefined values at
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens
tokens! {
    Module => "namespace",
    Type => "type",
    Ability => "interface",
    TypeVariable => "typeParameter",
    Parameter => "parameter",
    Variable => "variable",
    Field => "property",
    Tag => "enumMember",
    Function => "function",
    Keyword => "keyword",
    String => "string",
    Number => "number",
    Operator => "operator",
    Comment => "comment",
}

fn onetoken(token: Token, region: Region, arena: &Bump) -> BumpVec<Loc<Token>> {
    bumpvec![in arena; Loc::at(region, token)]
}

fn field_token(region: Region, arena: &Bump) -> BumpVec<Loc<Token>> {
    onetoken(Token::Field, region, arena)
}

trait HasToken {
    fn token(&self) -> Token;
}

impl<T: HasToken> HasToken for Spaced<'_, T> {
    fn token(&self) -> Token {
        self.item().token()
    }
}

impl HasToken for ModuleName<'_> {
    fn token(&self) -> Token {
        Token::Module
    }
}

impl HasToken for &str {
    fn token(&self) -> Token {
        if self.chars().next().unwrap().is_uppercase() {
            Token::Type
        } else {
            Token::Variable
        }
    }
}

impl HasToken for ExposedName<'_> {
    fn token(&self) -> Token {
        self.as_str().token()
    }
}

impl HasToken for PackageName<'_> {
    fn token(&self) -> Token {
        Token::Module
    }
}

impl HasToken for StrLiteral<'_> {
    fn token(&self) -> Token {
        Token::String
    }
}

impl HasToken for UppercaseIdent<'_> {
    fn token(&self) -> Token {
        Token::Type
    }
}

impl HasToken for To<'_> {
    fn token(&self) -> Token {
        match self {
            To::ExistingPackage(_) => Token::Module,
            To::NewPackage(_) => Token::Module,
        }
    }
}

impl HasToken for BinOp {
    fn token(&self) -> Token {
        Token::Operator
    }
}

impl HasToken for UnaryOp {
    fn token(&self) -> Token {
        Token::Operator
    }
}

pub trait IterTokens {
    // Use a vec until "impl trait in trait" is stabilized
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>>;
}

impl<T: HasToken> IterTokens for Loc<T> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        onetoken(self.value.token(), self.region, arena)
    }
}

impl<T: IterTokens> IterTokens for Spaced<'_, T> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.item().iter_tokens(arena)
    }
}

impl<T: IterTokens> IterTokens for Collection<'_, T> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.items
            .iter()
            .flat_map(|item| item.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl<T: IterTokens> IterTokens for &[T] {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.iter()
            .flat_map(|item| item.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl<T: IterTokens, U: IterTokens> IterTokens for (T, U) {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let (a, b) = self;
        a.iter_tokens(arena)
            .into_iter()
            .chain(b.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for Module<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            comments: _,
            header,
        } = self;
        header.iter_tokens(arena)
    }
}

impl IterTokens for Header<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            Header::Interface(ih) => ih.iter_tokens(arena),
            Header::App(app) => app.iter_tokens(arena),
            Header::Package(pkg) => pkg.iter_tokens(arena),
            Header::Platform(pf) => pf.iter_tokens(arena),
            Header::Hosted(h) => h.iter_tokens(arena),
        }
    }
}

impl IterTokens for InterfaceHeader<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            before_name: _,
            name,
            exposes,
            imports,
        } = self;

        (name.iter_tokens(arena).into_iter())
            .chain(exposes.item.iter_tokens(arena))
            .chain(imports.item.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for AppHeader<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            before_name: _,
            name,
            packages,
            imports,
            provides,
        } = self;

        (name.iter_tokens(arena).into_iter())
            .chain(packages.iter().flat_map(|p| p.item.iter_tokens(arena)))
            .chain(imports.iter().flat_map(|i| i.item.iter_tokens(arena)))
            .chain(provides.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for PackageHeader<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            before_name: _,
            name,
            exposes,
            packages,
        } = self;

        (name.iter_tokens(arena).into_iter())
            .chain(exposes.item.iter_tokens(arena))
            .chain(packages.item.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for PlatformHeader<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            before_name: _,
            name,
            requires,
            exposes,
            packages,
            imports,
            provides,
        } = self;

        (name.iter_tokens(arena).into_iter())
            .chain(requires.item.iter_tokens(arena))
            .chain(exposes.item.iter_tokens(arena))
            .chain(packages.item.iter_tokens(arena))
            .chain(imports.item.iter_tokens(arena))
            .chain(provides.item.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for HostedHeader<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            before_name: _,
            name,
            exposes,
            imports,
            generates: _,
            generates_with,
        } = self;

        (name.iter_tokens(arena).into_iter())
            .chain(exposes.item.iter_tokens(arena))
            .chain(imports.item.iter_tokens(arena))
            .chain(generates_with.item.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for Loc<Spaced<'_, ImportsEntry<'_>>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self.value.item() {
            ImportsEntry::Module(_module_name, names) => names.iter_tokens(arena),
            ImportsEntry::Package(_pkg, _module_name, names) => names.iter_tokens(arena),
            ImportsEntry::IngestedFile(_str, idents) => idents.iter_tokens(arena),
        }
    }
}

impl IterTokens for Loc<Spaced<'_, PackageEntry<'_>>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let PackageEntry {
            shorthand: _,
            spaces_after_shorthand: _,
            package_name,
        } = self.value.item();

        package_name.iter_tokens(arena)
    }
}

impl IterTokens for Loc<Spaced<'_, TypedIdent<'_>>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.value.item().iter_tokens(arena)
    }
}

impl IterTokens for TypedIdent<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            ident,
            spaces_before_colon: _,
            ann,
        } = self;

        (ident.iter_tokens(arena).into_iter())
            .chain(ann.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for ProvidesTo<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            provides_keyword: _,
            entries,
            types,
            to_keyword: _,
            to,
        } = self;

        (entries.iter_tokens(arena).into_iter())
            .chain(types.iter().flat_map(|t| t.iter_tokens(arena)))
            .chain(to.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for PlatformRequires<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self { rigids, signature } = self;

        (rigids.iter_tokens(arena).into_iter())
            .chain(signature.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for Loc<TypeAnnotation<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self.value {
            TypeAnnotation::Function(params, ret) => (params.iter_tokens(arena).into_iter())
                .chain(ret.iter_tokens(arena))
                .collect_in(arena),
            TypeAnnotation::Apply(_mod, _type, args) => args.iter_tokens(arena),
            TypeAnnotation::BoundVariable(_) => onetoken(Token::Type, self.region, arena),
            TypeAnnotation::As(ty, _, as_ty) => (ty.iter_tokens(arena).into_iter())
                .chain(as_ty.iter_tokens(arena))
                .collect_in(arena),
            TypeAnnotation::Record { fields, ext } => (fields.iter_tokens(arena).into_iter())
                .chain(ext.iter().flat_map(|t| t.iter_tokens(arena)))
                .collect_in(arena),
            TypeAnnotation::Tuple { elems, ext } => (elems.iter_tokens(arena).into_iter())
                .chain(ext.iter().flat_map(|t| t.iter_tokens(arena)))
                .collect_in(arena),
            TypeAnnotation::TagUnion { tags, ext } => (tags.iter_tokens(arena).into_iter())
                .chain(ext.iter().flat_map(|t| t.iter_tokens(arena)))
                .collect_in(arena),
            TypeAnnotation::Inferred => onetoken(Token::Type, self.region, arena),
            TypeAnnotation::Wildcard => onetoken(Token::Type, self.region, arena),
            TypeAnnotation::Where(ty, implements) => (ty.iter_tokens(arena).into_iter())
                .chain(implements.iter_tokens(arena))
                .collect_in(arena),
            TypeAnnotation::SpaceBefore(ty, _) | TypeAnnotation::SpaceAfter(ty, _) => {
                Loc::at(self.region, *ty).iter_tokens(arena)
            }
            TypeAnnotation::Malformed(_) => bumpvec![in arena;],
        }
    }
}

impl<T> IterTokens for Loc<AssignedField<'_, T>>
where
    Loc<T>: IterTokens,
{
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.value.iter_tokens(arena)
    }
}

impl<T> IterTokens for AssignedField<'_, T>
where
    Loc<T>: IterTokens,
{
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            AssignedField::RequiredValue(field, _, ty)
            | AssignedField::OptionalValue(field, _, ty) => (field_token(field.region, arena)
                .into_iter())
            .chain(ty.iter_tokens(arena))
            .collect_in(arena),
            AssignedField::LabelOnly(s) => s.iter_tokens(arena),
            AssignedField::SpaceBefore(af, _) | AssignedField::SpaceAfter(af, _) => {
                af.iter_tokens(arena)
            }
            AssignedField::Malformed(_) => bumpvec![in arena;],
        }
    }
}

impl IterTokens for Loc<Tag<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.value.iter_tokens(arena)
    }
}

impl IterTokens for Tag<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            Tag::Apply { name, args } => (onetoken(Token::Tag, name.region, arena).into_iter())
                .chain(args.iter_tokens(arena))
                .collect_in(arena),
            Tag::SpaceBefore(t, _) | Tag::SpaceAfter(t, _) => t.iter_tokens(arena),
            Tag::Malformed(_) => bumpvec![in arena;],
        }
    }
}

impl IterTokens for TypeHeader<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self { name, vars } = self;

        (name.iter_tokens(arena).into_iter())
            .chain(vars.iter().map(|v| v.with_value(Token::Type)))
            .collect_in(arena)
    }
}

impl IterTokens for Loc<ImplementsClause<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.value.iter_tokens(arena)
    }
}

impl IterTokens for ImplementsClause<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self { var, abilities } = self;
        (var.iter_tokens(arena).into_iter())
            .chain(abilities.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for Defs<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.defs()
            .flat_map(|item| match item {
                Ok(type_def) => type_def.iter_tokens(arena),
                Err(value_def) => value_def.iter_tokens(arena),
            })
            .collect_in(arena)
    }
}

impl IterTokens for TypeDef<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            TypeDef::Alias { header, ann } => (header.iter_tokens(arena).into_iter())
                .chain(ann.iter_tokens(arena))
                .collect_in(arena),
            TypeDef::Opaque {
                header,
                typ,
                derived,
            } => (header.iter_tokens(arena).into_iter())
                .chain(typ.iter_tokens(arena))
                .chain(derived.iter().flat_map(|t| t.iter_tokens(arena)))
                .collect_in(arena),
            TypeDef::Ability {
                header: TypeHeader { name, vars },
                loc_implements,
                members,
            } => (onetoken(Token::Ability, name.region, arena).into_iter())
                .chain(vars.iter().map(|v| v.with_value(Token::Type)))
                .chain(loc_implements.iter_tokens(arena))
                .chain(members.iter_tokens(arena))
                .collect_in(arena),
        }
    }
}

impl IterTokens for Loc<ImplementsAbilities<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.value.iter_tokens(arena)
    }
}

impl IterTokens for ImplementsAbilities<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            ImplementsAbilities::Implements(impls) => impls.iter_tokens(arena),
            ImplementsAbilities::SpaceBefore(i, _) | ImplementsAbilities::SpaceAfter(i, _) => {
                i.iter_tokens(arena)
            }
        }
    }
}

impl IterTokens for Loc<ImplementsAbility<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.value.iter_tokens(arena)
    }
}

impl IterTokens for ImplementsAbility<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            ImplementsAbility::ImplementsAbility { ability, impls } => {
                (ability.iter_tokens(arena).into_iter())
                    .chain(impls.iter().flat_map(|i| i.iter_tokens(arena)))
                    .collect_in(arena)
            }
            ImplementsAbility::SpaceBefore(ia, _) | ImplementsAbility::SpaceAfter(ia, _) => {
                ia.iter_tokens(arena)
            }
        }
    }
}

impl IterTokens for Loc<AbilityImpls<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        self.value.iter_tokens(arena)
    }
}

impl IterTokens for AbilityImpls<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            AbilityImpls::AbilityImpls(fields) => fields.iter_tokens(arena),
            AbilityImpls::SpaceBefore(ai, _) | AbilityImpls::SpaceAfter(ai, _) => {
                ai.iter_tokens(arena)
            }
        }
    }
}

impl IterTokens for Loc<Implements<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self.value {
            Implements::Implements => onetoken(Token::Keyword, self.region, arena),
            Implements::SpaceBefore(i, _) | Implements::SpaceAfter(i, _) => {
                Loc::at(self.region, *i).iter_tokens(arena)
            }
        }
    }
}

impl IterTokens for AbilityMember<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self { name, typ } = self;
        (onetoken(Token::Function, name.region, arena).into_iter())
            .chain(typ.iter_tokens(arena))
            .collect_in(arena)
    }
}

impl IterTokens for ValueDef<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self {
            ValueDef::Annotation(pattern, annotation) => (pattern.iter_tokens(arena).into_iter())
                .chain(annotation.iter_tokens(arena))
                .collect_in(arena),
            ValueDef::Body(pattern, body) => (pattern.iter_tokens(arena).into_iter())
                .chain(body.iter_tokens(arena))
                .collect_in(arena),
            ValueDef::AnnotatedBody {
                ann_pattern,
                ann_type,
                comment: _,
                body_pattern,
                body_expr,
            } => (ann_pattern.iter_tokens(arena).into_iter())
                .chain(ann_type.iter_tokens(arena))
                .chain(body_pattern.iter_tokens(arena))
                .chain(body_expr.iter_tokens(arena))
                .collect_in(arena),
            ValueDef::Dbg {
                preceding_comment,
                condition,
            }
            | ValueDef::Expect {
                preceding_comment,
                condition,
            }
            | ValueDef::ExpectFx {
                preceding_comment,
                condition,
            } => (onetoken(Token::Comment, *preceding_comment, arena).into_iter())
                .chain(condition.iter_tokens(arena))
                .collect_in(arena),
        }
    }
}

impl IterTokens for &Loc<Expr<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        (**self).iter_tokens(arena)
    }
}

impl IterTokens for Loc<Expr<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let region = self.region;
        match self.value {
            Expr::Float(_) => onetoken(Token::Number, region, arena),
            Expr::Num(_) => onetoken(Token::Number, region, arena),
            Expr::NonBase10Int { .. } => onetoken(Token::Number, region, arena),
            Expr::Str(_) => onetoken(Token::String, region, arena),
            Expr::SingleQuote(_) => onetoken(Token::String, region, arena),
            Expr::RecordAccess(rcd, _field) => Loc::at(region, *rcd).iter_tokens(arena),
            Expr::AccessorFunction(accessor) => Loc::at(region, accessor).iter_tokens(arena),
            Expr::TupleAccess(tup, _field) => Loc::at(region, *tup).iter_tokens(arena),
            Expr::List(lst) => lst.iter_tokens(arena),
            Expr::RecordUpdate { update, fields } => (update.iter_tokens(arena).into_iter())
                .chain(fields.iter().flat_map(|f| f.iter_tokens(arena)))
                .collect_in(arena),
            Expr::Record(rcd) => rcd.iter_tokens(arena),
            Expr::Tuple(tup) => tup.iter_tokens(arena),
            Expr::RecordBuilder(rb) => rb.iter_tokens(arena),
            Expr::IngestedFile(_path, ty) => ty.iter_tokens(arena),
            Expr::Var { .. } => onetoken(Token::Variable, region, arena),
            Expr::Underscore(_) => onetoken(Token::Variable, region, arena),
            Expr::Crash => onetoken(Token::Keyword, region, arena),
            Expr::Tag(_) => onetoken(Token::Tag, region, arena),
            Expr::OpaqueRef(_) => onetoken(Token::Type, region, arena),
            Expr::Closure(patterns, body) => (patterns.iter_tokens(arena).into_iter())
                .chain(body.iter_tokens(arena))
                .collect_in(arena),
            Expr::Defs(defs, exprs) => (defs.iter_tokens(arena).into_iter())
                .chain(exprs.iter_tokens(arena))
                .collect_in(arena),
            Expr::Backpassing(patterns, e1, e2) => (patterns.iter_tokens(arena).into_iter())
                .chain(e1.iter_tokens(arena))
                .chain(e2.iter_tokens(arena))
                .collect_in(arena),
            Expr::Expect(e1, e2) => (e1.iter_tokens(arena).into_iter())
                .chain(e2.iter_tokens(arena))
                .collect_in(arena),
            Expr::Dbg(e1, e2) => (e1.iter_tokens(arena).into_iter())
                .chain(e2.iter_tokens(arena))
                .collect_in(arena),
            Expr::Apply(e1, e2, _called_via) => (e1.iter_tokens(arena).into_iter())
                .chain(e2.iter_tokens(arena))
                .collect_in(arena),
            Expr::BinOps(e1, e2) => (e1.iter_tokens(arena).into_iter())
                .chain(e2.iter_tokens(arena))
                .collect_in(arena),
            Expr::UnaryOp(e1, op) => (op.iter_tokens(arena).into_iter())
                .chain(e1.iter_tokens(arena))
                .collect_in(arena),
            Expr::If(e1, e2) => (e1.iter_tokens(arena).into_iter())
                .chain(e2.iter_tokens(arena))
                .collect_in(arena),
            Expr::When(e, branches) => (e.iter_tokens(arena).into_iter())
                .chain(branches.iter_tokens(arena))
                .collect_in(arena),
            Expr::SpaceBefore(e, _) | Expr::SpaceAfter(e, _) => {
                Loc::at(region, *e).iter_tokens(arena)
            }
            Expr::ParensAround(e) => Loc::at(region, *e).iter_tokens(arena),
            Expr::MultipleRecordBuilders(e) => e.iter_tokens(arena),
            Expr::UnappliedRecordBuilder(e) => e.iter_tokens(arena),
            Expr::MalformedIdent(_, _) | Expr::MalformedClosure | Expr::PrecedenceConflict(_) => {
                bumpvec![in arena;]
            }
        }
    }
}

impl IterTokens for Loc<Accessor<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self.value {
            Accessor::RecordField(_) => onetoken(Token::Function, self.region, arena),
            Accessor::TupleIndex(_) => onetoken(Token::Function, self.region, arena),
        }
    }
}

impl IterTokens for Loc<RecordBuilderField<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        match self.value {
            RecordBuilderField::Value(field, _, e)
            | RecordBuilderField::ApplyValue(field, _, _, e) => field_token(field.region, arena)
                .into_iter()
                .chain(e.iter_tokens(arena))
                .collect_in(arena),
            RecordBuilderField::LabelOnly(field) => field_token(field.region, arena),
            RecordBuilderField::SpaceBefore(rbf, _) | RecordBuilderField::SpaceAfter(rbf, _) => {
                Loc::at(self.region, *rbf).iter_tokens(arena)
            }
            RecordBuilderField::Malformed(_) => bumpvec![in arena;],
        }
    }
}

impl IterTokens for &WhenBranch<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let WhenBranch {
            patterns,
            value,
            guard,
        } = self;

        (patterns.iter_tokens(arena).into_iter())
            .chain(value.iter_tokens(arena))
            .chain(guard.iter().flat_map(|g| g.iter_tokens(arena)))
            .collect_in(arena)
    }
}

impl IterTokens for Loc<Pattern<'_>> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let region = self.region;
        match self.value {
            Pattern::Identifier(_) => onetoken(Token::Variable, region, arena),
            Pattern::Tag(_) => onetoken(Token::Tag, region, arena),
            Pattern::OpaqueRef(_) => onetoken(Token::Type, region, arena),
            Pattern::Apply(p1, p2) => (p1.iter_tokens(arena).into_iter())
                .chain(p2.iter_tokens(arena))
                .collect_in(arena),
            Pattern::RecordDestructure(ps) => ps.iter_tokens(arena),
            Pattern::RequiredField(_field, p) => p.iter_tokens(arena),
            Pattern::OptionalField(_field, p) => p.iter_tokens(arena),
            Pattern::NumLiteral(_) => onetoken(Token::Number, region, arena),
            Pattern::NonBase10Literal { .. } => onetoken(Token::Number, region, arena),
            Pattern::FloatLiteral(_) => onetoken(Token::Number, region, arena),
            Pattern::StrLiteral(_) => onetoken(Token::String, region, arena),
            Pattern::Underscore(_) => onetoken(Token::Variable, region, arena),
            Pattern::SingleQuote(_) => onetoken(Token::String, region, arena),
            Pattern::Tuple(ps) => ps.iter_tokens(arena),
            Pattern::List(ps) => ps.iter_tokens(arena),
            Pattern::ListRest(None) => bumpvec![in arena;],
            Pattern::ListRest(Some((_, pas))) => pas.iter_tokens(arena),
            Pattern::As(p1, pas) => (p1.iter_tokens(arena).into_iter())
                .chain(pas.iter_tokens(arena))
                .collect_in(arena),
            Pattern::SpaceBefore(p, _) | Pattern::SpaceAfter(p, _) => {
                Loc::at(region, *p).iter_tokens(arena)
            }
            Pattern::QualifiedIdentifier { .. } => onetoken(Token::Variable, region, arena),
            Pattern::Malformed(_) | Pattern::MalformedIdent(_, _) => bumpvec![in arena;],
        }
    }
}

impl IterTokens for PatternAs<'_> {
    fn iter_tokens<'a>(&self, arena: &'a Bump) -> BumpVec<'a, Loc<Token>> {
        let Self {
            spaces_before: _,
            identifier,
        } = self;

        onetoken(Token::Variable, identifier.region, arena)
    }
}
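For reference, a hand-written sketch of roughly what the tokens! invocation above expands to (abbreviated to three variants; the real expansion covers all fourteen). The point is that the enum discriminants and the legend are generated in the same order, which is what makes the `token as u32` cast in arrange_semantic_tokens a valid index into the legend the server advertises; the generated _non_redundant_lsp_tokens helper appears to exist only so the compiler warns about duplicate LSP token strings as unreachable match arms.

use tower_lsp::lsp_types::SemanticTokenType;

pub enum Token {
    Module,  // discriminant 0 -> "namespace"
    Type,    // discriminant 1 -> "type"
    Ability, // discriminant 2 -> "interface"
    // ...the remaining variants follow in declaration order
}

impl Token {
    pub const LEGEND: &[SemanticTokenType] = &[
        SemanticTokenType::new("namespace"),
        SemanticTokenType::new("type"),
        SemanticTokenType::new("interface"),
        // ...
    ];
}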
@@ -1,7 +1,9 @@
use std::collections::HashMap;

use roc_module::symbol::ModuleId;
use tower_lsp::lsp_types::{Diagnostic, GotoDefinitionResponse, Hover, Position, TextEdit, Url};
use tower_lsp::lsp_types::{
    Diagnostic, GotoDefinitionResponse, Hover, Position, SemanticTokensResult, TextEdit, Url,
};

use crate::analysis::{AnalyzedDocument, GlobalAnalysis};

@@ -74,4 +76,9 @@ impl Registry {
        let document = self.document_by_url(url)?;
        document.format()
    }

    pub fn semantic_tokens(&mut self, url: &Url) -> Option<SemanticTokensResult> {
        let document = self.document_by_url(url)?;
        document.semantic_tokens()
    }
}
@@ -1,3 +1,4 @@
use analysis::HIGHLIGHT_TOKENS_LEGEND;
use parking_lot::{Mutex, MutexGuard};
use registry::{DocumentChange, Registry};
use tower_lsp::jsonrpc::Result;

@@ -48,12 +49,25 @@ impl RocLs {
                work_done_progress: None,
            },
        };
        let semantic_tokens_provider =
            SemanticTokensServerCapabilities::SemanticTokensOptions(SemanticTokensOptions {
                work_done_progress_options: WorkDoneProgressOptions {
                    work_done_progress: None,
                },
                legend: SemanticTokensLegend {
                    token_types: HIGHLIGHT_TOKENS_LEGEND.into(),
                    token_modifiers: vec![],
                },
                range: None,
                full: Some(SemanticTokensFullOptions::Bool(true)),
            });

        ServerCapabilities {
            text_document_sync: Some(text_document_sync),
            hover_provider: Some(hover_provider),
            definition_provider: Some(OneOf::Right(definition_provider)),
            document_formatting_provider: Some(OneOf::Right(document_formatting_provider)),
            semantic_tokens_provider: Some(semantic_tokens_provider),
            ..ServerCapabilities::default()
        }
    }

@@ -162,6 +176,19 @@ impl LanguageServer for RocLs {

        panic_wrapper(|| self.registry().formatting(&text_document.uri))
    }

    async fn semantic_tokens_full(
        &self,
        params: SemanticTokensParams,
    ) -> Result<Option<SemanticTokensResult>> {
        let SemanticTokensParams {
            text_document,
            work_done_progress_params: _,
            partial_result_params: _,
        } = params;

        panic_wrapper(|| self.registry().semantic_tokens(&text_document.uri))
    }
}

fn panic_wrapper<T>(f: impl FnOnce() -> Option<T> + std::panic::UnwindSafe) -> Result<Option<T>> {
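Note the shape of the capability built in this hunk: range: None plus full: Some(SemanticTokensFullOptions::Bool(true)) advertises whole-document highlighting only, so clients re-request the full token list after edits rather than asking for ranges or deltas. If delta support were added later, the full field would be built roughly like this instead (a sketch, not part of this commit), and the server would then also need to return and track result_ids:

use tower_lsp::lsp_types::SemanticTokensFullOptions;

// Sketch only: opt into textDocument/semanticTokens/full/delta as well.
fn full_with_delta() -> SemanticTokensFullOptions {
    SemanticTokensFullOptions::Delta { delta: Some(true) }
}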