Semantic Ranges

Jeremy Kolb 2020-02-25 08:38:50 -05:00 committed by kjeremy
parent d3040c0deb
commit 8f6f864547
6 changed files with 92 additions and 16 deletions

View file

@@ -430,6 +430,13 @@ impl Analysis {
         self.with_db(|db| syntax_highlighting::highlight(db, file_id))
     }
 
+    /// Computes syntax highlighting for the given file range.
+    pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> {
+        self.with_db(|db| {
+            syntax_highlighting::highlight_range(db, frange.file_id, Some(frange.range))
+        })
+    }
+
     /// Computes syntax highlighting for the given file.
     pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancelable<String> {
         self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
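A caller narrows the work by handing highlight_range only the span it cares about. A rough usage sketch (the fixture string and the offset 17 are illustrative; single_file is the crate's own mock_analysis test helper, as in the test added further down):

    let (analysis, file_id) = single_file("struct Foo { pub x: i32 }");
    let highlights = analysis
        .highlight_range(FileRange {
            file_id,
            range: TextRange::offset_len(17.into(), 1.into()), // covers the field name "x"
        })
        .unwrap();
    assert_eq!(highlights[0].tag, "field"); // only the token inside the range is reported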

View file

@@ -5,8 +5,8 @@ use ra_db::SourceDatabase;
 use ra_ide_db::{defs::NameDefinition, RootDatabase};
 use ra_prof::profile;
 use ra_syntax::{
-    ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange,
-    WalkEvent, T,
+    ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken,
+    TextRange, WalkEvent, T,
 };
 use rustc_hash::FxHashMap;
@@ -69,6 +69,16 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
 pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
     let _p = profile("highlight");
+    highlight_range(db, file_id, None)
+}
+
+pub(crate) fn highlight_range(
+    db: &RootDatabase,
+    file_id: FileId,
+    range: Option<TextRange>,
+) -> Vec<HighlightedRange> {
+    let _p = profile("highlight_range");
+
     let parse = db.parse(file_id);
     let root = parse.tree().syntax().clone();
@@ -79,6 +89,15 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
     let mut in_macro_call = None;
 
+    // Determine the root based on the range
+    let root = match range {
+        Some(range) => match root.covering_element(range) {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => token.parent(),
+        },
+        None => root,
+    };
+
     for event in root.preorder_with_tokens() {
         match event {
             WalkEvent::Enter(node) => match node.kind() {
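The covering_element lookup is what confines the walk: for a sub-file range the traversal starts at the smallest syntax node spanning that range instead of the whole file. A standalone sketch of the same lookup, assuming ra_syntax's API as imported above (the sample text and the offset 16 are invented):

    use ra_syntax::{AstNode, NodeOrToken, SourceFile, TextRange, TextUnit};

    let parse = SourceFile::parse("fn main() { let x = 1; }");
    let root = parse.tree().syntax().clone();

    // 1-byte range over the "x" binding at offset 16.
    let range = TextRange::offset_len(TextUnit::from(16), TextUnit::from(1));
    let node = match root.covering_element(range) {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(), // a lone token widens to its parent node
    };

    // Only this small subtree (the name node around "x") gets walked for highlights.
    println!("covering node: {:?} at {:?}", node.kind(), node.text_range());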
@@ -374,7 +393,10 @@ mod tests {
     use test_utils::{assert_eq_text, project_dir, read_text};
 
-    use crate::mock_analysis::{single_file, MockAnalysis};
+    use crate::{
+        mock_analysis::{single_file, MockAnalysis},
+        FileRange, TextRange,
+    };
 
     #[test]
     fn test_highlighting() {
@@ -475,4 +497,25 @@ fn bar() {
         let _ = host.analysis().highlight(file_id).unwrap();
         // eprintln!("elapsed: {:?}", t.elapsed());
     }
+
+    #[test]
+    fn test_ranges() {
+        let (analysis, file_id) = single_file(
+            r#"
+            #[derive(Clone, Debug)]
+            struct Foo {
+                pub x: i32,
+                pub y: i32,
+            }"#,
+        );
+
+        let highlights = &analysis
+            .highlight_range(FileRange {
+                file_id,
+                range: TextRange::offset_len(82.into(), 1.into()), // "x"
+            })
+            .unwrap();
+
+        assert_eq!(highlights[0].tag, "field");
+    }
 }

View file

@@ -7,9 +7,9 @@ use lsp_types::{
     CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
     ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
     SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
-    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
-    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
-    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
+    TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
+    WorkDoneProgressOptions,
 };
 
 pub fn server_capabilities() -> ServerCapabilities {
@@ -60,7 +60,7 @@ pub fn server_capabilities() -> ServerCapabilities {
         execute_command_provider: None,
         workspace: None,
         call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
-        semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
+        semantic_tokens_provider: Some(
             SemanticTokensOptions {
                 legend: SemanticTokensLegend {
                     token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
@@ -71,9 +71,11 @@ pub fn server_capabilities() -> ServerCapabilities {
                 },
                 document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
-                ..SemanticTokensOptions::default()
-            },
-        )),
+                range_provider: Some(true),
+                work_done_progress_options: Default::default(),
+            }
+            .into(),
+        ),
         experimental: Default::default(),
     }
 }

View file

@@ -527,8 +527,9 @@ fn on_request(
         .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
         .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
         .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
-        .on::<req::Ssr>(handlers::handle_ssr)?
         .on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+        .on::<req::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)?
+        .on::<req::Ssr>(handlers::handle_ssr)?
         .finish();
     Ok(())
 }

View file

@@ -17,8 +17,8 @@ use lsp_types::{
     Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
     FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
     PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
-    SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
-    TextDocumentIdentifier, TextEdit, WorkspaceEdit,
+    SemanticTokens, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
+    SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit,
 };
 use ra_ide::{
     AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind,
@@ -1092,3 +1092,25 @@ pub fn handle_semantic_tokens(
     Ok(Some(tokens.into()))
 }
+
+pub fn handle_semantic_tokens_range(
+    world: WorldSnapshot,
+    params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+    let _p = profile("handle_semantic_tokens_range");
+
+    let frange = (&params.text_document, params.range).try_conv_with(&world)?;
+    let line_index = world.analysis().file_line_index(frange.file_id)?;
+
+    let mut builder = SemanticTokensBuilder::default();
+
+    for h in world.analysis().highlight_range(frange)?.into_iter() {
+        let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
+        let (token_type, token_modifiers) = type_and_modifiers.conv();
+        builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
+    }
+
+    let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
+
+    Ok(Some(tokens.into()))
+}
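This handler serves the range-scoped flavour of the semantic tokens request (textDocument/semanticTokens/range in later LSP revisions). A hypothetical set of params a client might send, assuming the lsp_types field layout of this era; the URI and positions are invented:

    use lsp_types::{Position, Range, SemanticTokensRangeParams, TextDocumentIdentifier, Url};

    let params = SemanticTokensRangeParams {
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
        text_document: TextDocumentIdentifier::new(Url::parse("file:///project/src/lib.rs").unwrap()),
        // Tokens are computed only for line 3, columns 8..9; the rest of the file is skipped.
        range: Range::new(Position::new(3, 8), Position::new(3, 9)),
    };
    // try_conv_with turns (text_document, range) into a FileRange, which feeds highlight_range.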

View file

@@ -12,9 +12,10 @@ pub use lsp_types::{
     DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
     PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
     PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
-    SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
-    ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
-    TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
+    SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
+    SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit,
+    WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
 };
 
 pub enum AnalyzerStatus {}