Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-11-01 12:24:29 +00:00
Auto merge of #134681 - lnicola:sync-from-ra, r=lnicola
Subtree update of `rust-analyzer` r? `@ghost`
Commit 485f5e80e6
113 changed files with 3098 additions and 2167 deletions
@@ -153,13 +153,13 @@ fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId) -> SyntaxContextId {
 /// This expands the given macro call, but with different arguments. This is
 /// used for completion, where we want to see what 'would happen' if we insert a
 /// token. The `token_to_map` mapped down into the expansion, with the mapped
-/// token returned.
+/// token(s) returned with their priority.
 pub fn expand_speculative(
     db: &dyn ExpandDatabase,
     actual_macro_call: MacroCallId,
     speculative_args: &SyntaxNode,
     token_to_map: SyntaxToken,
-) -> Option<(SyntaxNode, SyntaxToken)> {
+) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
     let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
 
@@ -303,17 +303,19 @@ pub fn expand_speculative(
         token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
 
     let syntax_node = node.syntax_node();
-    let (token, _) = rev_tmap
+    let token = rev_tmap
         .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
         .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx)))
-        .min_by_key(|(t, ctx)| {
+        .map(|(t, ctx)| {
             // prefer tokens of the same kind and text, as well as non opaque marked ones
             // Note the inversion of the score here, as we want to prefer the first token in case
             // of all tokens having the same score
-            ctx.is_opaque(db) as u8
+            let ranking = ctx.is_opaque(db) as u8
                 + 2 * (t.kind() != token_to_map.kind()) as u8
-                + 4 * ((t.text() != token_to_map.text()) as u8)
-        })?;
+                + 4 * ((t.text() != token_to_map.text()) as u8);
+            (t, ranking)
+        })
+        .collect();
     Some((node.syntax_node(), token))
 }
 
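The second hunk changes `expand_speculative` (in rust-analyzer's `hir-expand` crate) from picking a single best token via `min_by_key` to returning every mapped token together with its ranking, leaving the final choice to the caller. As a rough illustration of what caller-side selection could look like under the new signature, here is a minimal sketch; the helper name `best_speculative_token` and the import paths are assumptions for illustration, not code from this commit.

use hir_expand::{db::ExpandDatabase, MacroCallId};
use syntax::{SyntaxNode, SyntaxToken};

// Hypothetical helper (not part of this commit): collapse the new
// Vec<(SyntaxToken, u8)> result back into the single best token that the
// old API used to return. Lower ranking wins, and `min_by_key` keeps the
// first element on ties, matching the old "prefer the first token" behaviour.
fn best_speculative_token(
    db: &dyn ExpandDatabase,
    actual_macro_call: MacroCallId,
    speculative_args: &SyntaxNode,
    token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
    let (expansion, ranked_tokens) =
        hir_expand::db::expand_speculative(db, actual_macro_call, speculative_args, token_to_map)?;
    let (token, _ranking) = ranked_tokens.into_iter().min_by_key(|&(_, ranking)| ranking)?;
    Some((expansion, token))
}

The ranking itself is unchanged in spirit: an opaque hygiene context adds 1, a kind mismatch adds 2, and a text mismatch adds 4, so an exactly matching token in a transparent context ranks 0.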