Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-27 10:17:15 +00:00)

Merge pull request #18977 from ChayimFriedman2/fix-upmapping

fix: Fix missing upmapping in trait impls completion

Commit c78cc2baa9, 2 changed files with 68 additions and 13 deletions
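Editorial note (not part of the diff): when the cursor sits inside an attribute macro, completion operates on tokens of the macro-expanded file, so any `TextRange` that ends up in the final edit must be mapped back ("upmapped") to the file on disk; `Semantics::original_syntax_node_rooted` performs that mapping here. The hunks below keep both views of the item in `complete_trait_impl_name` and add the missing upmapping in `complete_trait_impl_item_by_name`. A rough sketch of the pattern, reusing names from the diff (a fragment that assumes the surrounding `CompletionContext` `ctx` and an `ast::Name`, not standalone code):

    // Node as found in the (possibly macro-expanded) file: fine for walking
    // the syntax tree, e.g. up to the enclosing impl.
    let macro_file_item = name.syntax().parent()?;
    // Upmapped twin in the file on disk: the only node whose text ranges may
    // be used for the replacement edit.
    let real_file_item = ctx.sema.original_syntax_node_rooted(&macro_file_item)?;
    let replacement_start = real_file_item.text_range().start();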
@@ -85,7 +85,7 @@ fn complete_trait_impl_name(
     name: &Option<ast::Name>,
     kind: ImplCompletionKind,
 ) -> Option<()> {
-    let item = match name {
+    let macro_file_item = match name {
         Some(name) => name.syntax().parent(),
         None => {
             let token = &ctx.token;
@@ -96,12 +96,12 @@ fn complete_trait_impl_name(
                 .parent()
         }
     }?;
-    let item = ctx.sema.original_syntax_node_rooted(&item)?;
+    let real_file_item = ctx.sema.original_syntax_node_rooted(&macro_file_item)?;
     // item -> ASSOC_ITEM_LIST -> IMPL
-    let impl_def = ast::Impl::cast(item.parent()?.parent()?)?;
+    let impl_def = ast::Impl::cast(macro_file_item.parent()?.parent()?)?;
     let replacement_range = {
         // ctx.sema.original_ast_node(item)?;
-        let first_child = item
+        let first_child = real_file_item
             .children_with_tokens()
             .find(|child| {
                 !matches!(
@@ -109,7 +109,7 @@ fn complete_trait_impl_name(
                     SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR
                 )
             })
-            .unwrap_or_else(|| SyntaxElement::Node(item.clone()));
+            .unwrap_or_else(|| SyntaxElement::Node(real_file_item.clone()));

         TextRange::new(first_child.text_range().start(), ctx.source_range().end())
     };
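Taken together, the hunks above split the item into two views: `macro_file_item`, the node as it appears in the (possibly macro-expanded) file, is used to walk up to the enclosing `IMPL`, while `real_file_item`, the upmapped node, is used only to compute `replacement_range`, so the range written into the edit refers to offsets in the file on disk rather than in the expansion.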
@@ -133,8 +133,11 @@ pub(crate) fn complete_trait_impl_item_by_name(
             acc,
             ctx,
             ImplCompletionKind::All,
-            match name_ref {
-                Some(name) => name.syntax().text_range(),
+            match name_ref
+                .as_ref()
+                .and_then(|name| ctx.sema.original_syntax_node_rooted(name.syntax()))
+            {
+                Some(name) => name.text_range(),
                 None => ctx.source_range(),
             },
             impl_,
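Here the fix is the missing upmapping itself: the range passed along used to be taken straight from the macro-file `name_ref`, so under an attribute macro it pointed into the expansion. It is now taken from the upmapped node, with `ctx.source_range()` as the fallback when the node cannot be mapped back to the original file.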
@@ -516,7 +519,7 @@ fn function_declaration(
 mod tests {
     use expect_test::expect;

-    use crate::tests::{check_edit, check_no_kw};
+    use crate::tests::{check, check_edit, check_no_kw};

     #[test]
     fn no_completion_inside_fn() {
@@ -1639,4 +1642,51 @@ impl DesugaredAsyncTrait for () {
 "#,
         );
     }
+
+    #[test]
+    fn within_attr_macro() {
+        check(
+            r#"
+//- proc_macros: identity
+trait Trait {
+    fn foo(&self) {}
+    fn bar(&self) {}
+    fn baz(&self) {}
+}
+
+#[proc_macros::identity]
+impl Trait for () {
+    f$0
+}
+"#,
+            expect![[r#"
+                me fn bar(..)
+                me fn baz(..)
+                me fn foo(..)
+                md proc_macros
+                kw crate::
+                kw self::
+            "#]],
+        );
+        check(
+            r#"
+//- proc_macros: identity
+trait Trait {
+    fn foo(&self) {}
+    fn bar(&self) {}
+    fn baz(&self) {}
+}
+
+#[proc_macros::identity]
+impl Trait for () {
+    fn $0
+}
+"#,
+            expect![[r#"
+                me fn bar(..)
+                me fn baz(..)
+                me fn foo(..)
+            "#]],
+        );
+    }
 }
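The new test uses rust-analyzer's fixture conventions: `//- proc_macros: identity` makes the identity attribute proc macro available to the fixture, and `$0` marks the cursor. The expect-test snapshots list the offered completions with their kind tags (`me` for methods, `md` for modules, `kw` for keywords); in the second fixture, where the cursor follows `fn `, only the trait's methods are offered.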
Second changed file:
@@ -5,7 +5,7 @@ use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
 use ide_db::{active_parameter::ActiveParameter, RootDatabase};
 use itertools::Either;
 use syntax::{
-    algo::{ancestors_at_offset, find_node_at_offset, non_trivia_sibling},
+    algo::{self, ancestors_at_offset, find_node_at_offset, non_trivia_sibling},
     ast::{
         self, AttrKind, HasArgList, HasGenericArgs, HasGenericParams, HasLoopBody, HasName,
         NameOrNameRef,
@@ -85,6 +85,11 @@ pub(super) fn expand_and_analyze(
     })
 }

+fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Option<SyntaxToken> {
+    let token = file.token_at_offset(offset).left_biased()?;
+    algo::skip_whitespace_token(token, Direction::Prev)
+}
+
 /// Expand attributes and macro calls at the current cursor position for both the original file
 /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
 /// and speculative states stay in sync.
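For reference, a minimal runnable sketch of what the new helper does, written against the standalone `ra_ap_syntax` crate (the published copy of rust-analyzer's `syntax` crate). The crate choice, the `Edition` argument to `SourceFile::parse`, and the chosen offset are illustrative assumptions, not part of the change:

    use syntax::{algo, AstNode, Direction, Edition, SourceFile, TextSize};

    fn main() {
        // A tiny impl whose "cursor" offset sits inside trailing whitespace.
        let text = "impl Trait for () {\n    \n}";
        let file = SourceFile::parse(text, Edition::CURRENT).tree();

        // Left-biased lookup lands on the whitespace token containing the offset...
        let token = file
            .syntax()
            .token_at_offset(TextSize::from(23u32)) // inside the blank line's indentation
            .left_biased()
            // ...and skipping backwards over whitespace walks to the nearest
            // real token, here the `{` that opens the impl body.
            .and_then(|tok| algo::skip_whitespace_token(tok, Direction::Prev));

        println!("{:?}", token.map(|t| t.kind())); // Some(L_CURLY)
    }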
@@ -125,9 +130,7 @@ fn expand(

     // Left biased since there may already be an identifier token there, and we appended to it.
     if !sema.might_be_inside_macro_call(&fake_ident_token)
-        && original_file
-            .token_at_offset(original_offset + relative_offset)
-            .left_biased()
+        && token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
             .is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
     {
         // Recursion base case.
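The recursion base case now resolves the original-file token with the new whitespace-skipping helper instead of a plain left-biased `token_at_offset` lookup, so a cursor that sits in whitespace in the original file is still checked against the nearest preceding real token when deciding whether it might be inside a macro call.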
@@ -143,9 +146,11 @@ fn expand(

     let parent_item =
         |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+    let original_node = token_at_offset_ignore_whitespace(&original_file, original_offset)
+        .and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
     let ancestor_items = iter::successors(
         Option::zip(
-            find_node_at_offset::<ast::Item>(&original_file, original_offset),
+            original_node,
             find_node_at_offset::<ast::Item>(
                 &speculative_file,
                 fake_ident_token.text_range().start(),
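Similarly, the original-file anchor for the ancestor-item walk is no longer `find_node_at_offset::<ast::Item>` at the raw offset but the enclosing `ast::Item` of the whitespace-skipping token lookup, which keeps the original and speculative ancestor chains aligned even when the cursor sits in whitespace (as in the `fn $0` test above).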