Speculatively expand attributes in completions

Author: Lukas Wirth
Date: 2021-09-02 18:54:09 +02:00
Commit: 2b907652ee (parent c3eb646487)
7 changed files with 238 additions and 66 deletions

Changed file 1 of 7:

@@ -210,6 +210,7 @@ impl ChangeFixture {
         let proc_lib_file = file_id;
         file_id.0 += 1;
+        let (proc_macro, source) = test_proc_macros(&proc_macros);
         let mut fs = FileSet::default();
         fs.insert(
             proc_lib_file,
@@ -217,7 +218,7 @@ impl ChangeFixture {
         );
         roots.push(SourceRoot::new_library(fs));
-        change.change_file(proc_lib_file, Some(Arc::new(String::new())));
+        change.change_file(proc_lib_file, Some(Arc::new(String::from(source))));

         let all_crates = crate_graph.crates_in_topological_order();
@@ -228,7 +229,7 @@ impl ChangeFixture {
             CfgOptions::default(),
             CfgOptions::default(),
             Env::default(),
-            test_proc_macros(&proc_macros),
+            proc_macro,
         );

         for krate in all_crates {
@@ -250,14 +251,22 @@ impl ChangeFixture {
     }
 }

-fn test_proc_macros(proc_macros: &[String]) -> Vec<ProcMacro> {
-    std::array::IntoIter::new([ProcMacro {
+fn test_proc_macros(proc_macros: &[String]) -> (Vec<ProcMacro>, String) {
+    // The source here is only required so that paths to the macros exist and are resolvable.
+    let source = r#"
+#[proc_macro_attribute]
+pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    item
+}
+"#;
+    let proc_macros = std::array::IntoIter::new([ProcMacro {
         name: "identity".into(),
         kind: crate::ProcMacroKind::Attr,
         expander: Arc::new(IdentityProcMacroExpander),
     }])
     .filter(|pm| proc_macros.iter().any(|name| name == &pm.name))
-    .collect()
+    .collect();
+    (proc_macros, source.into())
 }

 #[derive(Debug, Clone, Copy)]

Changed file 2 of 7:

@@ -166,6 +166,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }

+    pub fn speculative_expand_attr_macro(
+        &self,
+        actual_macro_call: &ast::Item,
+        speculative_args: &ast::Item,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
+    }
+
     // FIXME: Rename to descend_into_macros_single
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         self.imp.descend_into_macros(token).pop().unwrap()
@@ -452,7 +461,24 @@ impl<'db> SemanticsImpl<'db> {
         hir_expand::db::expand_speculative(
             self.db.upcast(),
             macro_call_id,
-            speculative_args,
+            speculative_args.syntax(),
+            token_to_map,
+        )
+    }
+
+    fn speculative_expand_attr(
+        &self,
+        actual_macro_call: &ast::Item,
+        speculative_args: &ast::Item,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        let sa = self.analyze(actual_macro_call.syntax());
+        let macro_call = InFile::new(sa.file_id, actual_macro_call.clone());
+        let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+        hir_expand::db::expand_speculative(
+            self.db.upcast(),
+            macro_call_id,
+            speculative_args.syntax(),
             token_to_map,
         )
     }
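The new `speculative_expand_attr_macro` mirrors the existing `speculative_expand` entry point, but takes the attributed item (plus a copy of it containing the fake ident) instead of a macro call's token tree. A minimal sketch of how a caller might use it; the method, its signature, and the types come from the diff, while the wrapper function and variable names are illustrative only:

    use hir::Semantics;
    use ide_db::RootDatabase;
    use syntax::{ast, SyntaxNode, SyntaxToken};

    // Illustrative wrapper: expand the attribute macro on the fake-ident copy of the
    // item and map the fake ident token into that expansion, without touching the
    // real file.
    fn descend_into_attr_expansion(
        sema: &Semantics<'_, RootDatabase>,
        actual_item: &ast::Item,          // item under the real cursor
        item_with_fake_ident: &ast::Item, // same item, with a fake ident spliced in
        fake_ident_token: SyntaxToken,
    ) -> Option<(SyntaxNode, SyntaxToken)> {
        sema.speculative_expand_attr_macro(actual_item, item_with_fake_ident, fake_ident_token)
    }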

Changed file 3 of 7:

@@ -141,27 +141,40 @@ pub trait AstDatabase: SourceDatabase {
 pub fn expand_speculative(
     db: &dyn AstDatabase,
     actual_macro_call: MacroCallId,
-    speculative_args: &ast::TokenTree,
+    speculative_args: &SyntaxNode,
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
-    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(speculative_args.syntax());
-    let range =
-        token_to_map.text_range().checked_sub(speculative_args.syntax().text_range().start())?;
-    let token_id = tmap_1.token_by_range(range)?;
-    let macro_def = {
-        let loc: MacroCallLoc = db.lookup_intern_macro(actual_macro_call);
-        db.macro_def(loc.def)?
-    };
-    let speculative_expansion = macro_def.expand(db, actual_macro_call, &tt);
+    let loc = db.lookup_intern_macro(actual_macro_call);
+    let macro_def = db.macro_def(loc.def)?;
+
+    // Fetch token id in the speculative args
+    let censor = censor_for_macro_input(&loc, &speculative_args);
+    let (tt, args_tmap) = mbe::syntax_node_to_token_tree_censored(&speculative_args, censor);
+    let range = token_to_map.text_range().checked_sub(speculative_args.text_range().start())?;
+    let token_id = args_tmap.token_by_range(range)?;
+
+    let speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
+        let attr_arg = match &loc.kind {
+            // FIXME make attr arg speculative as well
+            MacroCallKind::Attr { attr_args, .. } => {
+                let mut attr_args = attr_args.0.clone();
+                mbe::Shift::new(&tt).shift_all(&mut attr_args);
+                Some(attr_args)
+            }
+            _ => None,
+        };
+        expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
+    } else {
+        macro_def.expand(db, actual_macro_call, &tt)
+    };
+
     let expand_to = macro_expand_to(db, actual_macro_call);
-
-    let (node, tmap_2) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to).ok()?;
+    let (node, rev_tmap) =
+        token_tree_to_syntax_node(&speculative_expansion.value, expand_to).ok()?;

     let token_id = macro_def.map_id_down(token_id);
-    let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
+    let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
@@ -259,7 +272,19 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
     let loc = db.lookup_intern_macro(id);
     let node = SyntaxNode::new_root(arg);
-    let censor = match loc.kind {
+    let censor = censor_for_macro_input(&loc, &node);
+    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
+    if loc.def.is_proc_macro() {
+        // proc macros expect their inputs without parentheses, MBEs expect it with them included
+        tt.delimiter = None;
+    }
+    Some(Arc::new((tt, tmap)))
+}
+
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Option<TextRange> {
+    match loc.kind {
         MacroCallKind::FnLike { .. } => None,
         MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
             Some(item) => item
@@ -275,15 +300,7 @@ fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree,
             }
             None => None,
         },
-    };
-    let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
-    if loc.def.is_proc_macro() {
-        // proc macros expect their inputs without parentheses, MBEs expect it with them included
-        tt.delimiter = None;
     }
-    Some(Arc::new((tt, tmap)))
 }

 fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
@@ -367,11 +384,11 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Ar
         None => return ExpandResult::str_err("Failed to lower macro args to token tree".into()),
     };

-    let macro_rules = match db.macro_def(loc.def) {
+    let expander = match db.macro_def(loc.def) {
         Some(it) => it,
         None => return ExpandResult::str_err("Failed to find macro definition".into()),
     };
-    let ExpandResult { value: tt, err } = macro_rules.expand(db, id, &macro_arg.0);
+    let ExpandResult { value: tt, err } = expander.expand(db, id, &macro_arg.0);
     // Set a hard limit for the expanded tt
     let count = tt.count();
     // XXX: Make ExpandResult a real error and use .map_err instead?
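Both `expand_speculative` and `macro_arg` now go through the extracted `censor_for_macro_input`, which returns the text range of the invoking attribute that should be stripped from the item before it is lowered to a token tree. A standalone, string-level sketch of what censoring means conceptually; none of this is rust-analyzer API, and the hard-coded range is chosen by hand for the example:

    // Conceptual only: "censoring" drops a range of the input before the macro sees it,
    // e.g. a derive attribute is not part of the input handed to the derive macro.
    fn censor(input: &str, censor: Option<std::ops::Range<usize>>) -> String {
        match censor {
            Some(range) => format!("{}{}", &input[..range.start], &input[range.end..]),
            None => input.to_owned(),
        }
    }

    fn main() {
        let item = "#[derive(Clone)] struct S;";
        // The real code derives the range from the syntax tree instead of hard-coding it.
        assert_eq!(censor(item, Some(0..17)), "struct S;");
        assert_eq!(censor("fn main() {}", None), "fn main() {}");
    }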

Changed file 4 of 7:

@@ -370,6 +370,7 @@ impl ExpansionInfo {
     ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id = if let Some(item) = item {
+            // check if we are mapping down in an attribute input
             let call_id = match self.expanded.file_id.0 {
                 HirFileIdRepr::FileId(_) => return None,
                 HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id,

Changed file 5 of 7:

@@ -175,48 +175,90 @@ impl<'a> CompletionContext<'a> {
             incomplete_let: false,
             no_completion_required: false,
         };
-
-        let mut original_file = original_file.syntax().clone();
-        let mut speculative_file = file_with_fake_ident.syntax().clone();
-        let mut offset = position.offset;
-        let mut fake_ident_token = fake_ident_token;
-
-        // Are we inside a macro call?
-        while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
-            find_node_at_offset::<ast::MacroCall>(&original_file, offset),
-            find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
-        ) {
-            if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
-                != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
-            {
-                break;
-            }
-            let speculative_args = match macro_call_with_fake_ident.token_tree() {
-                Some(tt) => tt,
-                None => break,
-            };
-            if let (Some(actual_expansion), Some(speculative_expansion)) = (
-                ctx.sema.expand(&actual_macro_call),
-                ctx.sema.speculative_expand(
-                    &actual_macro_call,
-                    &speculative_args,
-                    fake_ident_token,
-                ),
-            ) {
-                let new_offset = speculative_expansion.1.text_range().start();
-                if new_offset > actual_expansion.text_range().end() {
-                    break;
-                }
-                original_file = actual_expansion;
-                speculative_file = speculative_expansion.0;
-                fake_ident_token = speculative_expansion.1;
-                offset = new_offset;
-            } else {
-                break;
-            }
-        }
-        ctx.fill(&original_file, speculative_file, offset);
+        ctx.expand_and_fill(
+            original_file.syntax().clone(),
+            file_with_fake_ident.syntax().clone(),
+            position.offset,
+            fake_ident_token,
+        );
         Some(ctx)
     }

+    fn expand_and_fill(
+        &mut self,
+        mut original_file: SyntaxNode,
+        mut speculative_file: SyntaxNode,
+        mut offset: TextSize,
+        mut fake_ident_token: SyntaxToken,
+    ) {
+        loop {
+            if let (Some(actual_item), Some(item_with_fake_ident)) = (
+                find_node_at_offset::<ast::Item>(&original_file, offset),
+                find_node_at_offset::<ast::Item>(&speculative_file, offset),
+            ) {
+                match (
+                    self.sema.expand_attr_macro(&actual_item),
+                    self.sema.speculative_expand_attr_macro(
+                        &actual_item,
+                        &item_with_fake_ident,
+                        fake_ident_token.clone(),
+                    ),
+                ) {
+                    (Some(actual_expansion), Some(speculative_expansion)) => {
+                        let new_offset = speculative_expansion.1.text_range().start();
+                        if new_offset > actual_expansion.text_range().end() {
+                            break;
+                        }
+                        original_file = actual_expansion;
+                        speculative_file = speculative_expansion.0;
+                        fake_ident_token = speculative_expansion.1;
+                        offset = new_offset;
+                        continue;
+                    }
+                    (None, None) => (),
+                    _ => break,
+                }
+            }
+            if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+                find_node_at_offset::<ast::MacroCall>(&original_file, offset),
+                find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
+            ) {
+                let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+                let mac_call_path1 =
+                    macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+                if mac_call_path0 != mac_call_path1 {
+                    break;
+                }
+                let speculative_args = match macro_call_with_fake_ident.token_tree() {
+                    Some(tt) => tt,
+                    None => break,
+                };
+                if let (Some(actual_expansion), Some(speculative_expansion)) = (
+                    self.sema.expand(&actual_macro_call),
+                    self.sema.speculative_expand(
+                        &actual_macro_call,
+                        &speculative_args,
+                        fake_ident_token,
+                    ),
+                ) {
+                    let new_offset = speculative_expansion.1.text_range().start();
+                    if new_offset > actual_expansion.text_range().end() {
+                        break;
+                    }
+                    original_file = actual_expansion;
+                    speculative_file = speculative_expansion.0;
+                    fake_ident_token = speculative_expansion.1;
+                    offset = new_offset;
+                } else {
+                    break;
+                }
+            } else {
+                break;
+            }
+        }
+
+        self.fill(&original_file, speculative_file, offset);
+    }
+
     /// Checks whether completions in that particular case don't make much sense.
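The restructured `expand_and_fill` drives the feature: starting from the real file and the copy containing the fake ident, it first tries to descend into an attribute-macro expansion of the item around the cursor and then into a fn-like macro call, looping until neither applies or the two sides stop matching. A simplified, self-contained sketch of that control flow; the `State` type and both `try_*` helpers are invented stand-ins, and only the loop shape mirrors the diff:

    // Stand-in for the (original_file, speculative_file, offset, fake_ident_token) state.
    struct State {
        depth: u32,
    }

    // Each helper either descends one macro level (returning the new state) or gives up.
    fn try_attr_expansion(s: &State) -> Option<State> {
        (s.depth < 1).then(|| State { depth: s.depth + 1 })
    }
    fn try_macro_call_expansion(_s: &State) -> Option<State> {
        None
    }

    fn expand_and_fill(mut s: State) -> State {
        loop {
            // Prefer an attribute macro on the item around the cursor...
            if let Some(next) = try_attr_expansion(&s) {
                s = next;
                continue;
            }
            // ...otherwise try a fn-like macro call at the cursor.
            match try_macro_call_expansion(&s) {
                Some(next) => s = next,
                None => break, // nothing left to descend into: fill the context from here
            }
        }
        s
    }

    fn main() {
        let final_state = expand_and_fill(State { depth: 0 });
        assert_eq!(final_state.depth, 1);
    }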

Changed file 6 of 7:

@@ -15,6 +15,7 @@ mod item_list;
 mod item;
 mod pattern;
 mod predicate;
+mod proc_macros;
 mod record;
 mod sourcegen;
 mod type_pos;
@@ -23,7 +24,7 @@ mod visibility;

 use std::mem;

-use hir::{PrefixKind, Semantics};
+use hir::{db::DefDatabase, PrefixKind, Semantics};
 use ide_db::{
     base_db::{fixture::ChangeFixture, FileLoader, FilePosition},
     helpers::{
@@ -96,6 +97,7 @@ fn completion_list_with_config(config: CompletionConfig, ra_fixture: &str) -> St
 pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
     let change_fixture = ChangeFixture::parse(ra_fixture);
     let mut database = RootDatabase::default();
+    database.set_enable_proc_attr_macros(true);
     database.apply_change(change_fixture.change);
     let (file_id, range_or_offset) = change_fixture.file_position.expect("expected a marker ($0)");
     let offset = range_or_offset.expect_offset();
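The new `DefDatabase` import exists so the test database can opt in to attribute proc-macro expansion, which is off by default; without the flag, the `#[proc_macros::identity]` attribute in the fixtures below would never be expanded. A small sketch of the resulting setup, assuming only the items imported in this diff and the setter called above:

    use hir::db::DefDatabase;
    use ide_db::RootDatabase;

    // Build a completion test database with attribute proc-macro expansion enabled.
    fn test_database() -> RootDatabase {
        let mut database = RootDatabase::default();
        database.set_enable_proc_attr_macros(true);
        database
    }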

Changed file 7 of 7:

@@ -0,0 +1,75 @@
+//! Completion tests for expressions.
+use expect_test::{expect, Expect};
+
+use crate::tests::completion_list;
+
+fn check(ra_fixture: &str, expect: Expect) {
+    let actual = completion_list(ra_fixture);
+    expect.assert_eq(&actual)
+}
+
+#[test]
+fn complete_dot_in_attr() {
+    check(
+        r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+    Foo.$0
+}
+"#,
+        expect![[r#"
+            me foo() fn(&self)
+            sn ref &expr
+            sn refm &mut expr
+            sn match match expr {}
+            sn box Box::new(expr)
+            sn ok Ok(expr)
+            sn err Err(expr)
+            sn some Some(expr)
+            sn dbg dbg!(expr)
+            sn dbgr dbg!(&expr)
+            sn call function(expr)
+            sn let let
+            sn letm let mut
+        "#]],
+    )
+}
+
+#[test]
+fn complete_dot_in_attr2() {
+    check(
+        r#"
+//- proc_macros: identity
+pub struct Foo;
+impl Foo {
+    fn foo(&self) {}
+}
+
+#[proc_macros::identity]
+fn main() {
+    Foo.f$0
+}
+"#,
+        expect![[r#"
+            me foo() fn(&self)
+            sn ref &expr
+            sn refm &mut expr
+            sn match match expr {}
+            sn box Box::new(expr)
+            sn ok Ok(expr)
+            sn err Err(expr)
+            sn some Some(expr)
+            sn dbg dbg!(expr)
+            sn dbgr dbg!(&expr)
+            sn call function(expr)
+            sn let let
+            sn letm let mut
+        "#]],
+    )
+}