11664: fix: Properly handle proc-macro crate types for nameres r=Veykril a=Veykril

bors r+

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
bors[bot] 2022-03-09 21:45:42 +00:00 committed by GitHub
commit 5b51cb835a
12 changed files with 219 additions and 150 deletions
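
In outline, the change has two parts. On the attribute side, `Attr` gains dedicated accessors (`string_value`, `single_ident_value`, `token_tree_value`) so call sites no longer match on `AttrInput` by hand, and doc-comment indentation handling is factored out into a `doc_indent` helper. On the name-resolution side, the def collector now tracks whether the current crate is a proc-macro crate, taken from the crate graph or from a `#![crate_type = "proc-macro"]` attribute, and gates proc-macro export and the censoring of other items on that flag instead of on whether any proc macro happened to be exported. Below is a minimal, self-contained sketch of the accessor pattern; the types are simplified stand-ins, not the real `tt`/`SmolStr` definitions.

    // Sketch of the accessor pattern introduced on `Attr`; stand-in types only.
    #[allow(dead_code)]
    struct Subtree {
        token_trees: Vec<String>, // stand-in for `Vec<tt::TokenTree>`
    }

    enum AttrInput {
        Literal(String),    // `#[key = "value"]`
        TokenTree(Subtree), // `#[key(...)]`
    }

    struct Attr {
        input: Option<AttrInput>,
    }

    impl Attr {
        /// `#[key = "string"]`
        fn string_value(&self) -> Option<&str> {
            match self.input.as_ref()? {
                AttrInput::Literal(s) => Some(s.as_str()),
                _ => None,
            }
        }

        /// `#[key(TokenTree)]`
        fn token_tree_value(&self) -> Option<&Subtree> {
            match self.input.as_ref()? {
                AttrInput::TokenTree(tt) => Some(tt),
                _ => None,
            }
        }
    }

    fn main() {
        // `#![crate_type = "proc-macro"]` style: a literal value.
        let crate_type = Attr { input: Some(AttrInput::Literal("proc-macro".into())) };
        assert_eq!(crate_type.string_value(), Some("proc-macro"));
        assert!(crate_type.token_tree_value().is_none());

        // `#[derive(Clone)]` style: a token-tree value.
        let derive = Attr {
            input: Some(AttrInput::TokenTree(Subtree { token_trees: vec!["Clone".into()] })),
        };
        assert!(derive.string_value().is_none());
        assert!(derive.token_tree_value().is_some());
    }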

@@ -154,18 +154,21 @@ impl RawAttrs {
return smallvec![attr.clone()];
}
let subtree = match attr.input.as_deref() {
Some(AttrInput::TokenTree(it, _)) => it,
let subtree = match attr.token_tree_value() {
Some(it) => it,
_ => return smallvec![attr.clone()],
};
// Input subtree is: `(cfg, $(attr),+)`
// Split it up into a `cfg` subtree and the `attr` subtrees.
// FIXME: There should be a common API for this.
let mut parts = subtree.token_trees.split(
|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ','),
);
let cfg = parts.next().unwrap();
let mut parts = subtree.token_trees.split(|tt| {
matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))
});
let cfg = match parts.next() {
Some(it) => it,
None => return smallvec![],
};
let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
let cfg = CfgExpr::parse(&cfg);
let index = attr.id;
@@ -259,17 +262,8 @@ impl Attrs {
}
pub fn docs(&self) -> Option<Documentation> {
let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? {
AttrInput::Literal(s) => Some(s),
AttrInput::TokenTree(..) => None,
});
let indent = docs
.clone()
.flat_map(|s| s.lines())
.filter(|line| !line.chars().all(|c| c.is_whitespace()))
.map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
.min()
.unwrap_or(0);
let docs = self.by_key("doc").attrs().filter_map(|attr| attr.string_value());
let indent = doc_indent(self);
let mut buf = String::new();
for doc in docs {
// str::lines doesn't yield anything for the empty string
@@ -507,18 +501,9 @@ impl AttrsWithOwner {
&self,
db: &dyn DefDatabase,
) -> Option<(Documentation, DocsRangeMap)> {
// FIXME: code duplication in `docs` above
let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? {
AttrInput::Literal(s) => Some((s, attr.id)),
AttrInput::TokenTree(..) => None,
});
let indent = docs
.clone()
.flat_map(|(s, _)| s.lines())
.filter(|line| !line.chars().all(|c| c.is_whitespace()))
.map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
.min()
.unwrap_or(0);
let docs =
self.by_key("doc").attrs().filter_map(|attr| attr.string_value().map(|s| (s, attr.id)));
let indent = doc_indent(self);
let mut buf = String::new();
let mut mapping = Vec::new();
for (doc, idx) in docs {
@@ -557,6 +542,18 @@ impl AttrsWithOwner {
}
}
fn doc_indent(attrs: &Attrs) -> usize {
attrs
.by_key("doc")
.attrs()
.filter_map(|attr| attr.string_value())
.flat_map(|s| s.lines())
.filter(|line| !line.chars().all(|c| c.is_whitespace()))
.map(|line| line.chars().take_while(|c| c.is_whitespace()).count())
.min()
.unwrap_or(0)
}
fn inner_attributes(
syntax: &SyntaxNode,
) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
@@ -773,45 +770,58 @@ impl Attr {
Self::from_src(db, ast, hygiene, id)
}
pub fn path(&self) -> &ModPath {
&self.path
}
}
impl Attr {
/// #[path = "string"]
pub fn string_value(&self) -> Option<&SmolStr> {
match self.input.as_deref()? {
AttrInput::Literal(it) => Some(it),
_ => None,
}
}
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
_ => None,
}
}
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
AttrInput::TokenTree(subtree, _) => Some(subtree),
_ => None,
}
}
/// Parses this attribute as a token tree consisting of comma separated paths.
pub fn parse_path_comma_token_tree(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
let args = match self.input.as_deref() {
Some(AttrInput::TokenTree(args, _)) => args,
_ => return None,
};
let args = self.token_tree_value()?;
if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) {
return None;
}
let paths = args
.token_trees
.iter()
.group_by(|tt| {
matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))
})
.into_iter()
.filter(|(comma, _)| !*comma)
.map(|(_, tts)| {
let segments = tts.filter_map(|tt| match tt {
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
.map(|tts| {
let segments = tts.iter().filter_map(|tt| match tt {
tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()),
_ => None,
});
ModPath::from_segments(PathKind::Plain, segments)
})
.collect::<Vec<_>>();
});
Some(paths.into_iter())
}
pub fn path(&self) -> &ModPath {
&self.path
}
pub fn string_value(&self) -> Option<&SmolStr> {
match self.input.as_deref()? {
AttrInput::Literal(it) => Some(it),
_ => None,
}
Some(paths)
}
}
@@ -823,17 +833,11 @@ pub struct AttrQuery<'attr> {
impl<'attr> AttrQuery<'attr> {
pub fn tt_values(self) -> impl Iterator<Item = &'attr Subtree> {
self.attrs().filter_map(|attr| match attr.input.as_deref()? {
AttrInput::TokenTree(it, _) => Some(it),
_ => None,
})
self.attrs().filter_map(|attr| attr.token_tree_value())
}
pub fn string_value(self) -> Option<&'attr SmolStr> {
self.attrs().find_map(|attr| match attr.input.as_deref()? {
AttrInput::Literal(it) => Some(it),
_ => None,
})
self.attrs().find_map(|attr| attr.string_value())
}
pub fn exists(self) -> bool {
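
Both `RawAttrs::filter` (the `cfg_attr` expansion above) and `parse_path_comma_token_tree` now split the attribute's token trees on comma punctuation with `slice::split` instead of the earlier `group_by`/manual iteration. A minimal sketch of that splitting step, with a plain enum standing in for `tt::TokenTree`:

    // Stand-in token type; the real code matches on `tt::TokenTree::Leaf(tt::Leaf::Punct(..))`.
    #[derive(Debug, PartialEq)]
    enum Token {
        Ident(&'static str),
        Punct(char),
    }

    fn main() {
        // Shape of the `cfg_attr` input described in the diff, `(cfg, $(attr),+)`,
        // here flattened to `cfg , attr1 , attr2` with the parentheses stripped.
        let tokens = [
            Token::Ident("cfg"),
            Token::Punct(','),
            Token::Ident("attr1"),
            Token::Punct(','),
            Token::Ident("attr2"),
        ];

        // `slice::split` yields the sub-slices between commas, commas excluded.
        let mut parts = tokens.split(|t| matches!(t, Token::Punct(',')));

        assert_eq!(parts.next(), Some(&[Token::Ident("cfg")][..]));
        assert_eq!(parts.next(), Some(&[Token::Ident("attr1")][..]));
        assert_eq!(parts.next(), Some(&[Token::Ident("attr2")][..]));
        assert_eq!(parts.next(), None);
    }

The first chunk (`cfg` here) is what `RawAttrs::filter` feeds to `CfgExpr::parse`; returning `smallvec![]` when it is missing is the new graceful fallback replacing the old `unwrap`.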

@@ -22,10 +22,10 @@ use itertools::Itertools;
use la_arena::Idx;
use limit::Limit;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::ast;
use syntax::{ast, SmolStr};
use crate::{
attr::{Attr, AttrId, AttrInput, Attrs},
attr::{Attr, AttrId, Attrs},
attr_macro_as_call_id,
db::DefDatabase,
derive_macro_as_call_id,
@@ -61,7 +61,8 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
let mut deps = FxHashMap::default();
// populate external prelude and dependency list
for dep in &crate_graph[def_map.krate].dependencies {
let krate = &crate_graph[def_map.krate];
for dep in &krate.dependencies {
tracing::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id);
let dep_def_map = db.crate_def_map(dep.crate_id);
let dep_root = dep_def_map.module_id(dep_def_map.root);
@@ -73,9 +74,9 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
}
}
let cfg_options = &crate_graph[def_map.krate].cfg_options;
let proc_macros = &crate_graph[def_map.krate].proc_macro;
let proc_macros = proc_macros
let cfg_options = &krate.cfg_options;
let proc_macros = krate
.proc_macro
.iter()
.enumerate()
.map(|(idx, it)| {
@@ -87,6 +88,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
)
})
.collect();
let is_proc_macro = krate.is_proc_macro;
let mut collector = DefCollector {
db,
@@ -103,6 +105,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
from_glob_import: Default::default(),
skip_attrs: Default::default(),
derive_helpers_in_scope: Default::default(),
is_proc_macro,
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@@ -243,6 +246,7 @@ struct DefCollector<'a> {
/// empty when proc. macro support is disabled (in which case we still do name resolution for
/// them).
proc_macros: Vec<(Name, ProcMacroExpander)>,
is_proc_macro: bool,
exports_proc_macros: bool,
from_glob_import: PerNsGlobImports,
/// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute.
@@ -277,27 +281,29 @@ impl DefCollector<'_> {
};
if *attr_name == hir_expand::name![recursion_limit] {
if let Some(input) = &attr.input {
if let AttrInput::Literal(limit) = &**input {
if let Ok(limit) = limit.parse() {
self.def_map.recursion_limit = Some(limit);
}
if let Some(limit) = attr.string_value() {
if let Ok(limit) = limit.parse() {
self.def_map.recursion_limit = Some(limit);
}
}
continue;
}
if *attr_name == hir_expand::name![crate_type] {
if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) {
self.is_proc_macro = true;
}
continue;
}
let attr_is_register_like = *attr_name == hir_expand::name![register_attr]
|| *attr_name == hir_expand::name![register_tool];
if !attr_is_register_like {
continue;
}
let registered_name = match attr.input.as_deref() {
Some(AttrInput::TokenTree(subtree, _)) => match &*subtree.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(name))] => name.as_name(),
_ => continue,
},
let registered_name = match attr.single_ident_value() {
Some(ident) => ident.as_name(),
_ => continue,
};
@@ -404,8 +410,7 @@ impl DefCollector<'_> {
}
self.unresolved_imports = unresolved_imports;
// FIXME: This condition should instead check if this is a `proc-macro` type crate.
if self.exports_proc_macros {
if self.is_proc_macro {
// A crate exporting procedural macros is not allowed to export anything else.
//
// Additionally, while the proc macro entry points must be `pub`, they are not publicly
@@ -1555,22 +1560,21 @@ impl ModCollector<'_, '_> {
let fn_id =
FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
let is_proc_macro = attrs.parse_proc_macro_decl(&it.name);
let vis = match is_proc_macro {
Some(proc_macro) => {
// FIXME: this should only be done in the root module of `proc-macro` crates, not everywhere
let module_id = def_map.module_id(def_map.root());
self.def_collector.export_proc_macro(
proc_macro,
ItemTreeId::new(self.tree_id, id),
fn_id,
module_id,
);
Visibility::Module(module_id)
let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
if self.def_collector.is_proc_macro {
if self.module_id == def_map.root {
if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
let crate_root = def_map.module_id(def_map.root);
self.def_collector.export_proc_macro(
proc_macro,
ItemTreeId::new(self.tree_id, id),
fn_id,
crate_root,
);
}
}
None => resolve_vis(def_map, &self.item_tree[it.visibility]),
};
}
update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
}
ModItem::Struct(id) => {
@@ -2099,6 +2103,7 @@ mod tests {
from_glob_import: Default::default(),
skip_attrs: Default::default(),
derive_helpers_in_scope: Default::default(),
is_proc_macro: false,
};
collector.seed_with_top_level();
collector.collect();
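
The collector-side change, condensed: a crate counts as a proc-macro crate if the crate graph marks it as one or if a `#![crate_type = "proc-macro"]` attribute is present, and only such crates export their proc macro entry points (and only those in the crate root) while other items are censored; previously this hinged on whether any proc macro happened to be exported. A simplified, self-contained sketch of that gating, not the real collector types:

    // Simplified stand-ins for the collector state; not the real rust-analyzer types.
    struct Krate {
        is_proc_macro: bool,                   // from the crate graph
        crate_type_attr: Option<&'static str>, // `#![crate_type = "..."]`, if any
    }

    struct Item {
        name: &'static str,
        in_crate_root: bool,
        is_proc_macro_entry_point: bool, // `#[proc_macro]`, `#[proc_macro_derive]`, ...
    }

    /// Names that remain publicly visible after collection.
    fn exported(krate: &Krate, items: &[Item]) -> Vec<&'static str> {
        // The flag now comes from the crate graph *or* the crate_type attribute,
        // rather than from "did this crate export a proc macro".
        let is_proc_macro = krate.is_proc_macro || krate.crate_type_attr == Some("proc-macro");

        items
            .iter()
            .filter(|item| {
                if is_proc_macro {
                    // A proc-macro crate only exports entry points defined in its root module.
                    item.is_proc_macro_entry_point && item.in_crate_root
                } else {
                    true // ordinary crate: normal visibility rules apply
                }
            })
            .map(|item| item.name)
            .collect()
    }

    fn main() {
        let krate = Krate { is_proc_macro: false, crate_type_attr: Some("proc-macro") };
        let items = [
            Item { name: "derive_macro", in_crate_root: true, is_proc_macro_entry_point: true },
            Item { name: "TokenStream", in_crate_root: true, is_proc_macro_entry_point: false },
        ];
        assert_eq!(exported(&krate, &items), ["derive_macro"]);
    }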

@@ -809,6 +809,7 @@ pub macro derive($item:item) {}
struct S;
//- /proc.rs crate:proc
#![crate_type="proc-macro"]
#[proc_macro_derive(Derive, attributes(helper))]
fn derive() {}
"#,
@@ -886,29 +887,30 @@ indirect_macro!();
#[test]
fn resolves_proc_macros() {
check(
r"
struct TokenStream;
r#"
#![crate_type="proc-macro"]
struct TokenStream;
#[proc_macro]
pub fn function_like_macro(args: TokenStream) -> TokenStream {
args
}
#[proc_macro]
pub fn function_like_macro(args: TokenStream) -> TokenStream {
args
}
#[proc_macro_attribute]
pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
item
}
#[proc_macro_attribute]
pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
item
}
#[proc_macro_derive(DummyTrait)]
pub fn derive_macro(_item: TokenStream) -> TokenStream {
TokenStream
}
#[proc_macro_derive(DummyTrait)]
pub fn derive_macro(_item: TokenStream) -> TokenStream {
TokenStream
}
#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
TokenStream
}
",
#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
TokenStream
}
"#,
expect![[r#"
crate
AnotherTrait: m
@@ -927,33 +929,34 @@ fn proc_macro_censoring() {
// Make sure that only proc macros are publicly exported from proc-macro crates.
check(
r"
//- /main.rs crate:main deps:macros
pub use macros::*;
r#"
//- /main.rs crate:main deps:macros
pub use macros::*;
//- /macros.rs crate:macros
pub struct TokenStream;
//- /macros.rs crate:macros
#![crate_type="proc-macro"]
pub struct TokenStream;
#[proc_macro]
pub fn function_like_macro(args: TokenStream) -> TokenStream {
args
}
#[proc_macro]
pub fn function_like_macro(args: TokenStream) -> TokenStream {
args
}
#[proc_macro_attribute]
pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
item
}
#[proc_macro_attribute]
pub fn attribute_macro(_args: TokenStream, item: TokenStream) -> TokenStream {
item
}
#[proc_macro_derive(DummyTrait)]
pub fn derive_macro(_item: TokenStream) -> TokenStream {
TokenStream
}
#[proc_macro_derive(DummyTrait)]
pub fn derive_macro(_item: TokenStream) -> TokenStream {
TokenStream
}
#[macro_export]
macro_rules! mbe {
() => {};
}
",
#[macro_export]
macro_rules! mbe {
() => {};
}
"#,
expect![[r#"
crate
DummyTrait: m
@@ -966,14 +969,15 @@ fn collects_derive_helpers() {
#[test]
fn collects_derive_helpers() {
let def_map = compute_crate_def_map(
r"
struct TokenStream;
r#"
#![crate_type="proc-macro"]
struct TokenStream;
#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
TokenStream
}
",
#[proc_macro_derive(AnotherTrait, attributes(helper_attr))]
pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
TokenStream
}
"#,
);
assert_eq!(def_map.exported_derives.len(), 1);