Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 06:11:35 +00:00)
Auto merge of #16789 - wyatt-herkamp:cfg_attr, r=Veykril
cfg Attribute Stripping for Proc Macro Expansion

This processes `cfg` and `cfg_attr` attributes on the input before proc macro expansion.

Closes #8434, #11657, and #13904
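
For illustration only (this example is not part of the diff; it mirrors the new `test_default_expand_with_cfg` test added below and assumes the `never` cfg is not set): after this change, a derive proc macro sees the annotated item with cfg-disabled parts removed and satisfied `cfg_attr`s flattened to their inner attribute.

```rust
// Sketch of the observable behaviour, not code from this PR.
#[allow(dead_code)]
#[derive(Default)]
enum Bar {
    Foo,
    // `never` is not set, so `not(never)` holds and the inner `default`
    // attribute is kept; the Default derive effectively sees `#[default] Bar`.
    #[cfg_attr(not(never), default)]
    Bar,
}

fn main() {
    // The generated impl therefore picks the variant marked via cfg_attr.
    assert!(matches!(Bar::default(), Bar::Bar));
}
```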
Commit: d3e6fea315
8 changed files with 486 additions and 23 deletions
@@ -31,4 +31,4 @@ mbe.workspace = true
syntax.workspace = true

[lints]
workspace = true
@@ -47,6 +47,7 @@ impl CfgExpr {
    pub fn parse<S>(tt: &tt::Subtree<S>) -> CfgExpr {
        next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
    }

    /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
    pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> {
        match self {
@@ -62,7 +63,6 @@ impl CfgExpr {
        }
    }
}

fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> {
    let name = match it.next() {
        None => return None,
@@ -528,3 +528,121 @@ impl < > $crate::fmt::Debug for Command< > where {
}"#]],
    );
}

#[test]
fn test_debug_expand_with_cfg() {
    check(
        r#"
//- minicore: derive, fmt
use core::fmt::Debug;

#[derive(Debug)]
struct HideAndShow {
    #[cfg(never)]
    always_hide: u32,
    #[cfg(not(never))]
    always_show: u32,
}
#[derive(Debug)]
enum HideAndShowEnum {
    #[cfg(never)]
    AlwaysHide,
    #[cfg(not(never))]
    AlwaysShow{
        #[cfg(never)]
        always_hide: u32,
        #[cfg(not(never))]
        always_show: u32,
    }
}
"#,
        expect![[r#"
use core::fmt::Debug;

#[derive(Debug)]
struct HideAndShow {
    #[cfg(never)]
    always_hide: u32,
    #[cfg(not(never))]
    always_show: u32,
}
#[derive(Debug)]
enum HideAndShowEnum {
    #[cfg(never)]
    AlwaysHide,
    #[cfg(not(never))]
    AlwaysShow{
        #[cfg(never)]
        always_hide: u32,
        #[cfg(not(never))]
        always_show: u32,
    }
}

impl < > $crate::fmt::Debug for HideAndShow< > where {
    fn fmt(&self , f: &mut $crate::fmt::Formatter) -> $crate::fmt::Result {
        match self {
            HideAndShow {
                always_show: always_show,
            }
            =>f.debug_struct("HideAndShow").field("always_show", &always_show).finish()
        }
    }
}
impl < > $crate::fmt::Debug for HideAndShowEnum< > where {
    fn fmt(&self , f: &mut $crate::fmt::Formatter) -> $crate::fmt::Result {
        match self {
            HideAndShowEnum::AlwaysShow {
                always_show: always_show,
            }
            =>f.debug_struct("AlwaysShow").field("always_show", &always_show).finish(),
        }
    }
}"#]],
    );
}

#[test]
fn test_default_expand_with_cfg() {
    check(
        r#"
//- minicore: derive, default
#[derive(Default)]
struct Foo {
    field1: i32,
    #[cfg(never)]
    field2: (),
}
#[derive(Default)]
enum Bar {
    Foo,
    #[cfg_attr(not(never), default)]
    Bar,
}
"#,
        expect![[r#"
#[derive(Default)]
struct Foo {
    field1: i32,
    #[cfg(never)]
    field2: (),
}
#[derive(Default)]
enum Bar {
    Foo,
    #[cfg_attr(not(never), default)]
    Bar,
}

impl < > $crate::default::Default for Foo< > where {
    fn default() -> Self {
        Foo {
            field1: $crate::default::Default::default(),
        }
    }
}
impl < > $crate::default::Default for Bar< > where {
    fn default() -> Self {
        Bar::Bar
    }
}"#]],
    );
}
crates/hir-expand/src/cfg_process.rs (new file, 327 lines)
@@ -0,0 +1,327 @@
//! Processes out #[cfg] and #[cfg_attr] attributes from the input for the derive macro
use std::iter::Peekable;

use cfg::{CfgAtom, CfgExpr};
use rustc_hash::FxHashSet;
use syntax::{
    ast::{self, Attr, HasAttrs, Meta, VariantList},
    AstNode, NodeOrToken, SyntaxElement, SyntaxNode, T,
};
use tracing::{debug, warn};
use tt::SmolStr;

use crate::{db::ExpandDatabase, MacroCallKind, MacroCallLoc};

fn check_cfg_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
    if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
        return None;
    }
    debug!("Evaluating cfg {}", attr);
    let cfg = parse_from_attr_meta(attr.meta()?)?;
    debug!("Checking cfg {:?}", cfg);
    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
    Some(enabled)
}

fn check_cfg_attr_attr(attr: &Attr, loc: &MacroCallLoc, db: &dyn ExpandDatabase) -> Option<bool> {
    if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
        return None;
    }
    debug!("Evaluating cfg_attr {}", attr);
    let cfg_expr = parse_from_attr_meta(attr.meta()?)?;
    debug!("Checking cfg_attr {:?}", cfg_expr);
    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg_expr) != Some(false);
    Some(enabled)
}

fn process_has_attrs_with_possible_comma<I: HasAttrs>(
    items: impl Iterator<Item = I>,
    loc: &MacroCallLoc,
    db: &dyn ExpandDatabase,
    remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    for item in items {
        let field_attrs = item.attrs();
        'attrs: for attr in field_attrs {
            if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
                debug!("censoring type {:?}", item.syntax());
                remove.insert(item.syntax().clone().into());
                // We need to remove the , as well
                remove_possible_comma(&item, remove);
                break 'attrs;
            }

            if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
                if enabled {
                    debug!("Removing cfg_attr tokens {:?}", attr);
                    let meta = attr.meta()?;
                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                    remove.extend(removes_from_cfg_attr);
                } else {
                    debug!("censoring type cfg_attr {:?}", item.syntax());
                    remove.insert(attr.syntax().clone().into());
                    continue;
                }
            }
        }
    }
    Some(())
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum CfgExprStage {
    /// Stripping the CFGExpr part of the attribute
    StrippigCfgExpr,
    /// Found the comma after the CFGExpr. Will keep all tokens until the next comma or the end of the attribute
    FoundComma,
    /// Everything following the attribute. This could be another attribute or the end of the attribute.
    // FIXME: cfg_attr with multiple attributes will not be handled correctly. We will only keep the first attribute
    // Related Issue: https://github.com/rust-lang/rust-analyzer/issues/10110
    EverythingElse,
}
/// This function creates its own set of tokens to remove, to help prevent malformed syntax as input.
fn remove_tokens_within_cfg_attr(meta: Meta) -> Option<FxHashSet<SyntaxElement>> {
    let mut remove: FxHashSet<SyntaxElement> = FxHashSet::default();
    debug!("Enabling attribute {}", meta);
    let meta_path = meta.path()?;
    debug!("Removing {:?}", meta_path.syntax());
    remove.insert(meta_path.syntax().clone().into());

    let meta_tt = meta.token_tree()?;
    debug!("meta_tt {}", meta_tt);
    let mut stage = CfgExprStage::StrippigCfgExpr;
    for tt in meta_tt.token_trees_and_tokens() {
        debug!("Checking {:?}. Stage: {:?}", tt, stage);
        match (stage, tt) {
            (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Node(node)) => {
                remove.insert(node.syntax().clone().into());
            }
            (CfgExprStage::StrippigCfgExpr, syntax::NodeOrToken::Token(token)) => {
                if token.kind() == T![,] {
                    stage = CfgExprStage::FoundComma;
                }
                remove.insert(token.into());
            }
            (CfgExprStage::FoundComma, syntax::NodeOrToken::Token(token))
                if (token.kind() == T![,] || token.kind() == T![')']) =>
            {
                // The end of the attribute or separator for the next attribute
                stage = CfgExprStage::EverythingElse;
                remove.insert(token.into());
            }
            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Node(node)) => {
                remove.insert(node.syntax().clone().into());
            }
            (CfgExprStage::EverythingElse, syntax::NodeOrToken::Token(token)) => {
                remove.insert(token.into());
            }
            // This is an actual attribute
            _ => {}
        }
    }
    if stage != CfgExprStage::EverythingElse {
        warn!("Invalid cfg_attr attribute. {:?}", meta_tt);
        return None;
    }
    Some(remove)
}
/// Removes a possible comma after the [AstNode]
fn remove_possible_comma(item: &impl AstNode, res: &mut FxHashSet<SyntaxElement>) {
    if let Some(comma) = item.syntax().next_sibling_or_token().filter(|it| it.kind() == T![,]) {
        res.insert(comma);
    }
}
fn process_enum(
    variants: VariantList,
    loc: &MacroCallLoc,
    db: &dyn ExpandDatabase,
    remove: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    'variant: for variant in variants.variants() {
        for attr in variant.attrs() {
            if check_cfg_attr(&attr, loc, db).map(|enabled| !enabled).unwrap_or_default() {
                // Rustc does not strip the attribute if it is enabled. So we will leave it
                debug!("censoring type {:?}", variant.syntax());
                remove.insert(variant.syntax().clone().into());
                // We need to remove the , as well
                remove_possible_comma(&variant, remove);
                continue 'variant;
            };

            if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
                if enabled {
                    debug!("Removing cfg_attr tokens {:?}", attr);
                    let meta = attr.meta()?;
                    let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                    remove.extend(removes_from_cfg_attr);
                } else {
                    debug!("censoring type cfg_attr {:?}", variant.syntax());
                    remove.insert(attr.syntax().clone().into());
                    continue;
                }
            }
        }
        if let Some(fields) = variant.field_list() {
            match fields {
                ast::FieldList::RecordFieldList(fields) => {
                    process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
                }
                ast::FieldList::TupleFieldList(fields) => {
                    process_has_attrs_with_possible_comma(fields.fields(), loc, db, remove)?;
                }
            }
        }
    }
    Some(())
}

pub(crate) fn process_cfg_attrs(
    node: &SyntaxNode,
    loc: &MacroCallLoc,
    db: &dyn ExpandDatabase,
) -> Option<FxHashSet<SyntaxElement>> {
    // FIXME: #[cfg_eval] is not implemented. But it is not stable yet
    if !matches!(loc.kind, MacroCallKind::Derive { .. }) {
        return None;
    }
    let mut remove = FxHashSet::default();

    let item = ast::Item::cast(node.clone())?;
    for attr in item.attrs() {
        if let Some(enabled) = check_cfg_attr_attr(&attr, loc, db) {
            if enabled {
                debug!("Removing cfg_attr tokens {:?}", attr);
                let meta = attr.meta()?;
                let removes_from_cfg_attr = remove_tokens_within_cfg_attr(meta)?;
                remove.extend(removes_from_cfg_attr);
            } else {
                debug!("censoring type cfg_attr {:?}", item.syntax());
                remove.insert(attr.syntax().clone().into());
                continue;
            }
        }
    }
    match item {
        ast::Item::Struct(it) => match it.field_list()? {
            ast::FieldList::RecordFieldList(fields) => {
                process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
            }
            ast::FieldList::TupleFieldList(fields) => {
                process_has_attrs_with_possible_comma(fields.fields(), loc, db, &mut remove)?;
            }
        },
        ast::Item::Enum(it) => {
            process_enum(it.variant_list()?, loc, db, &mut remove)?;
        }
        ast::Item::Union(it) => {
            process_has_attrs_with_possible_comma(
                it.record_field_list()?.fields(),
                loc,
                db,
                &mut remove,
            )?;
        }
        // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
        _ => {}
    }
    Some(remove)
}
/// Parses a `cfg` attribute from the meta
fn parse_from_attr_meta(meta: Meta) -> Option<CfgExpr> {
    let tt = meta.token_tree()?;
    let mut iter = tt.token_trees_and_tokens().skip(1).peekable();
    next_cfg_expr_from_syntax(&mut iter)
}

fn next_cfg_expr_from_syntax<I>(iter: &mut Peekable<I>) -> Option<CfgExpr>
where
    I: Iterator<Item = NodeOrToken<ast::TokenTree, syntax::SyntaxToken>>,
{
    let name = match iter.next() {
        None => return None,
        Some(NodeOrToken::Token(element)) => match element.kind() {
            syntax::T![ident] => SmolStr::new(element.text()),
            _ => return Some(CfgExpr::Invalid),
        },
        Some(_) => return Some(CfgExpr::Invalid),
    };
    let result = match name.as_str() {
        "all" | "any" | "not" => {
            let mut preds = Vec::new();
            let Some(NodeOrToken::Node(tree)) = iter.next() else {
                return Some(CfgExpr::Invalid);
            };
            let mut tree_iter = tree.token_trees_and_tokens().skip(1).peekable();
            while tree_iter
                .peek()
                .filter(
                    |element| matches!(element, NodeOrToken::Token(token) if (token.kind() != syntax::T![')'])),
                )
                .is_some()
            {
                let pred = next_cfg_expr_from_syntax(&mut tree_iter);
                if let Some(pred) = pred {
                    preds.push(pred);
                }
            }
            let group = match name.as_str() {
                "all" => CfgExpr::All(preds),
                "any" => CfgExpr::Any(preds),
                "not" => CfgExpr::Not(Box::new(preds.pop().unwrap_or(CfgExpr::Invalid))),
                _ => unreachable!(),
            };
            Some(group)
        }
        _ => match iter.peek() {
            Some(NodeOrToken::Token(element)) if (element.kind() == syntax::T![=]) => {
                iter.next();
                match iter.next() {
                    Some(NodeOrToken::Token(value_token))
                        if (value_token.kind() == syntax::SyntaxKind::STRING) =>
                    {
                        let value = value_token.text();
                        let value = SmolStr::new(value.trim_matches('"'));
                        Some(CfgExpr::Atom(CfgAtom::KeyValue { key: name, value }))
                    }
                    _ => None,
                }
            }
            _ => Some(CfgExpr::Atom(CfgAtom::Flag(name))),
        },
    };
    if let Some(NodeOrToken::Token(element)) = iter.peek() {
        if element.kind() == syntax::T![,] {
            iter.next();
        }
    }
    result
}
#[cfg(test)]
mod tests {
    use cfg::DnfExpr;
    use expect_test::{expect, Expect};
    use syntax::{ast::Attr, AstNode, SourceFile};

    use crate::cfg_process::parse_from_attr_meta;

    fn check_dnf_from_syntax(input: &str, expect: Expect) {
        let parse = SourceFile::parse(input);
        let node = match parse.tree().syntax().descendants().find_map(Attr::cast) {
            Some(it) => it,
            None => {
                let node = std::any::type_name::<Attr>();
                panic!("Failed to make ast node `{node}` from text {input}")
            }
        };
        let node = node.clone_subtree();
        assert_eq!(node.syntax().text_range().start(), 0.into());

        let cfg = parse_from_attr_meta(node.meta().unwrap()).unwrap();
        let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
        expect.assert_eq(&actual);
    }
    #[test]
    fn cfg_from_attr() {
        check_dnf_from_syntax(r#"#[cfg(test)]"#, expect![[r#"#![cfg(test)]"#]]);
        check_dnf_from_syntax(r#"#[cfg(not(never))]"#, expect![[r#"#![cfg(not(never))]"#]]);
    }
}
@@ -8,7 +8,7 @@ use rustc_hash::FxHashSet;
use span::{AstIdMap, SyntaxContextData, SyntaxContextId};
use syntax::{
    ast::{self, HasAttrs},
-    AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
+    AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;

@@ -16,6 +16,7 @@ use crate::{
    attrs::collect_attrs,
    builtin_attr_macro::pseudo_derive_attr_expansion,
    builtin_fn_macro::EagerExpander,
+    cfg_process,
    declarative::DeclarativeMacroExpander,
    fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
    hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@@ -150,12 +151,16 @@ pub fn expand_speculative(
        ),
        MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
            let censor = censor_for_macro_input(&loc, speculative_args);
+            let censor_cfg =
+                cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default();
            let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
            fixups.append.retain(|it, _| match it {
-                syntax::NodeOrToken::Node(it) => !censor.contains(it),
                syntax::NodeOrToken::Token(_) => true,
+                it => !censor.contains(it) && !censor_cfg.contains(it),
            });
            fixups.remove.extend(censor);
+            fixups.remove.extend(censor_cfg);

            (
                mbe::syntax_node_to_token_tree_modified(
                    speculative_args,
@@ -408,12 +413,16 @@ fn macro_arg(
        ),
        MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
            let censor = censor_for_macro_input(&loc, &syntax);
+            let censor_cfg =
+                cfg_process::process_cfg_attrs(&syntax, &loc, db).unwrap_or_default();
            let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
            fixups.append.retain(|it, _| match it {
-                syntax::NodeOrToken::Node(it) => !censor.contains(it),
                syntax::NodeOrToken::Token(_) => true,
+                it => !censor.contains(it) && !censor_cfg.contains(it),
            });
            fixups.remove.extend(censor);
+            fixups.remove.extend(censor_cfg);

            {
                let mut tt = mbe::syntax_node_to_token_tree_modified(
                    &syntax,
@@ -461,7 +470,7 @@ fn macro_arg(
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxElement> {
    // FIXME: handle `cfg_attr`
    (|| {
        let censor = match loc.kind {
@@ -477,7 +486,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                // we need to know about all macro calls for the given ast item here
                // so we require some kind of mapping...
                .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
-                .map(|it| it.syntax().clone())
+                .map(|it| it.syntax().clone().into())
                .collect()
            }
            MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
@@ -486,7 +495,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                collect_attrs(&ast::Item::cast(node.clone())?)
                    .nth(invoc_attr_index.ast_index())
                    .and_then(|x| Either::left(x.1))
-                    .map(|attr| attr.syntax().clone())
+                    .map(|attr| attr.syntax().clone().into())
                    .into_iter()
                    .collect()
            }
@@ -23,7 +23,7 @@ use crate::{
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
    pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
-    pub(crate) remove: FxHashSet<SyntaxNode>,
+    pub(crate) remove: FxHashSet<SyntaxElement>,
    pub(crate) undo_info: SyntaxFixupUndoInfo,
}

@@ -51,7 +51,7 @@ pub(crate) fn fixup_syntax(
    call_site: Span,
) -> SyntaxFixups {
    let mut append = FxHashMap::<SyntaxElement, _>::default();
-    let mut remove = FxHashSet::<SyntaxNode>::default();
+    let mut remove = FxHashSet::<SyntaxElement>::default();
    let mut preorder = node.preorder();
    let mut original = Vec::new();
    let dummy_range = FIXUP_DUMMY_RANGE;
@@ -68,7 +68,7 @@ pub(crate) fn fixup_syntax(

        let node_range = node.text_range();
        if can_handle_error(&node) && has_error_to_handle(&node) {
-            remove.insert(node.clone());
+            remove.insert(node.clone().into());
            // the node contains an error node, we have to completely replace it by something valid
            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
            let idx = original.len() as u32;
@@ -22,8 +22,8 @@ pub mod proc_macro;
pub mod quote;
pub mod span_map;

+mod cfg_process;
mod fixup;

use attrs::collect_attrs;
use triomphe::Arc;
@@ -92,7 +92,7 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
    node: &SyntaxNode,
    map: SpanMap,
    append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
-    remove: FxHashSet<SyntaxNode>,
+    remove: FxHashSet<SyntaxElement>,
    call_site: SpanData<Ctx>,
) -> tt::Subtree<SpanData<Ctx>>
where
@@ -629,7 +629,7 @@ struct Converter<SpanMap, S> {
    /// Used to make the emitted text ranges in the spans relative to the span anchor.
    map: SpanMap,
    append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
-    remove: FxHashSet<SyntaxNode>,
+    remove: FxHashSet<SyntaxElement>,
    call_site: S,
}

@@ -638,7 +638,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
        node: &SyntaxNode,
        map: SpanMap,
        append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
-        remove: FxHashSet<SyntaxNode>,
+        remove: FxHashSet<SyntaxElement>,
        call_site: S,
    ) -> Self {
        let mut this = Converter {
@@ -660,16 +660,25 @@ impl<SpanMap, S> Converter<SpanMap, S> {
    fn next_token(&mut self) -> Option<SyntaxToken> {
        while let Some(ev) = self.preorder.next() {
            match ev {
-                WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
-                WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
-                    self.preorder.skip_subtree();
-                    if let Some(mut v) = self.append.remove(&n.into()) {
-                        v.reverse();
-                        self.current_leaves.extend(v);
-                        return None;
+                WalkEvent::Enter(token) => {
+                    if self.remove.contains(&token) {
+                        match token {
+                            syntax::NodeOrToken::Token(_) => {
+                                continue;
+                            }
+                            node => {
+                                self.preorder.skip_subtree();
+                                if let Some(mut v) = self.append.remove(&node) {
+                                    v.reverse();
+                                    self.current_leaves.extend(v);
+                                    return None;
+                                }
+                            }
+                        }
+                    } else if let syntax::NodeOrToken::Token(token) = token {
+                        return Some(token);
                    }
                }
-                WalkEvent::Enter(SyntaxElement::Node(_)) => (),
                WalkEvent::Leave(ele) => {
                    if let Some(mut v) = self.append.remove(&ele) {
                        v.reverse();