Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-29 21:35:20 +00:00)
Commit f45b080965 (parent 00a0125372): Starting Fix for cfg stripping
8 changed files with 302 additions and 25 deletions
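For context, a minimal sketch (not taken from this commit; the type, field, and feature names are invented) of the situation the new cfg stripping targets: fields and variants behind an inactive `#[cfg(...)]` must be removed from an item before it is handed to a derive or attribute macro, mirroring what rustc does.

// Hypothetical input item. With the (made-up) `metrics` feature disabled, a
// derive macro must never see `sample_rate` or `Backend::Remote`, so
// rust-analyzer has to strip them (and their trailing commas) from the
// macro input before expansion.
#[derive(Debug, Default)]
struct Config {
    host: String,
    #[cfg(feature = "metrics")]
    sample_rate: u32, // stripped when the `metrics` feature is off
}

#[derive(Debug)]
enum Backend {
    Local,
    #[cfg(feature = "metrics")]
    Remote, // stripped along with its trailing comma
}

fn main() {
    println!("{:?}", Config::default());
    println!("{:?}", Backend::Local);
}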
crates/hir-expand/src/cfg_process.rs (new file, 178 lines)

@@ -0,0 +1,178 @@
use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet;
use syntax::{
    ast::{self, Attr, FieldList, HasAttrs, RecordFieldList, TupleFieldList, Variant, VariantList},
    AstNode, SyntaxElement, SyntaxNode, T,
};
use tracing::info;

use crate::{db::ExpandDatabase, span_map::SpanMap, MacroCallLoc};

fn check_cfg_attr(
    attr: &Attr,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
) -> Option<bool> {
    attr.simple_name().as_deref().map(|v| v == "cfg")?;
    info!("Checking cfg attr {:?}", attr);
    let Some(tt) = attr.token_tree() else {
        info!("cfg attr has no expr {:?}", attr);
        return Some(true);
    };
    info!("Checking cfg {:?}", tt);
    let tt = tt.syntax().clone();
    // Convert to a tt::Subtree
    let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site);
    let cfg = cfg::CfgExpr::parse(&tt);
    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
    Some(enabled)
}

enum CfgAttrResult {
    Enabled(Attr),
    Disabled,
}

fn check_cfg_attr_attr(
    attr: &Attr,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
) -> Option<CfgAttrResult> {
    attr.simple_name().as_deref().map(|v| v == "cfg_attr")?;
    info!("Checking cfg_attr attr {:?}", attr);
    let Some(tt) = attr.token_tree() else {
        info!("cfg_attr attr has no expr {:?}", attr);
        return None;
    };
    info!("Checking cfg_attr {:?}", tt);
    let tt = tt.syntax().clone();
    // Convert to a tt::Subtree
    let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site);
    let cfg = cfg::CfgExpr::parse(&tt);
    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
    if enabled {
        // FIXME: Add the internal attribute
        Some(CfgAttrResult::Enabled(attr.clone()))
    } else {
        Some(CfgAttrResult::Disabled)
    }
}

fn process_has_attrs_with_possible_comma<I: HasAttrs>(
    items: impl Iterator<Item = I>,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
    res: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    for item in items {
        let field_attrs = item.attrs();
        'attrs: for attr in field_attrs {
            let Some(enabled) = check_cfg_attr(&attr, loc, span_map, db) else {
                continue;
            };
            if enabled {
                // FIXME: Should we remove the cfg_attr?
            } else {
                info!("censoring type {:?}", item.syntax());
                res.insert(item.syntax().clone().into());
                // We need to remove the `,` as well
                if let Some(comma) = item.syntax().next_sibling_or_token() {
                    if comma.kind() == T![,] {
                        res.insert(comma.into());
                    }
                }
                break 'attrs;
            }
            let Some(attr_result) = check_cfg_attr_attr(&attr, loc, span_map, db) else {
                continue;
            };
            match attr_result {
                CfgAttrResult::Enabled(attr) => {
                    // FIXME: Replace the attribute with the internal attribute
                }
                CfgAttrResult::Disabled => {
                    info!("censoring type {:?}", item.syntax());
                    res.insert(attr.syntax().clone().into());
                    continue;
                }
            }
        }
    }
    Some(())
}

fn process_enum(
    variants: VariantList,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
    res: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    for variant in variants.variants() {
        'attrs: for attr in variant.attrs() {
            if !check_cfg_attr(&attr, loc, span_map, db)? {
                info!("censoring variant {:?}", variant.syntax());
                res.insert(variant.syntax().clone().into());
                if let Some(comma) = variant.syntax().next_sibling_or_token() {
                    if comma.kind() == T![,] {
                        res.insert(comma.into());
                    }
                }
                break 'attrs;
            }
        }
        if let Some(fields) = variant.field_list() {
            match fields {
                ast::FieldList::RecordFieldList(fields) => {
                    process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?;
                }
                ast::FieldList::TupleFieldList(fields) => {
                    process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?;
                }
            }
        }
    }
    Some(())
}
/// Handles `#[cfg(...)]` and `#[cfg_attr(...)]` attributes on the given item,
/// collecting the syntax elements that should be stripped from the macro input.
pub(crate) fn process_cfg_attrs(
    node: &SyntaxNode,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
) -> Option<FxHashSet<SyntaxElement>> {
    let mut res = FxHashSet::default();
    let item = ast::Item::cast(node.clone())?;
    match item {
        ast::Item::Struct(it) => match it.field_list()? {
            ast::FieldList::RecordFieldList(fields) => {
                process_has_attrs_with_possible_comma(
                    fields.fields(),
                    loc,
                    span_map,
                    db,
                    &mut res,
                )?;
            }
            ast::FieldList::TupleFieldList(fields) => {
                process_has_attrs_with_possible_comma(
                    fields.fields(),
                    loc,
                    span_map,
                    db,
                    &mut res,
                )?;
            }
        },
        ast::Item::Enum(it) => {
            process_enum(it.variant_list()?, loc, span_map, db, &mut res)?;
        }
        // FIXME: Implement for other items
        _ => {}
    }

    Some(res)
}
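The `cfg_attr` half that `check_cfg_attr_attr` begins to handle (its FIXME notes that the enabled case still has to splice the inner attribute back in) corresponds to source like the following minimal sketch; the type and attribute choices are invented for illustration and are not part of the commit.

// Illustration only: when the predicate holds, `#[cfg_attr(pred, attr)]`
// should behave as if `#[attr]` were written directly; when it does not,
// the whole attribute disappears. That replacement is what
// CfgAttrResult::Enabled still leaves to do in this commit.
#[cfg_attr(debug_assertions, derive(Debug, Clone))]
struct Token {
    kind: u8,
}

fn main() {
    let t = Token { kind: 1 };
    #[cfg(debug_assertions)]
    println!("{:?}", t.clone());
    #[cfg(not(debug_assertions))]
    let _ = t;
}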
crates/hir-expand/src/db.rs

@@ -7,15 +7,17 @@ use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use span::{AstIdMap, SyntaxContextData, SyntaxContextId};
 use syntax::{
-    ast::{self, HasAttrs},
-    AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
+    ast::{self, Attr, HasAttrs},
+    AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T,
 };
+use tracing::info;
 use triomphe::Arc;
 
 use crate::{
     attrs::collect_attrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
+    cfg_process,
     declarative::DeclarativeMacroExpander,
     fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@@ -152,8 +154,8 @@ pub fn expand_speculative(
     let censor = censor_for_macro_input(&loc, speculative_args);
     let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
     fixups.append.retain(|it, _| match it {
-        syntax::NodeOrToken::Node(it) => !censor.contains(it),
         syntax::NodeOrToken::Token(_) => true,
+        it => !censor.contains(it),
     });
     fixups.remove.extend(censor);
     (
@@ -408,12 +410,15 @@ fn macro_arg(
             ),
             MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
                 let censor = censor_for_macro_input(&loc, &syntax);
+                let censor_cfg = censor_cfg_elements(&syntax, &loc, &map, db);
                 let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
                 fixups.append.retain(|it, _| match it {
-                    syntax::NodeOrToken::Node(it) => !censor.contains(it),
                     syntax::NodeOrToken::Token(_) => true,
+                    it => !censor.contains(it) && !censor_cfg.contains(it),
                 });
                 fixups.remove.extend(censor);
+                fixups.remove.extend(censor_cfg);
+
                 {
                     let mut tt = mbe::syntax_node_to_token_tree_modified(
                         &syntax,
@@ -456,12 +461,19 @@ fn macro_arg(
         }
     }
 }
 
+fn censor_cfg_elements(
+    node: &SyntaxNode,
+    loc: &MacroCallLoc,
+    span_map: &SpanMap,
+    db: &dyn ExpandDatabase,
+) -> FxHashSet<SyntaxElement> {
+    cfg_process::process_cfg_attrs(node, loc, span_map, db).unwrap_or_default()
+}
 // FIXME: Censoring info should be calculated by the caller! Namely by name resolution
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
 /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
 /// - attributes expect the invoking attribute to be stripped
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxElement> {
     // FIXME: handle `cfg_attr`
     (|| {
         let censor = match loc.kind {
@@ -477,7 +489,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                 // we need to know about all macro calls for the given ast item here
                 // so we require some kind of mapping...
                 .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
-                .map(|it| it.syntax().clone())
+                .map(|it| it.syntax().clone().into())
                 .collect()
             }
             MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
@@ -486,7 +498,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                 collect_attrs(&ast::Item::cast(node.clone())?)
                     .nth(invoc_attr_index.ast_index())
                     .and_then(|x| Either::left(x.1))
-                    .map(|attr| attr.syntax().clone())
+                    .map(|attr| attr.syntax().clone().into())
                     .into_iter()
                     .collect()
             }
crates/hir-expand/src/fixup.rs

@@ -23,7 +23,7 @@ use crate::{
 #[derive(Debug, Default)]
 pub(crate) struct SyntaxFixups {
     pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
-    pub(crate) remove: FxHashSet<SyntaxNode>,
+    pub(crate) remove: FxHashSet<SyntaxElement>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
 }
 
@@ -51,7 +51,7 @@ pub(crate) fn fixup_syntax(
     call_site: Span,
 ) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
-    let mut remove = FxHashSet::<SyntaxNode>::default();
+    let mut remove = FxHashSet::<SyntaxElement>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
     let dummy_range = FIXUP_DUMMY_RANGE;
@@ -68,7 +68,7 @@ pub(crate) fn fixup_syntax(
 
         let node_range = node.text_range();
         if can_handle_error(&node) && has_error_to_handle(&node) {
-            remove.insert(node.clone());
+            remove.insert(node.clone().into());
            // the node contains an error node, we have to completely replace it by something valid
             let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
             let idx = original.len() as u32;
crates/hir-expand/src/lib.rs

@@ -22,8 +22,8 @@ pub mod proc_macro;
 pub mod quote;
 pub mod span_map;
 
+mod cfg_process;
 mod fixup;
 
 use attrs::collect_attrs;
 use triomphe::Arc;