Cleanup cfg check handling in expression store lowering

Lukas Wirth 2025-04-29 10:49:41 +02:00
parent 7d9b839f9c
commit d11dbf648f
17 changed files with 234 additions and 188 deletions


@@ -1,6 +1,6 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.
-use std::{borrow::Cow, hash::Hash, ops};
+use std::{borrow::Cow, convert::identity, hash::Hash, ops};

 use base_db::Crate;
 use cfg::{CfgExpr, CfgOptions};
@@ -8,6 +8,7 @@ use either::Either;
 use hir_expand::{
     HirFileId, InFile,
     attrs::{Attr, AttrId, RawAttrs, collect_attrs},
+    span_map::SpanMapRef,
 };
 use intern::{Symbol, sym};
 use la_arena::{ArenaMap, Idx, RawIdx};
@@ -45,8 +46,27 @@ impl Attrs {
         (**self).iter().find(|attr| attr.id == id)
     }

-    pub(crate) fn filter(db: &dyn DefDatabase, krate: Crate, raw_attrs: RawAttrs) -> Attrs {
-        Attrs(raw_attrs.filter(db, krate))
+    pub(crate) fn expand_cfg_attr(
+        db: &dyn DefDatabase,
+        krate: Crate,
+        raw_attrs: RawAttrs,
+    ) -> Attrs {
+        Attrs(raw_attrs.expand_cfg_attr(db, krate))
+    }
+
+    pub(crate) fn is_cfg_enabled_for(
+        db: &dyn DefDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> Result<(), CfgExpr> {
+        RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
+            .filter_map(|attr| attr.cfg())
+            .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
+                true => None,
+                false => Some(cfg),
+            })
+            .map_or(Ok(()), Err)
     }
 }
@@ -522,38 +542,41 @@ impl AttrsWithOwner {
                 GenericParamId::ConstParamId(it) => {
                     let src = it.parent().child_source(db);
                     // FIXME: We should be never getting `None` here.
-                    match src.value.get(it.local_id()) {
-                        Some(val) => RawAttrs::from_attrs_owner(
+                    return Attrs(match src.value.get(it.local_id()) {
+                        Some(val) => RawAttrs::new_expanded(
                             db,
-                            src.with_value(val),
+                            val,
                             db.span_map(src.file_id).as_ref(),
+                            def.krate(db).cfg_options(db),
                         ),
                         None => RawAttrs::EMPTY,
-                    }
+                    });
                 }
                 GenericParamId::TypeParamId(it) => {
                     let src = it.parent().child_source(db);
                     // FIXME: We should be never getting `None` here.
-                    match src.value.get(it.local_id()) {
-                        Some(val) => RawAttrs::from_attrs_owner(
+                    return Attrs(match src.value.get(it.local_id()) {
+                        Some(val) => RawAttrs::new_expanded(
                             db,
-                            src.with_value(val),
+                            val,
                             db.span_map(src.file_id).as_ref(),
+                            def.krate(db).cfg_options(db),
                         ),
                         None => RawAttrs::EMPTY,
-                    }
+                    });
                 }
                 GenericParamId::LifetimeParamId(it) => {
                     let src = it.parent.child_source(db);
                     // FIXME: We should be never getting `None` here.
-                    match src.value.get(it.local_id) {
-                        Some(val) => RawAttrs::from_attrs_owner(
+                    return Attrs(match src.value.get(it.local_id) {
+                        Some(val) => RawAttrs::new_expanded(
                             db,
-                            src.with_value(val),
+                            val,
                             db.span_map(src.file_id).as_ref(),
+                            def.krate(db).cfg_options(db),
                         ),
                         None => RawAttrs::EMPTY,
-                    }
+                    });
                 }
             },
             AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
@@ -561,7 +584,7 @@ impl AttrsWithOwner {
             AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
         };

-        let attrs = raw_attrs.filter(db, def.krate(db));
+        let attrs = raw_attrs.expand_cfg_attr(db, def.krate(db));
         Attrs(attrs)
     }


@@ -3,10 +3,11 @@
 use std::mem;

 use base_db::Crate;
+use cfg::CfgOptions;
 use drop_bomb::DropBomb;
 use hir_expand::{
     ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
-    attrs::RawAttrs, eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
+    eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
 };
 use span::{AstIdMap, Edition, SyntaxContext};
 use syntax::ast::HasAttrs;
@@ -64,22 +65,13 @@ impl Expander {
         }
     }

-    pub(super) fn attrs(
-        &self,
-        db: &dyn DefDatabase,
-        krate: Crate,
-        has_attrs: &dyn HasAttrs,
-    ) -> Attrs {
-        Attrs::filter(db, krate, RawAttrs::new(db, has_attrs, self.span_map.as_ref()))
-    }
-
     pub(super) fn is_cfg_enabled(
         &self,
         db: &dyn DefDatabase,
-        krate: Crate,
         has_attrs: &dyn HasAttrs,
-    ) -> bool {
-        self.attrs(db, krate, has_attrs).is_cfg_enabled(krate.cfg_options(db))
+        cfg_options: &CfgOptions,
+    ) -> Result<(), cfg::CfgExpr> {
+        Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
     }

     pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {


@@ -7,6 +7,7 @@ mod path;
 use std::mem;

+use cfg::CfgOptions;
 use either::Either;
 use hir_expand::{
     HirFileId, InFile, Lookup, MacroDefId,
@@ -81,8 +82,6 @@ pub(super) fn lower_body(
     // even though they should be the same. Also, when the body comes from multiple expansions, their
     // hygiene is different.

-    let krate = module.krate();
-
     let mut self_param = None;
     let mut source_map_self_param = None;
     let mut params = vec![];
@@ -100,9 +99,8 @@
     // and skip the body.
     if skip_body {
         if let Some(param_list) = parameters {
-            if let Some(self_param_syn) = param_list
-                .self_param()
-                .filter(|self_param| collector.expander.is_cfg_enabled(db, krate, self_param))
+            if let Some(self_param_syn) =
+                param_list.self_param().filter(|self_param| collector.check_cfg(self_param))
             {
                 let is_mutable =
                     self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
@@ -119,10 +117,7 @@
                 source_map_self_param =
                     Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
             }
-            let count = param_list
-                .params()
-                .filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
-                .count();
+            let count = param_list.params().filter(|it| collector.check_cfg(it)).count();
             params = (0..count).map(|_| collector.missing_pat()).collect();
         };
         let body_expr = collector.missing_expr();
@@ -138,9 +133,7 @@
     }

     if let Some(param_list) = parameters {
-        if let Some(self_param_syn) =
-            param_list.self_param().filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
-        {
+        if let Some(self_param_syn) = param_list.self_param().filter(|it| collector.check_cfg(it)) {
             let is_mutable =
                 self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
             let hygiene = self_param_syn
@@ -157,7 +150,7 @@
         }

         for param in param_list.params() {
-            if collector.expander.is_cfg_enabled(db, krate, &param) {
+            if collector.check_cfg(&param) {
                 let param_pat = collector.collect_pat_top(param.pat());
                 params.push(param_pat);
             }
@@ -346,7 +339,7 @@ pub(crate) fn lower_function(
     collector.collect_impl_trait(&mut expr_collector, |collector, mut impl_trait_lower_fn| {
         if let Some(param_list) = fn_.value.param_list() {
             if let Some(param) = param_list.self_param() {
-                let enabled = collector.expander.is_cfg_enabled(db, module.krate(), &param);
+                let enabled = collector.check_cfg(&param);
                 if enabled {
                     has_self_param = true;
                     params.push(match param.ty() {
@@ -381,7 +374,7 @@
             }
             let p = param_list
                 .params()
-                .filter(|param| collector.expander.is_cfg_enabled(db, module.krate(), param))
+                .filter(|param| collector.check_cfg(param))
                 .filter(|param| {
                     let is_variadic = param.dotdotdot_token().is_some();
                     has_variadic |= is_variadic;
@@ -441,6 +434,7 @@ pub(crate) fn lower_function(
 pub struct ExprCollector<'db> {
     db: &'db dyn DefDatabase,
+    cfg_options: &'db CfgOptions,
     expander: Expander,
     def_map: Arc<DefMap>,
     local_def_map: Arc<LocalDefMap>,
@@ -553,6 +547,7 @@ impl ExprCollector<'_> {
         let expander = Expander::new(db, current_file_id, &def_map);
         ExprCollector {
             db,
+            cfg_options: module.krate().cfg_options(db),
             module,
             def_map,
             local_def_map,
@@ -1026,7 +1021,9 @@
     /// Returns `None` if and only if the expression is `#[cfg]`d out.
     fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
         let syntax_ptr = AstPtr::new(&expr);
-        self.check_cfg(&expr)?;
+        if !self.check_cfg(&expr) {
+            return None;
+        }

         // FIXME: Move some of these arms out into separate methods for clarity
         Some(match expr {
@@ -1114,6 +1111,7 @@
             ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e),
             ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
             ast::Expr::CallExpr(e) => {
+                // FIXME: Remove this once we drop support for <1.86, https://github.com/rust-lang/rust/commit/ac9cb908ac4301dfc25e7a2edee574320022ae2c
                 let is_rustc_box = {
                     let attrs = e.attrs();
                     attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")
@@ -1156,13 +1154,17 @@
                     match_arm_list
                         .arms()
                         .filter_map(|arm| {
-                            self.check_cfg(&arm).map(|()| MatchArm {
-                                pat: self.collect_pat_top(arm.pat()),
-                                expr: self.collect_expr_opt(arm.expr()),
-                                guard: arm
-                                    .guard()
-                                    .map(|guard| self.collect_expr_opt(guard.condition())),
-                            })
+                            if self.check_cfg(&arm) {
+                                Some(MatchArm {
+                                    pat: self.collect_pat_top(arm.pat()),
+                                    expr: self.collect_expr_opt(arm.expr()),
+                                    guard: arm
+                                        .guard()
+                                        .map(|guard| self.collect_expr_opt(guard.condition())),
+                                })
+                            } else {
+                                None
+                            }
                         })
                         .collect()
                 } else {
@@ -1230,7 +1232,9 @@
                 let fields = nfl
                     .fields()
                     .filter_map(|field| {
-                        self.check_cfg(&field)?;
+                        if !self.check_cfg(&field) {
+                            return None;
+                        }

                         let name = field.field_name()?.as_name();
@@ -1483,7 +1487,9 @@
     }

     fn maybe_collect_expr_as_pat(&mut self, expr: &ast::Expr) -> Option<PatId> {
-        self.check_cfg(expr)?;
+        if !self.check_cfg(expr) {
+            return None;
+        }

         let syntax_ptr = AstPtr::new(expr);
         let result = match expr {
@@ -1558,7 +1564,9 @@
                 let args = record_field_list
                     .fields()
                     .filter_map(|f| {
-                        self.check_cfg(&f)?;
+                        if !self.check_cfg(&f) {
+                            return None;
+                        }
                         let field_expr = f.expr()?;
                         let pat = self.collect_expr_as_pat(field_expr);
                         let name = f.field_name()?.as_name();
@@ -2044,7 +2052,7 @@
     fn collect_stmt(&mut self, statements: &mut Vec<Statement>, s: ast::Stmt) {
         match s {
             ast::Stmt::LetStmt(stmt) => {
-                if self.check_cfg(&stmt).is_none() {
+                if !self.check_cfg(&stmt) {
                     return;
                 }
                 let pat = self.collect_pat_top(stmt.pat());
@@ -2059,7 +2067,7 @@
             ast::Stmt::ExprStmt(stmt) => {
                 let expr = stmt.expr();
                 match &expr {
-                    Some(expr) if self.check_cfg(expr).is_none() => return,
+                    Some(expr) if !self.check_cfg(expr) => return,
                     _ => (),
                 }
                 let has_semi = stmt.semicolon_token().is_some();
@@ -2074,7 +2082,7 @@
                 }
             }
             ast::Stmt::Item(ast::Item::MacroDef(macro_)) => {
-                if self.check_cfg(&macro_).is_none() {
+                if !self.check_cfg(&macro_) {
                     return;
                 }
                 let Some(name) = macro_.name() else {
@@ -2086,7 +2094,7 @@
                 self.collect_macro_def(statements, macro_id);
             }
             ast::Stmt::Item(ast::Item::MacroRules(macro_)) => {
-                if self.check_cfg(&macro_).is_none() {
+                if !self.check_cfg(&macro_) {
                     return;
                 }
                 let Some(name) = macro_.name() else {
@@ -2360,7 +2368,9 @@
                 let args = record_pat_field_list
                     .fields()
                     .filter_map(|f| {
-                        self.check_cfg(&f)?;
+                        if !self.check_cfg(&f) {
+                            return None;
+                        }
                         let ast_pat = f.pat()?;
                         let pat = self.collect_pat(ast_pat, binding_list);
                         let name = f.field_name()?.as_name();
@@ -2536,25 +2546,18 @@
     /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
     /// not.
-    fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> Option<()> {
-        let attrs = self.expander.attrs(self.db, self.module.krate(), owner);
-        match attrs.cfg() {
-            Some(cfg) => {
-                let cfg_options = self.module.krate().cfg_options(self.db);
-                if cfg_options.check(&cfg) != Some(false) {
-                    return Some(());
-                }
-
+    fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
+        let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
+        match enabled {
+            Ok(()) => true,
+            Err(cfg) => {
                 self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
                     node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
                     cfg,
-                    opts: cfg_options.clone(),
+                    opts: self.cfg_options.clone(),
                 });
-
-                None
+                false
             }
-            None => Some(()),
         }
     }
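
With check_cfg returning a plain bool rather than Option<()>, the ?-based early returns above become explicit if-blocks, and the check also composes directly with iterator adapters. A condensed sketch of the two call-site shapes used in this file (variable names illustrative):

    // Early return inside a filter_map closure, as in record-field lowering:
    if !self.check_cfg(&field) {
        return None;
    }

    // Direct use as an iterator predicate, as in parameter lowering:
    let active_params = param_list.params().filter(|param| collector.check_cfg(param));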


@@ -110,7 +110,7 @@ impl GenericParamsCollector {
     fn lower_param_list(&mut self, ec: &mut ExprCollector<'_>, params: ast::GenericParamList) {
         for generic_param in params.generic_params() {
-            let enabled = ec.expander.is_cfg_enabled(ec.db, ec.module.krate(), &generic_param);
+            let enabled = ec.check_cfg(&generic_param);
             if !enabled {
                 continue;
             }


@@ -179,7 +179,7 @@ impl ItemTree {
     /// Returns the inner attributes of the source file.
     pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
-        Attrs::filter(
+        Attrs::expand_cfg_attr(
             db,
             krate,
             self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(),
@@ -191,7 +191,7 @@
     }

     pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: Crate, of: AttrOwner) -> Attrs {
-        Attrs::filter(db, krate, self.raw_attrs(of).clone())
+        Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
     }

     /// Returns a count of a few, expensive items.


@@ -2,7 +2,7 @@
 use std::{borrow::Cow, fmt, ops};

 use base_db::Crate;
-use cfg::CfgExpr;
+use cfg::{CfgExpr, CfgOptions};
 use either::Either;
 use intern::{Interned, Symbol, sym};
@@ -14,11 +14,10 @@ use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
 use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
 use triomphe::ThinArc;

-use crate::name::Name;
 use crate::{
-    InFile,
     db::ExpandDatabase,
     mod_path::ModPath,
+    name::Name,
     span_map::SpanMapRef,
     tt::{self, TopSubtree, token_to_literal},
 };
@@ -49,15 +48,48 @@ impl RawAttrs {
         owner: &dyn ast::HasAttrs,
         span_map: SpanMapRef<'_>,
     ) -> Self {
-        let entries: Vec<_> = collect_attrs(owner)
-            .filter_map(|(id, attr)| match attr {
+        let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
+
+        let entries = if entries.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
+        };
+
+        RawAttrs { entries }
+    }
+
+    /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
+    pub fn new_expanded(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> Self {
+        let entries: Vec<_> =
+            Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
+
+        let entries = if entries.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
+        };
+
+        RawAttrs { entries }
+    }
+
+    pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+    ) -> impl Iterator<Item = Attr> {
+        collect_attrs(owner).filter_map(move |(id, attr)| match attr {
             Either::Left(attr) => {
                 attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
             }
-            Either::Right(comment) => comment.doc_comment().map(|doc| {
+            Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
                 let span = span_map.span_for_range(comment.syntax().text_range());
-                let (text, kind) =
-                    desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
+                let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
                 Attr {
                     id,
                     input: Some(Box::new(AttrInput::Literal(tt::Literal {
@@ -70,24 +102,18 @@ impl RawAttrs {
                     ctxt: span.ctx,
                 }
             }),
+            Either::Right(_) => None,
         })
-        .collect();
-
-        let entries = if entries.is_empty() {
-            None
-        } else {
-            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
-        };
-
-        RawAttrs { entries }
     }

-    pub fn from_attrs_owner(
+    pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
         db: &dyn ExpandDatabase,
-        owner: InFile<&dyn ast::HasAttrs>,
+        owner: &dyn ast::HasAttrs,
         span_map: SpanMapRef<'_>,
-    ) -> Self {
-        Self::new(db, owner.value, span_map)
+        cfg_options: &CfgOptions,
+    ) -> impl Iterator<Item = Attr> {
+        Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
+            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
     }

     pub fn merge(&self, other: Self) -> Self {
@@ -114,9 +140,8 @@
         }
     }

-    /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
-    // FIXME: This should return a different type, signaling it was filtered?
-    pub fn filter(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
+    /// Processes `cfg_attr`s
+    pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
         let has_cfg_attrs =
             self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
         if !has_cfg_attrs {
@@ -126,37 +151,8 @@
         let cfg_options = krate.cfg_options(db);
         let new_attrs = self
             .iter()
-            .flat_map(|attr| -> SmallVec<[_; 1]> {
-                let is_cfg_attr = attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
-                if !is_cfg_attr {
-                    return smallvec![attr.clone()];
-                }
-
-                let subtree = match attr.token_tree_value() {
-                    Some(it) => it,
-                    _ => return smallvec![attr.clone()],
-                };
-
-                let (cfg, parts) = match parse_cfg_attr_input(subtree) {
-                    Some(it) => it,
-                    None => return smallvec![attr.clone()],
-                };
-                let index = attr.id;
-                let attrs = parts
-                    .enumerate()
-                    .take(1 << AttrId::CFG_ATTR_BITS)
-                    .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
-
-                let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
-                let cfg = CfgExpr::parse(&cfg);
-                if cfg_options.check(&cfg) == Some(false) {
-                    smallvec![]
-                } else {
-                    cov_mark::hit!(cfg_attr_active);
-                    attrs.collect()
-                }
-            })
+            .cloned()
+            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
             .collect::<Vec<_>>();
         let entries = if new_attrs.is_empty() {
             None
@@ -316,6 +312,42 @@ impl Attr {
     pub fn path(&self) -> &ModPath {
         &self.path
     }
+
+    pub fn expand_cfg_attr(
+        self,
+        db: &dyn ExpandDatabase,
+        cfg_options: &CfgOptions,
+    ) -> impl IntoIterator<Item = Self> {
+        let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
+        if !is_cfg_attr {
+            return smallvec![self.clone()];
+        }
+
+        let subtree = match self.token_tree_value() {
+            Some(it) => it,
+            _ => return smallvec![self.clone()],
+        };
+
+        let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+            Some(it) => it,
+            None => return smallvec![self.clone()],
+        };
+        let index = self.id;
+        let attrs = parts
+            .enumerate()
+            .take(1 << AttrId::CFG_ATTR_BITS)
+            .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
+
+        let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
+        let cfg = CfgExpr::parse(&cfg);
+        if cfg_options.check(&cfg) == Some(false) {
+            smallvec![]
+        } else {
+            cov_mark::hit!(cfg_attr_active);
+            attrs.collect::<SmallVec<[_; 1]>>()
+        }
+    }
 }

 impl Attr {
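
Attr::expand_cfg_attr hoists the per-attribute cfg_attr expansion out of the old RawAttrs::filter so the same logic can be reused from attrs_iter_expanded. Its observable behaviour, sketched on a hypothetical attribute (the feature name and the process helper are made up):

    // With `feature = "serde"` enabled in cfg_options, a hypothetical
    //     #[cfg_attr(feature = "serde", derive(Serialize), serde(rename_all = "camelCase"))]
    // yields the two inner attributes, each with an AttrId derived via
    // index.with_cfg_attr(idx) and capped at 1 << AttrId::CFG_ATTR_BITS entries:
    //     #[derive(Serialize)]
    //     #[serde(rename_all = "camelCase")]
    // With the feature disabled it yields nothing (smallvec![]); any attribute
    // that is not cfg_attr is passed through unchanged.
    for attr in attr.expand_cfg_attr(db, cfg_options) {
        process(attr); // `process` stands in for whatever the caller does next
    }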


@@ -82,7 +82,7 @@ impl DeclarativeMacroExpander {
         let transparency = |node| {
             // ... would be nice to have the item tree here
-            let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
+            let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
             match attrs
                 .iter()
                 .find(|it| {


@@ -90,9 +90,11 @@ pub fn parallel_prime_caches(
     };

     for id in 0..num_worker_threads {
-        stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
+        stdx::thread::Builder::new(
+            stdx::thread::ThreadIntent::Worker,
+            format!("PrimeCaches#{id}"),
+        )
         .allow_leak(true)
-        .name(format!("PrimeCaches#{id}"))
         .spawn({
             let worker = prime_caches_worker.clone();
             let db = db.clone();


@@ -182,10 +182,8 @@ fn with_extra_thread(
     thread_intent: stdx::thread::ThreadIntent,
     f: impl FnOnce() -> anyhow::Result<()> + Send + 'static,
 ) -> anyhow::Result<()> {
-    let handle = stdx::thread::Builder::new(thread_intent)
-        .name(thread_name.into())
-        .stack_size(STACK_SIZE)
-        .spawn(f)?;
+    let handle =
+        stdx::thread::Builder::new(thread_intent, thread_name).stack_size(STACK_SIZE).spawn(f)?;

     handle.join()?;


@@ -15,8 +15,10 @@ impl flags::Diagnostics {
     pub fn run(self) -> anyhow::Result<()> {
         const STACK_SIZE: usize = 1024 * 1024 * 8;

-        let handle = stdx::thread::Builder::new(stdx::thread::ThreadIntent::LatencySensitive)
-            .name("BIG_STACK_THREAD".into())
+        let handle = stdx::thread::Builder::new(
+            stdx::thread::ThreadIntent::LatencySensitive,
+            "BIG_STACK_THREAD",
+        )
         .stack_size(STACK_SIZE)
         .spawn(|| self.run_())
         .unwrap();


@@ -15,8 +15,10 @@ impl flags::UnresolvedReferences {
     pub fn run(self) -> anyhow::Result<()> {
         const STACK_SIZE: usize = 1024 * 1024 * 8;

-        let handle = stdx::thread::Builder::new(stdx::thread::ThreadIntent::LatencySensitive)
-            .name("BIG_STACK_THREAD".into())
+        let handle = stdx::thread::Builder::new(
+            stdx::thread::ThreadIntent::LatencySensitive,
+            "BIG_STACK_THREAD",
+        )
         .stack_size(STACK_SIZE)
         .spawn(|| self.run_())
         .unwrap();


@@ -148,8 +148,8 @@ impl<T: Sized + Send + 'static> CommandHandle<T> {
         let stderr = child.0.stderr().take().unwrap();
         let actor = CargoActor::<T>::new(parser, sender, stdout, stderr);

-        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("CommandHandle".to_owned())
+        let thread =
+            stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "CommandHandle")
             .spawn(move || actor.run())
             .expect("failed to spawn thread");
         Ok(CommandHandle { program, arguments, current_dir, child, thread, _phantom: PhantomData })


@@ -133,8 +133,8 @@ impl FlycheckHandle {
         let actor =
             FlycheckActor::new(id, sender, config, sysroot_root, workspace_root, manifest_path);
         let (sender, receiver) = unbounded::<StateChange>();
-        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("Flycheck".to_owned())
+        let thread =
+            stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, format!("Flycheck{id}"))
             .spawn(move || actor.run(receiver))
             .expect("failed to spawn thread");
         FlycheckHandle { id, sender, _thread: thread }


@@ -298,8 +298,7 @@ impl Server {
     ) -> Server {
         let (connection, client) = Connection::memory();

-        let _thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("test server".to_owned())
+        let _thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "test server")
             .spawn(move || main_loop(config, connection).unwrap())
             .expect("failed to spawn a thread");


@@ -26,12 +26,12 @@ pub use pool::Pool;
 /// # Panics
 ///
 /// Panics if failed to spawn the thread.
-pub fn spawn<F, T>(intent: ThreadIntent, f: F) -> JoinHandle<T>
+pub fn spawn<F, T>(intent: ThreadIntent, name: String, f: F) -> JoinHandle<T>
 where
     F: (FnOnce() -> T) + Send + 'static,
     T: Send + 'static,
 {
-    Builder::new(intent).spawn(f).expect("failed to spawn thread")
+    Builder::new(intent, name).spawn(f).expect("failed to spawn thread")
 }

 pub struct Builder {
@@ -42,13 +42,8 @@ pub struct Builder {

 impl Builder {
     #[must_use]
-    pub fn new(intent: ThreadIntent) -> Self {
-        Self { intent, inner: jod_thread::Builder::new(), allow_leak: false }
-    }
-
-    #[must_use]
-    pub fn name(self, name: String) -> Self {
-        Self { inner: self.inner.name(name), ..self }
+    pub fn new(intent: ThreadIntent, name: impl Into<String>) -> Self {
+        Self { intent, inner: jod_thread::Builder::new().name(name.into()), allow_leak: false }
     }

     #[must_use]
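
With the name now a required constructor argument, every thread spawned through stdx::thread gets a name and the separate .name() builder step disappears. A minimal usage sketch of the new signature (the thread name and closure are placeholders):

    let handle = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "ExampleWorker")
        .stack_size(8 * 1024 * 1024) // optional, unchanged from before
        .allow_leak(true)            // optional, unchanged from before
        .spawn(|| { /* placeholder work */ })
        .expect("failed to spawn thread");
    handle.join();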


@@ -50,10 +50,9 @@ impl Pool {
         let extant_tasks = Arc::new(AtomicUsize::new(0));

         let mut handles = Vec::with_capacity(threads);
-        for _ in 0..threads {
-            let handle = Builder::new(INITIAL_INTENT)
+        for idx in 0..threads {
+            let handle = Builder::new(INITIAL_INTENT, format!("Worker{idx}",))
                 .stack_size(STACK_SIZE)
-                .name("Worker".into())
                 .allow_leak(true)
                 .spawn({
                     let extant_tasks = Arc::clone(&extant_tasks);


@@ -38,8 +38,7 @@ impl loader::Handle for NotifyHandle {
     fn spawn(sender: loader::Sender) -> NotifyHandle {
         let actor = NotifyActor::new(sender);
         let (sender, receiver) = unbounded::<Message>();
-        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("VfsLoader".to_owned())
+        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "VfsLoader")
             .spawn(move || actor.run(receiver))
             .expect("failed to spawn thread");
         NotifyHandle { sender, _thread: thread }