Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-28 10:39:45 +00:00)

Cleanup cfg check handling in expression store lowering

This commit is contained in:
  parent 7d9b839f9c
  commit d11dbf648f

17 changed files with 234 additions and 188 deletions
@@ -1,6 +1,6 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.
 
-use std::{borrow::Cow, hash::Hash, ops};
+use std::{borrow::Cow, convert::identity, hash::Hash, ops};
 
 use base_db::Crate;
 use cfg::{CfgExpr, CfgOptions};
@@ -8,6 +8,7 @@ use either::Either;
 use hir_expand::{
     HirFileId, InFile,
     attrs::{Attr, AttrId, RawAttrs, collect_attrs},
+    span_map::SpanMapRef,
 };
 use intern::{Symbol, sym};
 use la_arena::{ArenaMap, Idx, RawIdx};
@@ -45,8 +46,27 @@ impl Attrs {
         (**self).iter().find(|attr| attr.id == id)
     }
 
-    pub(crate) fn filter(db: &dyn DefDatabase, krate: Crate, raw_attrs: RawAttrs) -> Attrs {
-        Attrs(raw_attrs.filter(db, krate))
+    pub(crate) fn expand_cfg_attr(
+        db: &dyn DefDatabase,
+        krate: Crate,
+        raw_attrs: RawAttrs,
+    ) -> Attrs {
+        Attrs(raw_attrs.expand_cfg_attr(db, krate))
+    }
+
+    pub(crate) fn is_cfg_enabled_for(
+        db: &dyn DefDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> Result<(), CfgExpr> {
+        RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
+            .filter_map(|attr| attr.cfg())
+            .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
+                true => None,
+                false => Some(cfg),
+            })
+            .map_or(Ok(()), Err)
     }
 }
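The new `Attrs::is_cfg_enabled_for` above encodes the rule "an item stays enabled unless some `#[cfg]` predicate is known to be false": `CfgOptions::check` yields `Option<bool>` (with `None` for atoms the build knows nothing about), `.is_none_or(identity)` treats both `None` and `Some(true)` as enabled, and the first failing predicate is handed back as the `Err` value. A minimal, self-contained sketch of that decision rule, using toy stand-ins for the `cfg` crate's `CfgExpr`/`CfgOptions` (the type shapes here are assumptions for illustration, not rust-analyzer's API):

```rust
use std::collections::HashMap;

/// Toy stand-ins for `cfg::CfgExpr` / `cfg::CfgOptions`; shapes are assumed for illustration.
#[derive(Debug, Clone, PartialEq)]
struct CfgExpr(String); // e.g. the predicate inside `#[cfg(...)]`

struct CfgOptions {
    /// `true`/`false` for atoms the build knows about; absent means "unknown".
    known: HashMap<String, bool>,
}

impl CfgOptions {
    /// `Some(true)` = known enabled, `Some(false)` = known disabled, `None` = unknown.
    fn check(&self, cfg: &CfgExpr) -> Option<bool> {
        self.known.get(&cfg.0).copied()
    }
}

/// Mirrors the rule in the hunk above: a node stays enabled unless some `#[cfg]`
/// predicate is *known to be false*; the first failing predicate is returned as
/// the error so the caller can attach a diagnostic to it.
fn is_cfg_enabled_for(cfgs: &[CfgExpr], opts: &CfgOptions) -> Result<(), CfgExpr> {
    cfgs.iter()
        .find(|cfg| !opts.check(cfg).is_none_or(|enabled| enabled))
        .cloned()
        .map_or(Ok(()), Err)
}

fn main() {
    let opts = CfgOptions {
        known: HashMap::from([("unix".to_owned(), true), ("windows".to_owned(), false)]),
    };

    // Unknown atoms (`fuzzing`) do not disable anything; a known-false one does.
    assert_eq!(is_cfg_enabled_for(&[CfgExpr("fuzzing".into())], &opts), Ok(()));
    assert_eq!(
        is_cfg_enabled_for(&[CfgExpr("unix".into()), CfgExpr("windows".into())], &opts),
        Err(CfgExpr("windows".into()))
    );
}
```

Returning the offending expression instead of a bare `false` is what lets the lowering code later in this commit attach an `InactiveCode` diagnostic to the exact predicate that disabled the node.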
@@ -522,38 +542,41 @@ impl AttrsWithOwner {
             GenericParamId::ConstParamId(it) => {
                 let src = it.parent().child_source(db);
                 // FIXME: We should be never getting `None` here.
-                match src.value.get(it.local_id()) {
-                    Some(val) => RawAttrs::from_attrs_owner(
+                return Attrs(match src.value.get(it.local_id()) {
+                    Some(val) => RawAttrs::new_expanded(
                         db,
-                        src.with_value(val),
+                        val,
                         db.span_map(src.file_id).as_ref(),
+                        def.krate(db).cfg_options(db),
                     ),
                     None => RawAttrs::EMPTY,
-                }
+                });
             }
             GenericParamId::TypeParamId(it) => {
                 let src = it.parent().child_source(db);
                 // FIXME: We should be never getting `None` here.
-                match src.value.get(it.local_id()) {
-                    Some(val) => RawAttrs::from_attrs_owner(
+                return Attrs(match src.value.get(it.local_id()) {
+                    Some(val) => RawAttrs::new_expanded(
                         db,
-                        src.with_value(val),
+                        val,
                         db.span_map(src.file_id).as_ref(),
+                        def.krate(db).cfg_options(db),
                     ),
                     None => RawAttrs::EMPTY,
-                }
+                });
             }
             GenericParamId::LifetimeParamId(it) => {
                 let src = it.parent.child_source(db);
                 // FIXME: We should be never getting `None` here.
-                match src.value.get(it.local_id) {
-                    Some(val) => RawAttrs::from_attrs_owner(
+                return Attrs(match src.value.get(it.local_id) {
+                    Some(val) => RawAttrs::new_expanded(
                         db,
-                        src.with_value(val),
+                        val,
                         db.span_map(src.file_id).as_ref(),
+                        def.krate(db).cfg_options(db),
                     ),
                     None => RawAttrs::EMPTY,
-                }
+                });
             }
         },
         AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
@@ -561,7 +584,7 @@ impl AttrsWithOwner {
         AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
     };
 
-    let attrs = raw_attrs.filter(db, def.krate(db));
+    let attrs = raw_attrs.expand_cfg_attr(db, def.krate(db));
     Attrs(attrs)
 }
@@ -3,10 +3,11 @@
 use std::mem;
 
 use base_db::Crate;
+use cfg::CfgOptions;
 use drop_bomb::DropBomb;
 use hir_expand::{
     ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
-    attrs::RawAttrs, eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
+    eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
 };
 use span::{AstIdMap, Edition, SyntaxContext};
 use syntax::ast::HasAttrs;
@@ -64,22 +65,13 @@ impl Expander {
         }
     }
 
-    pub(super) fn attrs(
-        &self,
-        db: &dyn DefDatabase,
-        krate: Crate,
-        has_attrs: &dyn HasAttrs,
-    ) -> Attrs {
-        Attrs::filter(db, krate, RawAttrs::new(db, has_attrs, self.span_map.as_ref()))
-    }
-
     pub(super) fn is_cfg_enabled(
         &self,
         db: &dyn DefDatabase,
-        krate: Crate,
         has_attrs: &dyn HasAttrs,
-    ) -> bool {
-        self.attrs(db, krate, has_attrs).is_cfg_enabled(krate.cfg_options(db))
+        cfg_options: &CfgOptions,
+    ) -> Result<(), cfg::CfgExpr> {
+        Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
    }
 
     pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {
@@ -7,6 +7,7 @@ mod path;
 
 use std::mem;
 
+use cfg::CfgOptions;
 use either::Either;
 use hir_expand::{
     HirFileId, InFile, Lookup, MacroDefId,
@@ -81,8 +82,6 @@ pub(super) fn lower_body(
     // even though they should be the same. Also, when the body comes from multiple expansions, their
     // hygiene is different.
 
-    let krate = module.krate();
-
     let mut self_param = None;
     let mut source_map_self_param = None;
     let mut params = vec![];
@@ -100,9 +99,8 @@ pub(super) fn lower_body(
     // and skip the body.
     if skip_body {
         if let Some(param_list) = parameters {
-            if let Some(self_param_syn) = param_list
-                .self_param()
-                .filter(|self_param| collector.expander.is_cfg_enabled(db, krate, self_param))
+            if let Some(self_param_syn) =
+                param_list.self_param().filter(|self_param| collector.check_cfg(self_param))
             {
                 let is_mutable =
                     self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
@@ -119,10 +117,7 @@ pub(super) fn lower_body(
                 source_map_self_param =
                     Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
             }
-            let count = param_list
-                .params()
-                .filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
-                .count();
+            let count = param_list.params().filter(|it| collector.check_cfg(it)).count();
             params = (0..count).map(|_| collector.missing_pat()).collect();
         };
         let body_expr = collector.missing_expr();
@@ -138,9 +133,7 @@ pub(super) fn lower_body(
     }
 
     if let Some(param_list) = parameters {
-        if let Some(self_param_syn) =
-            param_list.self_param().filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
-        {
+        if let Some(self_param_syn) = param_list.self_param().filter(|it| collector.check_cfg(it)) {
             let is_mutable =
                 self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
             let hygiene = self_param_syn
@@ -157,7 +150,7 @@ pub(super) fn lower_body(
         }
 
         for param in param_list.params() {
-            if collector.expander.is_cfg_enabled(db, krate, &param) {
+            if collector.check_cfg(&param) {
                 let param_pat = collector.collect_pat_top(param.pat());
                 params.push(param_pat);
             }
@@ -346,7 +339,7 @@ pub(crate) fn lower_function(
     collector.collect_impl_trait(&mut expr_collector, |collector, mut impl_trait_lower_fn| {
         if let Some(param_list) = fn_.value.param_list() {
             if let Some(param) = param_list.self_param() {
-                let enabled = collector.expander.is_cfg_enabled(db, module.krate(), &param);
+                let enabled = collector.check_cfg(&param);
                 if enabled {
                     has_self_param = true;
                     params.push(match param.ty() {
@@ -381,7 +374,7 @@ pub(crate) fn lower_function(
             }
             let p = param_list
                 .params()
-                .filter(|param| collector.expander.is_cfg_enabled(db, module.krate(), param))
+                .filter(|param| collector.check_cfg(param))
                 .filter(|param| {
                     let is_variadic = param.dotdotdot_token().is_some();
                     has_variadic |= is_variadic;
@@ -441,6 +434,7 @@ pub(crate) fn lower_function(
 
 pub struct ExprCollector<'db> {
     db: &'db dyn DefDatabase,
+    cfg_options: &'db CfgOptions,
     expander: Expander,
     def_map: Arc<DefMap>,
     local_def_map: Arc<LocalDefMap>,
@@ -553,6 +547,7 @@ impl ExprCollector<'_> {
         let expander = Expander::new(db, current_file_id, &def_map);
         ExprCollector {
             db,
+            cfg_options: module.krate().cfg_options(db),
             module,
             def_map,
             local_def_map,
@@ -1026,7 +1021,9 @@ impl ExprCollector<'_> {
     /// Returns `None` if and only if the expression is `#[cfg]`d out.
     fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
         let syntax_ptr = AstPtr::new(&expr);
-        self.check_cfg(&expr)?;
+        if !self.check_cfg(&expr) {
+            return None;
+        }
 
         // FIXME: Move some of these arms out into separate methods for clarity
         Some(match expr {
@@ -1114,6 +1111,7 @@ impl ExprCollector<'_> {
             ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e),
             ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
             ast::Expr::CallExpr(e) => {
+                // FIXME: Remove this once we drop support for <1.86, https://github.com/rust-lang/rust/commit/ac9cb908ac4301dfc25e7a2edee574320022ae2c
                 let is_rustc_box = {
                     let attrs = e.attrs();
                     attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")
@@ -1156,13 +1154,17 @@ impl ExprCollector<'_> {
                     match_arm_list
                         .arms()
                         .filter_map(|arm| {
-                            self.check_cfg(&arm).map(|()| MatchArm {
+                            if self.check_cfg(&arm) {
+                                Some(MatchArm {
                                     pat: self.collect_pat_top(arm.pat()),
                                     expr: self.collect_expr_opt(arm.expr()),
                                     guard: arm
                                         .guard()
                                         .map(|guard| self.collect_expr_opt(guard.condition())),
                                 })
+                            } else {
+                                None
+                            }
                         })
                         .collect()
                 } else {
@@ -1230,7 +1232,9 @@ impl ExprCollector<'_> {
                 let fields = nfl
                     .fields()
                     .filter_map(|field| {
-                        self.check_cfg(&field)?;
+                        if !self.check_cfg(&field) {
+                            return None;
+                        }
 
                         let name = field.field_name()?.as_name();
 
@@ -1483,7 +1487,9 @@ impl ExprCollector<'_> {
     }
 
     fn maybe_collect_expr_as_pat(&mut self, expr: &ast::Expr) -> Option<PatId> {
-        self.check_cfg(expr)?;
+        if !self.check_cfg(expr) {
+            return None;
+        }
         let syntax_ptr = AstPtr::new(expr);
 
         let result = match expr {
@@ -1558,7 +1564,9 @@ impl ExprCollector<'_> {
                 let args = record_field_list
                     .fields()
                     .filter_map(|f| {
-                        self.check_cfg(&f)?;
+                        if !self.check_cfg(&f) {
+                            return None;
+                        }
                         let field_expr = f.expr()?;
                         let pat = self.collect_expr_as_pat(field_expr);
                         let name = f.field_name()?.as_name();
@@ -2044,7 +2052,7 @@ impl ExprCollector<'_> {
     fn collect_stmt(&mut self, statements: &mut Vec<Statement>, s: ast::Stmt) {
         match s {
             ast::Stmt::LetStmt(stmt) => {
-                if self.check_cfg(&stmt).is_none() {
+                if !self.check_cfg(&stmt) {
                     return;
                 }
                 let pat = self.collect_pat_top(stmt.pat());
@@ -2059,7 +2067,7 @@ impl ExprCollector<'_> {
             ast::Stmt::ExprStmt(stmt) => {
                 let expr = stmt.expr();
                 match &expr {
-                    Some(expr) if self.check_cfg(expr).is_none() => return,
+                    Some(expr) if !self.check_cfg(expr) => return,
                     _ => (),
                 }
                 let has_semi = stmt.semicolon_token().is_some();
@@ -2074,7 +2082,7 @@ impl ExprCollector<'_> {
                 }
             }
             ast::Stmt::Item(ast::Item::MacroDef(macro_)) => {
-                if self.check_cfg(&macro_).is_none() {
+                if !self.check_cfg(&macro_) {
                     return;
                 }
                 let Some(name) = macro_.name() else {
@@ -2086,7 +2094,7 @@ impl ExprCollector<'_> {
                 self.collect_macro_def(statements, macro_id);
             }
             ast::Stmt::Item(ast::Item::MacroRules(macro_)) => {
-                if self.check_cfg(&macro_).is_none() {
+                if !self.check_cfg(&macro_) {
                     return;
                 }
                 let Some(name) = macro_.name() else {
@@ -2360,7 +2368,9 @@ impl ExprCollector<'_> {
                 let args = record_pat_field_list
                     .fields()
                     .filter_map(|f| {
-                        self.check_cfg(&f)?;
+                        if !self.check_cfg(&f) {
+                            return None;
+                        }
                        let ast_pat = f.pat()?;
                        let pat = self.collect_pat(ast_pat, binding_list);
                        let name = f.field_name()?.as_name();
@@ -2536,25 +2546,18 @@ impl ExprCollector<'_> {
 
     /// Returns `None` (and emits diagnostics) when `owner` if `#[cfg]`d out, and `Some(())` when
     /// not.
-    fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> Option<()> {
-        let attrs = self.expander.attrs(self.db, self.module.krate(), owner);
-        match attrs.cfg() {
-            Some(cfg) => {
-                let cfg_options = self.module.krate().cfg_options(self.db);
-
-                if cfg_options.check(&cfg) != Some(false) {
-                    return Some(());
-                }
-
+    fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
+        let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
+        match enabled {
+            Ok(()) => true,
+            Err(cfg) => {
                 self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
                     node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
                     cfg,
-                    opts: cfg_options.clone(),
+                    opts: self.cfg_options.clone(),
                 });
 
-                None
+                false
             }
-            None => Some(()),
         }
     }
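After this hunk, `check_cfg` answers a plain yes/no question and records the `InactiveCode` diagnostic as a side effect, so every call site collapses to `if !self.check_cfg(..)` or a `filter_map` guard instead of threading `Option<()>` through `?`. A rough, self-contained sketch of that shape (the `Collector` and `InactiveCode` types below are simplified stand-ins, not the real `ExprCollector`):

```rust
/// Illustrative stand-in for the diagnostic payload; the real one stores a syntax
/// pointer, the failing `CfgExpr`, and the crate's `CfgOptions`.
#[derive(Debug)]
struct InactiveCode {
    node: String,
}

struct Collector {
    enabled_cfgs: Vec<&'static str>,
    diagnostics: Vec<InactiveCode>,
}

impl Collector {
    /// Returns `false` (and records a diagnostic) when the node is `#[cfg]`d out,
    /// mirroring the shape of the new `ExprCollector::check_cfg`.
    fn check_cfg(&mut self, node: &str, cfg: Option<&'static str>) -> bool {
        match cfg {
            None => true,
            Some(cfg) if self.enabled_cfgs.contains(&cfg) => true,
            Some(_) => {
                self.diagnostics.push(InactiveCode { node: node.to_owned() });
                false
            }
        }
    }
}

fn main() {
    let mut c = Collector { enabled_cfgs: vec!["unix"], diagnostics: Vec::new() };
    let arms = [("arm0", None), ("arm1", Some("windows")), ("arm2", Some("unix"))];

    // Call sites keep their `filter_map` shape: disabled nodes yield `None` and are skipped,
    // while the diagnostic for the skipped node is recorded as a side effect.
    let lowered: Vec<_> = arms
        .iter()
        .filter_map(|&(name, cfg)| if c.check_cfg(name, cfg) { Some(name) } else { None })
        .collect();

    assert_eq!(lowered, ["arm0", "arm2"]);
    assert_eq!(c.diagnostics.len(), 1);
}
```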
@@ -110,7 +110,7 @@ impl GenericParamsCollector {
 
     fn lower_param_list(&mut self, ec: &mut ExprCollector<'_>, params: ast::GenericParamList) {
         for generic_param in params.generic_params() {
-            let enabled = ec.expander.is_cfg_enabled(ec.db, ec.module.krate(), &generic_param);
+            let enabled = ec.check_cfg(&generic_param);
             if !enabled {
                 continue;
             }
@@ -179,7 +179,7 @@ impl ItemTree {
 
     /// Returns the inner attributes of the source file.
     pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
-        Attrs::filter(
+        Attrs::expand_cfg_attr(
             db,
             krate,
             self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(),
@@ -191,7 +191,7 @@ impl ItemTree {
     }
 
     pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: Crate, of: AttrOwner) -> Attrs {
-        Attrs::filter(db, krate, self.raw_attrs(of).clone())
+        Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
     }
 
     /// Returns a count of a few, expensive items.
@@ -2,7 +2,7 @@
 use std::{borrow::Cow, fmt, ops};
 
 use base_db::Crate;
-use cfg::CfgExpr;
+use cfg::{CfgExpr, CfgOptions};
 use either::Either;
 use intern::{Interned, Symbol, sym};
@@ -14,11 +14,10 @@ use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
 use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
 use triomphe::ThinArc;
 
-use crate::name::Name;
 use crate::{
     InFile,
     db::ExpandDatabase,
     mod_path::ModPath,
+    name::Name,
     span_map::SpanMapRef,
     tt::{self, TopSubtree, token_to_literal},
 };
@@ -49,15 +48,48 @@ impl RawAttrs {
         owner: &dyn ast::HasAttrs,
         span_map: SpanMapRef<'_>,
     ) -> Self {
-        let entries: Vec<_> = collect_attrs(owner)
-            .filter_map(|(id, attr)| match attr {
+        let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
+
+        let entries = if entries.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
+        };
+
+        RawAttrs { entries }
+    }
+
+    /// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
+    pub fn new_expanded(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+        cfg_options: &CfgOptions,
+    ) -> Self {
+        let entries: Vec<_> =
+            Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
+
+        let entries = if entries.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
+        };
+
+        RawAttrs { entries }
+    }
+
+    pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
+        db: &dyn ExpandDatabase,
+        owner: &dyn ast::HasAttrs,
+        span_map: SpanMapRef<'_>,
+    ) -> impl Iterator<Item = Attr> {
+        collect_attrs(owner).filter_map(move |(id, attr)| match attr {
            Either::Left(attr) => {
                attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
            }
-            Either::Right(comment) => comment.doc_comment().map(|doc| {
+            Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
                let span = span_map.span_for_range(comment.syntax().text_range());
-                let (text, kind) =
-                    desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
+                let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
                Attr {
                    id,
                    input: Some(Box::new(AttrInput::Literal(tt::Literal {
@@ -70,24 +102,18 @@ impl RawAttrs {
                    ctxt: span.ctx,
                }
            }),
+            Either::Right(_) => None,
        })
-            .collect();
-
-        let entries = if entries.is_empty() {
-            None
-        } else {
-            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
-        };
-
-        RawAttrs { entries }
    }
 
-    pub fn from_attrs_owner(
+    pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
        db: &dyn ExpandDatabase,
-        owner: InFile<&dyn ast::HasAttrs>,
+        owner: &dyn ast::HasAttrs,
        span_map: SpanMapRef<'_>,
-    ) -> Self {
-        Self::new(db, owner.value, span_map)
+        cfg_options: &CfgOptions,
+    ) -> impl Iterator<Item = Attr> {
+        Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
+            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
    }
 
    pub fn merge(&self, other: Self) -> Self {
@@ -114,9 +140,8 @@ impl RawAttrs {
        }
    }
 
-    /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
-    // FIXME: This should return a different type, signaling it was filtered?
-    pub fn filter(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
+    /// Processes `cfg_attr`s
+    pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
        let has_cfg_attrs =
            self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
        if !has_cfg_attrs {
@@ -126,37 +151,8 @@ impl RawAttrs {
        let cfg_options = krate.cfg_options(db);
        let new_attrs = self
            .iter()
-            .flat_map(|attr| -> SmallVec<[_; 1]> {
-                let is_cfg_attr = attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
-                if !is_cfg_attr {
-                    return smallvec![attr.clone()];
-                }
-
-                let subtree = match attr.token_tree_value() {
-                    Some(it) => it,
-                    _ => return smallvec![attr.clone()],
-                };
-
-                let (cfg, parts) = match parse_cfg_attr_input(subtree) {
-                    Some(it) => it,
-                    None => return smallvec![attr.clone()],
-                };
-                let index = attr.id;
-                let attrs = parts
-                    .enumerate()
-                    .take(1 << AttrId::CFG_ATTR_BITS)
-                    .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
-
-                let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
-                let cfg = CfgExpr::parse(&cfg);
-                if cfg_options.check(&cfg) == Some(false) {
-                    smallvec![]
-                } else {
-                    cov_mark::hit!(cfg_attr_active);
-
-                    attrs.collect()
-                }
-            })
+            .cloned()
+            .flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
            .collect::<Vec<_>>();
        let entries = if new_attrs.is_empty() {
            None
@@ -316,6 +312,42 @@ impl Attr {
    pub fn path(&self) -> &ModPath {
        &self.path
    }
+
+    pub fn expand_cfg_attr(
+        self,
+        db: &dyn ExpandDatabase,
+        cfg_options: &CfgOptions,
+    ) -> impl IntoIterator<Item = Self> {
+        let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
+        if !is_cfg_attr {
+            return smallvec![self.clone()];
+        }
+
+        let subtree = match self.token_tree_value() {
+            Some(it) => it,
+            _ => return smallvec![self.clone()],
+        };
+
+        let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+            Some(it) => it,
+            None => return smallvec![self.clone()],
+        };
+        let index = self.id;
+        let attrs = parts
+            .enumerate()
+            .take(1 << AttrId::CFG_ATTR_BITS)
+            .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
+
+        let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
+        let cfg = CfgExpr::parse(&cfg);
+        if cfg_options.check(&cfg) == Some(false) {
+            smallvec![]
+        } else {
+            cov_mark::hit!(cfg_attr_active);
+
+            attrs.collect::<SmallVec<[_; 1]>>()
+        }
+    }
 }
 
 impl Attr {
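Moving the `cfg_attr` logic onto a per-attribute `Attr::expand_cfg_attr` makes the handling compositional: one attribute expands to zero, one, or several attributes, so both `RawAttrs::expand_cfg_attr` and `attrs_iter_expanded` become a simple `flat_map` over it. A standalone sketch of that expansion idea with a toy attribute enum (the real implementation parses token trees and consults `CfgOptions`; the names and shapes below are illustrative only):

```rust
/// Illustrative attribute model; the real `Attr` works on token trees.
#[derive(Debug, Clone, PartialEq)]
enum Attr {
    Plain(&'static str),
    /// `#[cfg_attr(<predicate>, <attr>, <attr>, ...)]`
    CfgAttr { predicate: &'static str, attrs: Vec<&'static str> },
}

fn cfg_is_enabled(predicate: &str) -> bool {
    // Stand-in for `CfgOptions::check`; only `unix` is enabled in this sketch.
    predicate == "unix"
}

/// Mirrors the shape of the new `Attr::expand_cfg_attr`: a single attribute expands
/// to zero, one, or several attributes, so callers can simply `flat_map` over it.
fn expand_cfg_attr(attr: Attr) -> Vec<Attr> {
    match attr {
        Attr::Plain(_) => vec![attr],
        Attr::CfgAttr { predicate, attrs } if cfg_is_enabled(predicate) => {
            attrs.into_iter().map(Attr::Plain).collect()
        }
        Attr::CfgAttr { .. } => vec![],
    }
}

fn main() {
    let raw = vec![
        Attr::Plain("inline"),
        Attr::CfgAttr { predicate: "unix", attrs: vec!["must_use", "no_mangle"] },
        Attr::CfgAttr { predicate: "windows", attrs: vec!["cold"] },
    ];

    // Matches the `attrs_iter(..).flat_map(|attr| attr.expand_cfg_attr(..))` shape in the diff above.
    let expanded: Vec<_> = raw.into_iter().flat_map(expand_cfg_attr).collect();

    assert_eq!(
        expanded,
        [Attr::Plain("inline"), Attr::Plain("must_use"), Attr::Plain("no_mangle")]
    );
}
```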
@@ -82,7 +82,7 @@ impl DeclarativeMacroExpander {
 
        let transparency = |node| {
            // ... would be nice to have the item tree here
-            let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
+            let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
            match attrs
                .iter()
                .find(|it| {
@@ -90,9 +90,11 @@ pub fn parallel_prime_caches(
    };
 
    for id in 0..num_worker_threads {
-        stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
+        stdx::thread::Builder::new(
+            stdx::thread::ThreadIntent::Worker,
+            format!("PrimeCaches#{id}"),
+        )
        .allow_leak(true)
-        .name(format!("PrimeCaches#{id}"))
        .spawn({
            let worker = prime_caches_worker.clone();
            let db = db.clone();
@@ -182,10 +182,8 @@ fn with_extra_thread(
    thread_intent: stdx::thread::ThreadIntent,
    f: impl FnOnce() -> anyhow::Result<()> + Send + 'static,
 ) -> anyhow::Result<()> {
-    let handle = stdx::thread::Builder::new(thread_intent)
-        .name(thread_name.into())
-        .stack_size(STACK_SIZE)
-        .spawn(f)?;
+    let handle =
+        stdx::thread::Builder::new(thread_intent, thread_name).stack_size(STACK_SIZE).spawn(f)?;
 
    handle.join()?;
 
@@ -15,8 +15,10 @@ impl flags::Diagnostics {
    pub fn run(self) -> anyhow::Result<()> {
        const STACK_SIZE: usize = 1024 * 1024 * 8;
 
-        let handle = stdx::thread::Builder::new(stdx::thread::ThreadIntent::LatencySensitive)
-            .name("BIG_STACK_THREAD".into())
+        let handle = stdx::thread::Builder::new(
+            stdx::thread::ThreadIntent::LatencySensitive,
+            "BIG_STACK_THREAD",
+        )
        .stack_size(STACK_SIZE)
        .spawn(|| self.run_())
        .unwrap();
@@ -15,8 +15,10 @@ impl flags::UnresolvedReferences {
    pub fn run(self) -> anyhow::Result<()> {
        const STACK_SIZE: usize = 1024 * 1024 * 8;
 
-        let handle = stdx::thread::Builder::new(stdx::thread::ThreadIntent::LatencySensitive)
-            .name("BIG_STACK_THREAD".into())
+        let handle = stdx::thread::Builder::new(
+            stdx::thread::ThreadIntent::LatencySensitive,
+            "BIG_STACK_THREAD",
+        )
        .stack_size(STACK_SIZE)
        .spawn(|| self.run_())
        .unwrap();
@@ -148,8 +148,8 @@ impl<T: Sized + Send + 'static> CommandHandle<T> {
        let stderr = child.0.stderr().take().unwrap();
 
        let actor = CargoActor::<T>::new(parser, sender, stdout, stderr);
-        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("CommandHandle".to_owned())
+        let thread =
+            stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "CommandHandle")
                .spawn(move || actor.run())
                .expect("failed to spawn thread");
        Ok(CommandHandle { program, arguments, current_dir, child, thread, _phantom: PhantomData })
@@ -133,8 +133,8 @@ impl FlycheckHandle {
        let actor =
            FlycheckActor::new(id, sender, config, sysroot_root, workspace_root, manifest_path);
        let (sender, receiver) = unbounded::<StateChange>();
-        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("Flycheck".to_owned())
+        let thread =
+            stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, format!("Flycheck{id}"))
                .spawn(move || actor.run(receiver))
                .expect("failed to spawn thread");
        FlycheckHandle { id, sender, _thread: thread }
@@ -298,8 +298,7 @@ impl Server {
    ) -> Server {
        let (connection, client) = Connection::memory();
 
-        let _thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("test server".to_owned())
+        let _thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "test server")
            .spawn(move || main_loop(config, connection).unwrap())
            .expect("failed to spawn a thread");
 
@@ -26,12 +26,12 @@ pub use pool::Pool;
 /// # Panics
 ///
 /// Panics if failed to spawn the thread.
-pub fn spawn<F, T>(intent: ThreadIntent, f: F) -> JoinHandle<T>
+pub fn spawn<F, T>(intent: ThreadIntent, name: String, f: F) -> JoinHandle<T>
 where
    F: (FnOnce() -> T) + Send + 'static,
    T: Send + 'static,
 {
-    Builder::new(intent).spawn(f).expect("failed to spawn thread")
+    Builder::new(intent, name).spawn(f).expect("failed to spawn thread")
 }
 
 pub struct Builder {
@@ -42,13 +42,8 @@ pub struct Builder {
 
 impl Builder {
    #[must_use]
-    pub fn new(intent: ThreadIntent) -> Self {
-        Self { intent, inner: jod_thread::Builder::new(), allow_leak: false }
-    }
-
-    #[must_use]
-    pub fn name(self, name: String) -> Self {
-        Self { inner: self.inner.name(name), ..self }
+    pub fn new(intent: ThreadIntent, name: impl Into<String>) -> Self {
+        Self { intent, inner: jod_thread::Builder::new().name(name.into()), allow_leak: false }
    }
 
    #[must_use]
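With this hunk, `stdx::thread::Builder::new` takes the thread name together with the `ThreadIntent`, so a thread can no longer be spawned without a name; the separate `.name(..)` setter (and the call sites that sometimes forgot it) go away. A small sketch of the same design choice applied to `std::thread::Builder` — the `NamedBuilder` wrapper here is hypothetical and omits the intent handling the real builder also carries:

```rust
use std::thread;

/// Sketch of the design: the name is a constructor argument, so a caller cannot
/// forget to set it, which is what the stdx change enforces.
struct NamedBuilder {
    inner: thread::Builder,
}

impl NamedBuilder {
    fn new(name: impl Into<String>) -> Self {
        Self { inner: thread::Builder::new().name(name.into()) }
    }

    fn stack_size(self, bytes: usize) -> Self {
        Self { inner: self.inner.stack_size(bytes) }
    }

    fn spawn<F, T>(self, f: F) -> std::io::Result<thread::JoinHandle<T>>
    where
        F: FnOnce() -> T + Send + 'static,
        T: Send + 'static,
    {
        self.inner.spawn(f)
    }
}

fn main() -> std::io::Result<()> {
    // Shape of the migrated call sites: name (and intent) first, then the optional knobs.
    let handle = NamedBuilder::new("BIG_STACK_THREAD")
        .stack_size(8 * 1024 * 1024)
        .spawn(|| println!("hello from {:?}", thread::current().name()))?;
    handle.join().expect("thread panicked");
    Ok(())
}
```

The payoff shows up in the remaining hunks: every worker thread in the diff now carries a meaningful name (`PrimeCaches#{id}`, `Flycheck{id}`, `Worker{idx}`, `VfsLoader`), which makes profiles and thread dumps easier to read.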
@@ -50,10 +50,9 @@ impl Pool {
        let extant_tasks = Arc::new(AtomicUsize::new(0));
 
        let mut handles = Vec::with_capacity(threads);
-        for _ in 0..threads {
-            let handle = Builder::new(INITIAL_INTENT)
+        for idx in 0..threads {
+            let handle = Builder::new(INITIAL_INTENT, format!("Worker{idx}",))
                .stack_size(STACK_SIZE)
-                .name("Worker".into())
                .allow_leak(true)
                .spawn({
                    let extant_tasks = Arc::clone(&extant_tasks);
@@ -38,8 +38,7 @@ impl loader::Handle for NotifyHandle {
    fn spawn(sender: loader::Sender) -> NotifyHandle {
        let actor = NotifyActor::new(sender);
        let (sender, receiver) = unbounded::<Message>();
-        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
-            .name("VfsLoader".to_owned())
+        let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker, "VfsLoader")
            .spawn(move || actor.run(receiver))
            .expect("failed to spawn thread");
        NotifyHandle { sender, _thread: thread }